code stringlengths 1 25.8M | language stringclasses 18 values | source stringclasses 4 values | repo stringclasses 78 values | path stringlengths 0 268 |
|---|---|---|---|---|
# event/attr.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Attribute implementation for _Dispatch classes.
The various listener targets for a particular event class are represented
as attributes, which refer to collections of listeners to be fired off.
These collections can exist at the class level as well as at the instance
level. An event is fired off using code like this::
some_object.dispatch.first_connect(arg1, arg2)
Above, ``some_object.dispatch`` would be an instance of ``_Dispatch`` and
``first_connect`` is typically an instance of ``_ListenerCollection``
if event listeners are present, or ``_EmptyListener`` if none are present.
The attribute mechanics here spend effort trying to ensure listener functions
are available with a minimum of function call overhead, that unnecessary
objects aren't created (i.e. many empty per-instance listener collections),
as well as that everything is garbage collectable when owning references are
lost. Other features such as "propagation" of listener functions across
many ``_Dispatch`` instances, "joining" of multiple ``_Dispatch`` instances,
as well as support for subclass propagation (e.g. events assigned to
``Pool`` vs. ``QueuePool``) are all implemented here.
"""
from __future__ import absolute_import, with_statement
from .. import util
from ..util import threading
from . import registry
from . import legacy
from itertools import chain
import weakref
class RefCollection(object):
    """Mixin providing a memoized weak reference to ``self``.

    The weakref carries a callback into the registry so bookkeeping for
    this collection is purged once the collection is garbage collected.
    """

    @util.memoized_property
    def ref(self):
        # Memoized so the same weakref object is reused for registry keys.
        return weakref.ref(self, registry._collection_gced)
class _DispatchDescriptor(RefCollection):
    """Class-level attributes on :class:`._Dispatch` classes."""

    def __init__(self, parent_dispatch_cls, fn):
        """Build the descriptor from the template function *fn* declared
        on the event class, recording its name and argument signature."""
        self.__name__ = fn.__name__
        argspec = util.inspect_getargspec(fn)
        # Positional event argument names, minus 'self'.
        self.arg_names = argspec.args[1:]
        self.has_kw = bool(argspec.keywords)
        # Older call signatures for this event, most recent version first.
        self.legacy_signatures = list(reversed(
            sorted(
                getattr(fn, '_legacy_signatures', []),
                key=lambda s: s[0]
            )
        ))
        self.__doc__ = fn.__doc__ = legacy._augment_fn_docs(
            self, parent_dispatch_cls, fn)

        # target class -> list of class-level listener fns.  Weak keys so
        # entries vanish together with their target classes.
        self._clslevel = weakref.WeakKeyDictionary()
        # target class -> cached _EmptyListener, built lazily in __get__().
        self._empty_listeners = weakref.WeakKeyDictionary()

    def _adjust_fn_spec(self, fn, named):
        """Wrap *fn* so it can be called with this event's canonical
        positional signature, adapting keyword-style and legacy listeners."""
        if named:
            fn = self._wrap_fn_for_kw(fn)
        if self.legacy_signatures:
            try:
                argspec = util.get_callable_argspec(fn, no_self=True)
            except TypeError:
                # Argspec not introspectable; leave fn unwrapped.
                pass
            else:
                fn = legacy._wrap_fn_for_legacy(self, fn, argspec)
        return fn

    def _wrap_fn_for_kw(self, fn):
        # Adapter converting the event's positional arguments into keyword
        # arguments, for listeners registered with named=True.
        def wrap_kw(*args, **kw):
            argdict = dict(zip(self.arg_names, args))
            argdict.update(kw)
            return fn(**argdict)
        return wrap_kw

    def insert(self, event_key, propagate):
        """Prepend the listener in *event_key* at the class level for its
        dispatch target and all current subclasses."""
        target = event_key.dispatch_target
        assert isinstance(target, type), \
            "Class-level Event targets must be classes."

        # Breadth-first walk over target plus all its subclasses.
        stack = [target]
        while stack:
            cls = stack.pop(0)
            stack.extend(cls.__subclasses__())
            if cls is not target and cls not in self._clslevel:
                # First sighting of this subclass: seed its listener list
                # from its MRO rather than inserting directly.
                self.update_subclass(cls)
            else:
                if cls not in self._clslevel:
                    self._clslevel[cls] = []
                self._clslevel[cls].insert(0, event_key._listen_fn)
        registry._stored_in_collection(event_key, self)

    def append(self, event_key, propagate):
        """Append the listener in *event_key* at the class level for its
        dispatch target and all current subclasses."""
        target = event_key.dispatch_target
        assert isinstance(target, type), \
            "Class-level Event targets must be classes."

        stack = [target]
        while stack:
            cls = stack.pop(0)
            stack.extend(cls.__subclasses__())
            if cls is not target and cls not in self._clslevel:
                self.update_subclass(cls)
            else:
                if cls not in self._clslevel:
                    self._clslevel[cls] = []
                self._clslevel[cls].append(event_key._listen_fn)
        registry._stored_in_collection(event_key, self)

    def update_subclass(self, target):
        """Populate *target*'s class-level listener list from listeners
        already registered against classes in its MRO, skipping
        duplicates."""
        if target not in self._clslevel:
            self._clslevel[target] = []
        clslevel = self._clslevel[target]
        for cls in target.__mro__[1:]:
            if cls in self._clslevel:
                clslevel.extend([
                    fn for fn
                    in self._clslevel[cls]
                    if fn not in clslevel
                ])

    def remove(self, event_key):
        """Remove the listener in *event_key* from its dispatch target and
        all current subclasses that have a class-level list."""
        target = event_key.dispatch_target

        stack = [target]
        while stack:
            cls = stack.pop(0)
            stack.extend(cls.__subclasses__())
            if cls in self._clslevel:
                self._clslevel[cls].remove(event_key._listen_fn)
        registry._removed_from_collection(event_key, self)

    def clear(self):
        """Clear all class level listeners"""
        to_clear = set()
        for dispatcher in self._clslevel.values():
            to_clear.update(dispatcher)
            dispatcher[:] = []
        registry._clear(self, to_clear)

    def for_modify(self, obj):
        """Return an event collection which can be modified.

        For _DispatchDescriptor at the class level of
        a dispatcher, this returns self.
        """
        return self

    def __get__(self, obj, cls):
        # Class access returns the descriptor itself; instance access
        # returns a per-parent-class _EmptyListener, cached on the
        # descriptor and on the instance's __dict__.
        if obj is None:
            return self
        elif obj._parent_cls in self._empty_listeners:
            ret = self._empty_listeners[obj._parent_cls]
        else:
            self._empty_listeners[obj._parent_cls] = ret = \
                _EmptyListener(self, obj._parent_cls)
        # assigning it to __dict__ means
        # memoized for fast re-access. but more memory.
        obj.__dict__[self.__name__] = ret
        return ret
class _HasParentDispatchDescriptor(object):
    """Mixin delegating argument-spec adjustment to ``self.parent``.

    ``self.parent`` is expected to be a _DispatchDescriptor, assigned by
    subclasses in their constructors.
    """

    def _adjust_fn_spec(self, fn, named):
        # The class-level descriptor knows the event's canonical signature.
        return self.parent._adjust_fn_spec(fn, named)
class _EmptyListener(_HasParentDispatchDescriptor):
    """Immutable, class-level-only view of an event's listeners.

    Stands in for an event attribute on a _Dispatch instance while no
    instance-level listeners exist; for_modify() swaps in a mutable
    _ListenerCollection the moment mutation is requested.
    """

    def __init__(self, parent, target_cls):
        # Make sure the class-level listener list exists before aliasing it.
        if target_cls not in parent._clslevel:
            parent.update_subclass(target_cls)
        self.parent = parent  # _DispatchDescriptor
        self.parent_listeners = parent._clslevel[target_cls]
        self.name = parent.__name__
        # No instance-level state: immutable, empty placeholders.
        self.propagate = frozenset()
        self.listeners = ()

    def for_modify(self, obj):
        """Return an event collection which can be modified.

        Creates a _ListenerCollection, installs it on *obj* in place of
        this placeholder (when still present there), and returns it.
        """
        replacement = _ListenerCollection(self.parent, obj._parent_cls)
        if obj.__dict__[self.name] is self:
            obj.__dict__[self.name] = replacement
        return replacement

    def _needs_modify(self, *args, **kw):
        raise NotImplementedError("need to call for_modify()")

    exec_once = insert = append = remove = clear = _needs_modify

    def __call__(self, *args, **kw):
        """Execute this event against the class-level listeners only."""
        for listener in self.parent_listeners:
            listener(*args, **kw)

    def __len__(self):
        return len(self.parent_listeners)

    def __iter__(self):
        return iter(self.parent_listeners)

    def __bool__(self):
        return bool(self.parent_listeners)

    __nonzero__ = __bool__
class _CompoundListener(_HasParentDispatchDescriptor):
    """Base for collections that fire class-level ``parent_listeners``
    followed by instance-level ``listeners``."""

    _exec_once = False

    @util.memoized_property
    def _exec_once_mutex(self):
        # Built lazily; guards the one-shot execution in exec_once().
        return threading.Lock()

    def exec_once(self, *args, **kw):
        """Execute this event, but only if it has not been
        executed already for this collection."""
        if self._exec_once:
            return
        with self._exec_once_mutex:
            if not self._exec_once:
                try:
                    self(*args, **kw)
                finally:
                    # Marked even when a listener raises, so a failing
                    # invocation is not retried.
                    self._exec_once = True

    def __call__(self, *args, **kw):
        """Execute this event: class-level listeners first, then
        instance-level ones."""
        for listener in self.parent_listeners:
            listener(*args, **kw)
        for listener in self.listeners:
            listener(*args, **kw)

    def __len__(self):
        return len(self.parent_listeners) + len(self.listeners)

    def __iter__(self):
        return chain(self.parent_listeners, self.listeners)

    def __bool__(self):
        return bool(self.listeners or self.parent_listeners)

    __nonzero__ = __bool__
class _ListenerCollection(RefCollection, _CompoundListener):
    """Instance-level attributes on instances of :class:`._Dispatch`.

    Represents a mutable collection of listeners; first created only via
    the _EmptyListener.for_modify() method (since 0.7.9).
    """

    def __init__(self, parent, target_cls):
        # Ensure the class-level list exists, then alias it so class-level
        # additions remain visible through this collection.
        if target_cls not in parent._clslevel:
            parent.update_subclass(target_cls)
        self.parent_listeners = parent._clslevel[target_cls]
        self.parent = parent
        self.name = parent.__name__
        self.listeners = []
        self.propagate = set()

    def for_modify(self, obj):
        """Return an event collection which can be modified.

        Already mutable at the instance level, so this returns self.
        """
        return self

    def _update(self, other, only_propagate=True):
        """Populate from the listeners in another :class:`_Dispatch`
        object."""
        existing_listeners = self.listeners
        existing_listener_set = set(existing_listeners)
        self.propagate.update(other.propagate)
        # NOTE(review): parentheses reflect Python's actual and/or
        # precedence in the original expression --
        # (new and not only_propagate) or already-propagated.  Confirm
        # this grouping (rather than new and (either)) is intended.
        other_listeners = [
            l for l in other.listeners
            if (l not in existing_listener_set and not only_propagate)
            or l in self.propagate
        ]
        existing_listeners.extend(other_listeners)
        to_associate = other.propagate.union(other_listeners)
        registry._stored_in_collection_multi(self, other, to_associate)

    def insert(self, event_key, propagate):
        fn = event_key._listen_fn
        if fn not in self.listeners:
            event_key.prepend_to_list(self, self.listeners)
            if propagate:
                self.propagate.add(fn)

    def append(self, event_key, propagate):
        fn = event_key._listen_fn
        if fn not in self.listeners:
            event_key.append_to_list(self, self.listeners)
            if propagate:
                self.propagate.add(fn)

    def remove(self, event_key):
        self.listeners.remove(event_key._listen_fn)
        self.propagate.discard(event_key._listen_fn)
        registry._removed_from_collection(event_key, self)

    def clear(self):
        registry._clear(self, self.listeners)
        self.propagate.clear()
        self.listeners[:] = []
class _JoinedDispatchDescriptor(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, cls):
if obj is None:
return self
else:
obj.__dict__[self.name] = ret = _JoinedListener(
obj.parent, self.name,
getattr(obj.local, self.name)
)
return ret
class _JoinedListener(_CompoundListener):
    """Listener collection for a 'joined' dispatcher: the local collection
    fires first, with the parent dispatcher's listeners layered behind."""

    _exec_once = False

    def __init__(self, parent, name, local):
        self.parent = parent
        self.name = name
        self.local = local
        # _CompoundListener fires parent_listeners before listeners, so
        # aliasing local here makes local listeners fire first.
        self.parent_listeners = self.local

    @property
    def listeners(self):
        # Resolved live so changes on the parent stay visible.
        return getattr(self.parent, self.name)

    def _adjust_fn_spec(self, fn, named):
        return self.local._adjust_fn_spec(fn, named)

    def for_modify(self, obj):
        # Upgrade the local side in place; mutations below target it.
        self.local = self.parent_listeners = self.local.for_modify(obj)
        return self

    def insert(self, event_key, propagate):
        self.local.insert(event_key, propagate)

    def append(self, event_key, propagate):
        self.local.append(event_key, propagate)

    def remove(self, event_key):
        self.local.remove(event_key)

    def clear(self):
        raise NotImplementedError()
#! /usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fractions import gcd
"""Code generation for bulk operations"""
# bits-per-value above this cutoff fall back to the generic BulkOperationPacked.
# (Stray C-style semicolon removed.)
MAX_SPECIALIZED_BITS_PER_VALUE = 24
# bpv widths supported by the PACKED_SINGLE_BLOCK format.
PACKED_64_SINGLE_BLOCK_BPV = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 16, 21, 32]
OUTPUT_FILE = "BulkOperation.java"
HEADER = """// This file has been automatically generated, DO NOT EDIT
package org.apache.lucene.util.packed;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
"""
FOOTER="""
protected int writeLong(long block, byte[] blocks, int blocksOffset) {
for (int j = 1; j <= 8; ++j) {
blocks[blocksOffset++] = (byte) (block >>> (64 - (j << 3)));
}
return blocksOffset;
}
/**
* For every number of bits per value, there is a minimum number of
* blocks (b) / values (v) you need to write in order to reach the next block
* boundary:
* - 16 bits per value -> b=2, v=1
* - 24 bits per value -> b=3, v=1
* - 50 bits per value -> b=25, v=4
* - 63 bits per value -> b=63, v=8
* - ...
*
* A bulk read consists in copying <code>iterations*v</code> values that are
* contained in <code>iterations*b</code> blocks into a <code>long[]</code>
* (higher values of <code>iterations</code> are likely to yield a better
* throughput): this requires n * (b + 8v) bytes of memory.
*
* This method computes <code>iterations</code> as
* <code>ramBudget / (b + 8v)</code> (since a long is 8 bytes).
*/
public final int computeIterations(int valueCount, int ramBudget) {
final int iterations = ramBudget / (byteBlockCount() + 8 * byteValueCount());
if (iterations == 0) {
// at least 1
return 1;
} else if ((iterations - 1) * byteValueCount() >= valueCount) {
// don't allocate for more than the size of the reader
return (int) Math.ceil((double) valueCount / byteValueCount());
} else {
return iterations;
}
}
}
"""
def is_power_of_two(n):
    """Return True if *n* is a positive power of two.

    The generator only calls this with bpv >= 1; the explicit ``n > 0``
    guard additionally rejects 0 (for which the bit trick alone would
    wrongly return True) and negative values, so the helper is safe for
    general use.
    """
    return n > 0 and n & (n - 1) == 0
def casts(typ):
    """Return (prefix, suffix) strings wrapping an expression in a Java
    cast to *typ*; no cast is needed when the target is already long."""
    if typ == "long":
        return "", ""
    return "(%s) (" % typ, ")"
def hexNoLSuffix(n):
    """hex() without the 'L' suffix that 32-bit Python 2 appends to longs
    (values above (1 << 31) - 1)."""
    s = hex(n)
    return s[:-1] if s.endswith('L') else s
def masks(bits):
    """Return (prefix, suffix) applying a *bits*-wide AND mask to a Java
    expression; a 64-bit value needs no masking at all."""
    if bits == 64:
        return "", ""
    return "(", " & %sL)" % (hexNoLSuffix((1 << bits) - 1))
def get_type(bits):
    """Return the Java primitive type name matching a bit width."""
    type_by_width = {8: "byte", 16: "short", 32: "int", 64: "long"}
    # Any other width is a programming error in the generator.
    assert bits in type_by_width
    return type_by_width[bits]
def block_value_count(bpv, bits=64):
    """Return ``(blocks, values)``: the smallest number of *bits*-wide
    blocks and packed values needed to reach the next block boundary for
    *bpv* bits per value.

    Uses floor division (``//``) so the arithmetic stays integral: this is
    identical to ``/`` under Python 2 ints but avoids silent float results
    (and the downstream ``xrange(float)`` failures) under Python 3.
    """
    blocks = bpv
    values = blocks * bits // bpv
    # Reduce to lowest terms over the factor 2 only, matching the
    # original reduction.
    while blocks % 2 == 0 and values % 2 == 0:
        blocks //= 2
        values //= 2
    assert values * bpv == bits * blocks, "%d values, %d blocks, %d bits per value" % (values, blocks, bpv)
    return (blocks, values)
def packed64(bpv, f):
    """Write the class body of BulkOperationPacked<bpv> to *f*: the
    constructor plus the decode() specializations.  The 64-bit case is a
    plain array copy / buffer wrap; every other width is generated by
    p64_decode for both int[] and long[] targets."""
    mask = (1 << bpv) - 1  # NOTE(review): unused here; p64_decode recomputes it

    f.write("\n")
    f.write(" public BulkOperationPacked%d() {\n" %bpv)
    f.write(" super(%d);\n" %bpv)
    f.write(" }\n\n")
    if bpv == 64:
        # 64 bits per value: blocks and values share the same bit layout.
        f.write(""" @Override
public void decode(long[] blocks, int blocksOffset, long[] values, int valuesOffset, int iterations) {
System.arraycopy(blocks, blocksOffset, values, valuesOffset, valueCount() * iterations);
}
@Override
public void decode(long[] blocks, int blocksOffset, int[] values, int valuesOffset, int iterations) {
throw new UnsupportedOperationException();
}
@Override
public void decode(byte[] blocks, int blocksOffset, int[] values, int valuesOffset, int iterations) {
throw new UnsupportedOperationException();
}
@Override
public void decode(byte[] blocks, int blocksOffset, long[] values, int valuesOffset, int iterations) {
LongBuffer.wrap(values, valuesOffset, iterations * valueCount()).put(ByteBuffer.wrap(blocks, blocksOffset, 8 * iterations * blockCount()).asLongBuffer());
}
""")
    else:
        # Emit decoders into int[] (32) and long[] (64) value arrays.
        p64_decode(bpv, f, 32)
        p64_decode(bpv, f, 64)
def p64_decode(bpv, f, bits):
    """Emit the two decode() methods for BulkOperationPacked<bpv>: one
    reading from long[] blocks and one from byte[] blocks, writing into a
    value array of the given *bits* width (32 -> int, 64 -> long).
    Unsupported combinations (value wider than the target type) throw."""
    blocks, values = block_value_count(bpv)
    typ = get_type(bits)
    cast_start, cast_end = casts(typ)

    # --- decode from long[] blocks ---
    f.write(" @Override\n")
    f.write(" public void decode(long[] blocks, int blocksOffset, %s[] values, int valuesOffset, int iterations) {\n" %typ)
    if bits < bpv:
        # A bpv-bit value cannot fit into the target primitive.
        f.write(" throw new UnsupportedOperationException();\n")
    else:
        f.write(" for (int i = 0; i < iterations; ++i) {\n")
        mask = (1 << bpv) - 1
        if is_power_of_two(bpv):
            # Values never straddle a block boundary: one shift/mask loop.
            f.write(" final long block = blocks[blocksOffset++];\n")
            f.write(" for (int shift = %d; shift >= 0; shift -= %d) {\n" %(64 - bpv, bpv))
            f.write(" values[valuesOffset++] = %s(block >>> shift) & %d%s;\n" %(cast_start, mask, cast_end))
            f.write(" }\n")
        else:
            # Fully unrolled: one statement per value in the bpv cycle.
            for i in xrange(0, values):
                block_offset = i * bpv / 64
                bit_offset = (i * bpv) % 64
                if bit_offset == 0:
                    # start of block
                    f.write(" final long block%d = blocks[blocksOffset++];\n" %block_offset);
                    f.write(" values[valuesOffset++] = %sblock%d >>> %d%s;\n" %(cast_start, block_offset, 64 - bpv, cast_end))
                elif bit_offset + bpv == 64:
                    # end of block
                    f.write(" values[valuesOffset++] = %sblock%d & %dL%s;\n" %(cast_start, block_offset, mask, cast_end))
                elif bit_offset + bpv < 64:
                    # middle of block
                    f.write(" values[valuesOffset++] = %s(block%d >>> %d) & %dL%s;\n" %(cast_start, block_offset, 64 - bit_offset - bpv, mask, cast_end))
                else:
                    # value spans across 2 blocks
                    mask1 = (1 << (64 - bit_offset)) -1
                    shift1 = bit_offset + bpv - 64
                    shift2 = 64 - shift1
                    f.write(" final long block%d = blocks[blocksOffset++];\n" %(block_offset + 1));
                    f.write(" values[valuesOffset++] = %s((block%d & %dL) << %d) | (block%d >>> %d)%s;\n" %(cast_start, block_offset, mask1, shift1, block_offset + 1, shift2, cast_end))
        f.write(" }\n")
    f.write(" }\n\n")

    # --- decode from byte[] blocks ---
    byte_blocks, byte_values = block_value_count(bpv, 8)

    f.write(" @Override\n")
    f.write(" public void decode(byte[] blocks, int blocksOffset, %s[] values, int valuesOffset, int iterations) {\n" %typ)
    if bits < bpv:
        f.write(" throw new UnsupportedOperationException();\n")
    else:
        if is_power_of_two(bpv) and bpv < 8:
            # Several whole values per byte.
            f.write(" for (int j = 0; j < iterations; ++j) {\n")
            f.write(" final byte block = blocks[blocksOffset++];\n")
            for shift in xrange(8 - bpv, 0, -bpv):
                f.write(" values[valuesOffset++] = (block >>> %d) & %d;\n" %(shift, mask))
            f.write(" values[valuesOffset++] = block & %d;\n" %mask)
            f.write(" }\n")
        elif bpv == 8:
            # Exactly one value per byte.
            f.write(" for (int j = 0; j < iterations; ++j) {\n")
            f.write(" values[valuesOffset++] = blocks[blocksOffset++] & 0xFF;\n")
            f.write(" }\n")
        elif is_power_of_two(bpv) and bpv > 8:
            # A whole number of bytes per value.
            f.write(" for (int j = 0; j < iterations; ++j) {\n")
            m = bits <= 32 and "0xFF" or "0xFFL"
            f.write(" values[valuesOffset++] =")
            for i in xrange(bpv / 8 - 1):
                f.write(" ((blocks[blocksOffset++] & %s) << %d) |" %(m, bpv - 8))
            f.write(" (blocks[blocksOffset++] & %s);\n" %m)
            f.write(" }\n")
        else:
            # General case: values may straddle byte boundaries; unroll one
            # statement per value in the cycle.
            f.write(" for (int i = 0; i < iterations; ++i) {\n")
            for i in xrange(0, byte_values):
                byte_start = i * bpv / 8
                bit_start = (i * bpv) % 8
                byte_end = ((i + 1) * bpv - 1) / 8
                bit_end = ((i + 1) * bpv - 1) % 8
                # Left-shift needed to position byte b within the value.
                shift = lambda b: 8 * (byte_end - b - 1) + 1 + bit_end
                if bit_start == 0:
                    f.write(" final %s byte%d = blocks[blocksOffset++] & 0xFF;\n" %(typ, byte_start))
                for b in xrange(byte_start + 1, byte_end + 1):
                    f.write(" final %s byte%d = blocks[blocksOffset++] & 0xFF;\n" %(typ, b))
                f.write(" values[valuesOffset++] =")
                if byte_start == byte_end:
                    # Value contained in a single byte.
                    if bit_start == 0:
                        if bit_end == 7:
                            f.write(" byte%d" %byte_start)
                        else:
                            f.write(" byte%d >>> %d" %(byte_start, 7 - bit_end))
                    else:
                        if bit_end == 7:
                            f.write(" byte%d & %d" %(byte_start, 2 ** (8 - bit_start) - 1))
                        else:
                            f.write(" (byte%d >>> %d) & %d" %(byte_start, 7 - bit_end, 2 ** (bit_end - bit_start + 1) - 1))
                else:
                    # Value assembled from multiple bytes.
                    if bit_start == 0:
                        f.write(" (byte%d << %d)" %(byte_start, shift(byte_start)))
                    else:
                        f.write(" ((byte%d & %d) << %d)" %(byte_start, 2 ** (8 - bit_start) - 1, shift(byte_start)))
                    for b in xrange(byte_start + 1, byte_end):
                        f.write(" | (byte%d << %d)" %(b, shift(b)))
                    if bit_end == 7:
                        f.write(" | byte%d" %byte_end)
                    else:
                        f.write(" | (byte%d >>> %d)" %(byte_end, 7 - bit_end))
                f.write(";\n")
            f.write(" }\n")
    f.write(" }\n\n")
if __name__ == '__main__':
    # Emit BulkOperation.java plus one BulkOperationPacked<N>.java per
    # specialized bits-per-value.
    f = open(OUTPUT_FILE, 'w')
    f.write(HEADER)
    f.write('\n')
    f.write('''/**
* Efficient sequential read/write of packed integers.
*/\n''')
    f.write('abstract class BulkOperation implements PackedInts.Decoder, PackedInts.Encoder {\n')
    f.write(' private static final BulkOperation[] packedBulkOps = new BulkOperation[] {\n')
    # One specialized decoder class per bpv up to the cutoff; the generic
    # BulkOperationPacked covers everything above it.
    for bpv in xrange(1, 65):
        if bpv > MAX_SPECIALIZED_BITS_PER_VALUE:
            f.write(' new BulkOperationPacked(%d),\n' % bpv)
            continue
        f2 = open('BulkOperationPacked%d.java' % bpv, 'w')
        f2.write(HEADER)
        if bpv == 64:
            # Only the 64-bit specialization needs the NIO buffer types.
            f2.write('import java.nio.LongBuffer;\n')
            f2.write('import java.nio.ByteBuffer;\n')
            f2.write('\n')
        f2.write('''/**
* Efficient sequential read/write of packed integers.
*/\n''')
        f2.write('final class BulkOperationPacked%d extends BulkOperationPacked {\n' % bpv)
        packed64(bpv, f2)
        f2.write('}\n')
        f2.close()
        f.write(' new BulkOperationPacked%d(),\n' % bpv)
    f.write(' };\n')
    f.write('\n')
    # PACKED_SINGLE_BLOCK lookup table, indexed by bpv-1; null entries mark
    # unsupported widths.
    f.write(' // NOTE: this is sparse (some entries are null):\n')
    f.write(' private static final BulkOperation[] packedSingleBlockBulkOps = new BulkOperation[] {\n')
    for bpv in xrange(1, max(PACKED_64_SINGLE_BLOCK_BPV)+1):
        if bpv in PACKED_64_SINGLE_BLOCK_BPV:
            f.write(' new BulkOperationPackedSingleBlock(%d),\n' % bpv)
        else:
            f.write(' null,\n')
    f.write(' };\n')
    f.write('\n')
    f.write("\n")
    # Factory dispatching on the packed format.
    f.write(" public static BulkOperation of(PackedInts.Format format, int bitsPerValue) {\n")
    f.write(" switch (format) {\n")
    f.write(" case PACKED:\n")
    f.write(" assert packedBulkOps[bitsPerValue - 1] != null;\n")
    f.write(" return packedBulkOps[bitsPerValue - 1];\n")
    f.write(" case PACKED_SINGLE_BLOCK:\n")
    f.write(" assert packedSingleBlockBulkOps[bitsPerValue - 1] != null;\n")
    f.write(" return packedSingleBlockBulkOps[bitsPerValue - 1];\n")
    f.write(" default:\n")
    f.write(" throw new AssertionError();\n")
    f.write(" }\n")
    f.write(" }\n")
    f.write(FOOTER)
    f.close()
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Bundle\FrameworkBundle\Test;
/**
 * Aggregates the BrowserKit, DomCrawler and HttpClient assertion traits
 * into a single trait for use in web test cases.
 */
trait WebTestAssertionsTrait
{
    use BrowserKitAssertionsTrait;
    use DomCrawlerAssertionsTrait;
    use HttpClientAssertionsTrait;
}
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.gradle.internal.idea;
import groovy.util.Node;
import groovy.util.NodeList;
import org.gradle.api.DefaultTask;
import org.xml.sax.SAXException;
import java.io.IOException;
import javax.xml.parsers.ParserConfigurationException;
public class EnablePreviewFeaturesTask extends DefaultTask {
public void enablePreview(String moduleFile, String languageLevel) throws IOException, ParserConfigurationException, SAXException {
IdeaXmlUtil.modifyXml(moduleFile, xml -> {
// Find the 'component' node
NodeList nodes = (NodeList) xml.depthFirst();
Node componentNode = null;
for (Object node : nodes) {
Node currentNode = (Node) node;
if ("component".equals(currentNode.name()) && "NewModuleRootManager".equals(currentNode.attribute("name"))) {
componentNode = currentNode;
break;
}
}
// Add the attribute to the 'component' node
if (componentNode != null) {
componentNode.attributes().put("LANGUAGE_LEVEL", languageLevel);
}
});
}
} | java | github | https://github.com/elastic/elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/idea/EnablePreviewFeaturesTask.java |
# Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A workload is the unit of execution. It represents a set of activities are are performed
and measured together, as well as the necessary setup and teardown procedures. A single
execution of a workload produces one :class:`wlauto.core.result.WorkloadResult` that is populated with zero or more
:class:`wlauto.core.result.WorkloadMetric`\ s and/or
:class:`wlauto.core.result.Artifact`\s by the workload and active instrumentation.
"""
from wlauto.core.extension import Extension
from wlauto.exceptions import WorkloadError
class Workload(Extension):
    """
    Base class for the workloads executed by the framework.

    Methods raising NotImplementedError *must* be implemented by derived
    classes; the remaining lifecycle hooks are optional overrides.
    """

    supported_devices = []
    supported_platforms = []
    summary_metrics = []
    requires_network = False

    def __init__(self, device, **kwargs):
        """
        Create a new Workload bound to *device*.

        :param device: the Device on which the workload will be executed.
        :raises WorkloadError: if the device or its platform is not supported.
        """
        super(Workload, self).__init__(**kwargs)
        device_ok = not self.supported_devices or device.name in self.supported_devices
        if not device_ok:
            raise WorkloadError('Workload {} does not support device {}'.format(self.name, device.name))
        platform_ok = not self.supported_platforms or device.platform in self.supported_platforms
        if not platform_ok:
            raise WorkloadError('Workload {} does not support platform {}'.format(self.name, device.platform))
        self.device = device

    def init_resources(self, context):
        """
        Early resource discovery/initialization hook.

        Invoked during the initial loading stage, before the device is ready
        and before this instance is validated, so it must not depend on any
        device state.
        """
        pass

    def initialize(self, context):
        """
        Once-per-run initialization of this workload instance; unlike
        ``setup()``, not invoked on every iteration.
        """
        pass

    def setup(self, context):  # pylint: disable=unused-argument
        """
        Prepare to run the workload: copy files to the device, configure
        environments, and perform any on-device pre-execution checks.
        """
        if self.requires_network:
            self.check_network_connected()

    def run(self, context):
        """Execute the workload; this performs the actual "work"."""
        pass

    def update_result(self, context):
        """
        Update the result within *context* with metrics from this
        workload iteration.
        """
        pass

    def teardown(self, context):
        """Perform any final clean up for the Workload."""
        pass

    def finalize(self, context):
        pass

    def check_network_connected(self):
        """Raise WorkloadError when the device reports no internet access."""
        if self.device.is_network_connected():
            return
        message = 'Workload "{}" requires internet. Device "{}" does not appear to be connected to the internet.'
        raise WorkloadError(message.format(self.name, self.device.name))

    def __str__(self):
        return '<Workload {}>'.format(self.name)
#pragma once
#include <condition_variable>
#include <memory>
#include <optional>
#include <tuple>
#include <type_traits>
#include <utility>
#include <ATen/core/Dict.h>
#include <ATen/core/List.h>
#include <ATen/core/IListRef.h>
#include <ATen/core/functional.h>
#include <ATen/core/jit_type.h>
#include <ATen/core/qualified_name.h>
#include <ATen/core/rref_interface.h>
#include <ATen/core/symbol.h>
#include <c10/core/DeviceGuard.h>
#include <c10/core/Event.h>
#include <c10/core/Scalar.h>
#include <c10/core/Stream.h>
#include <c10/core/StreamGuard.h>
#include <c10/core/TensorImpl.h>
#include <c10/core/UndefinedTensorImpl.h>
#include <c10/core/impl/DeviceGuardImplInterface.h>
#include <c10/util/FunctionRef.h>
#include <c10/util/Logging.h>
#include <c10/util/hash.h>
#include <c10/util/intrusive_ptr.h>
#include <c10/util/irange.h>
C10_DIAGNOSTIC_PUSH_AND_IGNORED_IF_DEFINED("-Wswitch-default")
namespace torch {
namespace jit {
struct Function;
struct CompilationUnit;
} // namespace jit
TORCH_API bool isCustomClass(const c10::IValue& v);
} // namespace torch
namespace c10 {
struct IValue;
struct ClassType;
struct TupleType;
struct EnumType;
struct InferredType;
// For custom class __init__ registration, we need to pass in a function
// that looks like this: [](IValue x, args...)
// However, make_boxed_from_unboxed_functor.h automatically sets the input types
// of the function by introspecting the types of the functor (which is IValue in
// this case). However, we need the type it binds to be Foo.
// Instead, we pass in a lambda [](ivalue_holder<CurClass> x, args...) from
// which getTypePtr can recover the original class pointer.
// Holder used when registering custom-class __init__: the bound lambda takes
// tagged_capsule<CurClass> so getTypePtr can recover the original class (see
// the comment above).
template <typename TaggedCapsuleType>
struct tagged_capsule {
  IValue ivalue;
};

// Destructively extract the payload as an intrusive_ptr, resetting this
// IValue to None.  The undefined-tensor sentinel maps to NullType's
// singleton instead of being reclaimed.
template <class T, class NullType>
c10::intrusive_ptr<T, NullType> IValue::moveToIntrusivePtr() {
  auto t = c10::intrusive_ptr<T, NullType>::reclaim(
      payload.u.as_intrusive_ptr == c10::UndefinedTensorImpl::singleton()
          ? NullType::singleton()
          : static_cast<T*>(payload.u.as_intrusive_ptr));
  clearToNone();
  return t;
}

// Non-destructive counterpart: increfs the payload and hands out a fresh
// intrusive_ptr; this IValue keeps its own reference.
template <typename T, class NullType>
c10::intrusive_ptr<T, NullType> IValue::toIntrusivePtr() const {
  if (payload.u.as_intrusive_ptr == c10::UndefinedTensorImpl::singleton()) {
    return c10::intrusive_ptr<T, NullType>();
  }
  c10::raw::intrusive_ptr::incref(payload.u.as_intrusive_ptr);
  return c10::intrusive_ptr<T, NullType>::reclaim(
      static_cast<T*>(payload.u.as_intrusive_ptr));
}

// static_cast for intrusive_ptr; ownership transfers to the result.
template <class T, class U>
intrusive_ptr<T> static_intrusive_pointer_cast(intrusive_ptr<U> r) {
  return intrusive_ptr<T>::reclaim(static_cast<T*>(r.release()));
}

// NOTE(review): when the dynamic_cast fails this reclaims nullptr while the
// released reference is never decref'd (leaking it) -- confirm callers only
// use this where the cast is known to succeed.
template <class T, class U>
intrusive_ptr<T> dynamic_intrusive_pointer_cast(intrusive_ptr<U> r) {
  return intrusive_ptr<T>::reclaim(dynamic_cast<T*>(r.release()));
}
// Tag-checked accessors.  In each pair, the && overload moves the payload out
// (leaving this IValue as None via moveToIntrusivePtr) while the const&
// overload increfs and returns a new handle.
inline c10::intrusive_ptr<ivalue::Future> IValue::toFuture() && {
  AT_ASSERT(isFuture(), "Expected Future but got ", tagKind());
  return moveToIntrusivePtr<ivalue::Future>();
}
inline c10::intrusive_ptr<ivalue::Future> IValue::toFuture() const& {
  AT_ASSERT(isFuture(), "Expected Future but got ", tagKind());
  return toIntrusivePtr<ivalue::Future>();
}
inline c10::intrusive_ptr<ivalue::Await> IValue::toAwait() && {
  AT_ASSERT(isAwait(), "Expected Await but got ", tagKind());
  return moveToIntrusivePtr<ivalue::Await>();
}
inline c10::intrusive_ptr<ivalue::Await> IValue::toAwait() const& {
  AT_ASSERT(isAwait(), "Expected Await but got ", tagKind());
  return toIntrusivePtr<ivalue::Await>();
}
inline c10::intrusive_ptr<c10::RRefInterface> IValue::toRRef() && {
  AT_ASSERT(isRRef(), "Expected RRef but got ", tagKind());
  return moveToIntrusivePtr<c10::RRefInterface>();
}
inline c10::intrusive_ptr<c10::RRefInterface> IValue::toRRef() const& {
  AT_ASSERT(isRRef(), "Expected RRef but got ", tagKind());
  return toIntrusivePtr<c10::RRefInterface>();
}
inline c10::intrusive_ptr<at::Quantizer> IValue::toQuantizer() && {
  AT_ASSERT(isQuantizer(), "Expected Quantizer but got ", tagKind());
  return moveToIntrusivePtr<at::Quantizer>();
}
inline c10::intrusive_ptr<at::Quantizer> IValue::toQuantizer() const& {
  AT_ASSERT(isQuantizer(), "Expected Quantizer but got ", tagKind());
  return toIntrusivePtr<at::Quantizer>();
}
inline c10::intrusive_ptr<ivalue::ConstantString> IValue::toString() && {
  AT_ASSERT(isString(), "Expected String but got ", tagKind());
  return moveToIntrusivePtr<ivalue::ConstantString>();
}
inline c10::intrusive_ptr<ivalue::ConstantString> IValue::toString() const& {
  AT_ASSERT(isString(), "Expected String but got ", tagKind());
  return toIntrusivePtr<ivalue::ConstantString>();
}
inline c10::intrusive_ptr<ivalue::Object> IValue::toObject() && {
  AT_ASSERT(isObject(), "Expected Object but got ", tagKind());
  return moveToIntrusivePtr<ivalue::Object>();
}
inline c10::intrusive_ptr<ivalue::Object> IValue::toObject() const& {
  AT_ASSERT(isObject(), "Expected Object but got ", tagKind());
  return toIntrusivePtr<ivalue::Object>();
}
inline c10::intrusive_ptr<ivalue::PyObjectHolder> IValue::
    toPyObjectHolder() && {
  TORCH_INTERNAL_ASSERT(isPyObject(), "Expected PyObject but got ", tagKind());
  return moveToIntrusivePtr<ivalue::PyObjectHolder>();
}
inline c10::intrusive_ptr<ivalue::PyObjectHolder> IValue::toPyObjectHolder()
    const& {
  TORCH_INTERNAL_ASSERT(isPyObject(), "Expected PyObject but got ", tagKind());
  return toIntrusivePtr<ivalue::PyObjectHolder>();
}
inline c10::intrusive_ptr<ivalue::EnumHolder> IValue::toEnumHolder() && {
  TORCH_INTERNAL_ASSERT(isEnum(), "Expected Enum but got ", tagKind());
  return moveToIntrusivePtr<ivalue::EnumHolder>();
}
inline c10::intrusive_ptr<ivalue::EnumHolder> IValue::toEnumHolder() const& {
  TORCH_INTERNAL_ASSERT(isEnum(), "Expected Enum but got ", tagKind());
  return toIntrusivePtr<ivalue::EnumHolder>();
}
// Complex values are boxed in a ComplexHolder; this copies the value out,
// leaving the IValue untouched.
inline c10::complex<double> IValue::toComplexDouble() const {
  TORCH_INTERNAL_ASSERT(isComplexDouble(), "Expected ComplexDouble but got ", tagKind());
  auto ptr = toIntrusivePtr<ivalue::ComplexHolder>();
  return (*ptr).val;
}
// Rvalue overload: moves the tensor out of the payload union and then
// resets this IValue to None via clearToNone().
inline at::Tensor IValue::toTensor() && {
  if (C10_UNLIKELY(!isTensor())) {
    reportToTensorTypeError();
  }
  auto result = std::move(payload.as_tensor);
  // As far as I can tell, omitting the usual explicit destructor call
  // is not UB in and of itself, and it's a slight perf win. The
  // destructor is a no-op, because the moved-from Tensor is
  // effectively an intrusive_ptr in the null state, so we don't need
  // the behavior for correctness reasons either. Leaving this
  // explanatory comment, including commented-out destructor call, to
  // make this abundantly clear.
  //
  // payload.as_tensor.~Tensor();
  clearToNone();
  return result;
}
// Lvalue overloads: return a reference directly into the payload union.
// The reference is only valid while this IValue is alive and still holds
// a tensor.
inline at::Tensor& IValue::toTensor() & {
  if (C10_UNLIKELY(!isTensor())) {
    reportToTensorTypeError();
  }
  return payload.as_tensor;
}
inline const at::Tensor& IValue::toTensor() const& {
  if (C10_UNLIKELY(!isTensor())) {
    reportToTensorTypeError();
  }
  return payload.as_tensor;
}
// Wrap the stored StorageImpl in a c10::Storage handle. The && overload
// transfers the reference; the const& overload adds an owner.
inline c10::Storage IValue::toStorage() && {
  AT_ASSERT(isStorage(), "Expected Storage but got ", tagKind());
  return c10::Storage(
      moveToIntrusivePtr<at::StorageImpl>());
}
inline c10::Storage IValue::toStorage() const& {
  AT_ASSERT(isStorage(), "Expected Storage but got ", tagKind());
  return c10::Storage(toIntrusivePtr<at::StorageImpl>());
}
// Rebuild a c10::Stream from the boxed (stream_id, device_index,
// device_type) triple. Note both overloads read (never move) the holder.
inline c10::Stream IValue::toStream() && {
  AT_ASSERT(isStream(), "Expected Stream but got ", tagKind());
  const auto holder = toIntrusivePtr<ivalue::StreamData3Holder>();
  const auto& packed = holder->val;
  return c10::Stream::unpack3(
      packed.stream_id, packed.device_index, packed.device_type);
}
inline c10::Stream IValue::toStream() const& {
  AT_ASSERT(isStream(), "Expected Stream but got ", tagKind());
  const auto holder = toIntrusivePtr<ivalue::StreamData3Holder>();
  const auto& packed = holder->val;
  return c10::Stream::unpack3(
      packed.stream_id, packed.device_index, packed.device_type);
}
// Extract a caffe2::Blob. The && overload transfers the stored reference;
// the const& overload returns an additional owning pointer.
inline c10::intrusive_ptr<caffe2::Blob> IValue::toBlob() && {
  AT_ASSERT(isBlob(), "Expected Blob but got ", tagKind());
  return moveToIntrusivePtr<caffe2::Blob>();
}
inline c10::intrusive_ptr<caffe2::Blob> IValue::toBlob() const& {
  AT_ASSERT(isBlob(), "Expected Blob but got ", tagKind());
  // Fix: removed a stray empty statement (`;`) that followed this return.
  return toIntrusivePtr<caffe2::Blob>();
}
// Extract the opaque custom-class capsule; move-from-rvalue /
// copy-from-lvalue.
inline c10::intrusive_ptr<torch::CustomClassHolder> IValue::toCapsule() && {
  TORCH_INTERNAL_ASSERT(isCapsule());
  return moveToIntrusivePtr<torch::CustomClassHolder>();
}
inline c10::intrusive_ptr<torch::CustomClassHolder> IValue::toCapsule() const& {
  TORCH_INTERNAL_ASSERT(isCapsule());
  return toIntrusivePtr<torch::CustomClassHolder>();
}
// Wrap the stored GeneratorImpl in an at::Generator handle.
inline at::Generator IValue::toGenerator() && {
  AT_ASSERT(isGenerator(), "Expected Generator but got ", tagKind());
  return at::Generator(moveToIntrusivePtr<at::GeneratorImpl>());
}
inline at::Generator IValue::toGenerator() const& {
  AT_ASSERT(isGenerator(), "Expected Generator but got ", tagKind());
  return at::Generator(toIntrusivePtr<at::GeneratorImpl>());
}
// A SymInt IValue may hold either a symbolic node (refcounted SymNodeImpl)
// or a plain int stored inline in the payload; both are accepted here.
inline c10::SymInt IValue::toSymInt() && {
  AT_ASSERT(isSymInt() || isInt(), "Expected SymInt or int but got ", tagKind());
  if (!isSymInt()) {
    // Plain-int case: value lives inline in the payload union.
    return c10::SymInt(payload.u.as_int);
  }
  return c10::SymInt(moveToIntrusivePtr<c10::SymNodeImpl>());
}
inline c10::SymInt IValue::toSymInt() const& {
  AT_ASSERT(isSymInt() || isInt(), "Expected SymInt or int but got ", tagKind());
  if (!isSymInt()) {
    return c10::SymInt(payload.u.as_int);
  }
  return c10::SymInt(toIntrusivePtr<c10::SymNodeImpl>());
}
// A SymFloat IValue may hold either a symbolic node or a plain double
// stored inline in the payload; both are accepted here.
inline c10::SymFloat IValue::toSymFloat() && {
  AT_ASSERT(isSymFloat() || isDouble(), "Expected SymFloat or double but got ", tagKind());
  if (!isSymFloat()) {
    // Plain-double case: value lives inline in the payload union.
    return c10::SymFloat(payload.u.as_double);
  }
  return c10::SymFloat(moveToIntrusivePtr<c10::SymNodeImpl>());
}
inline c10::SymFloat IValue::toSymFloat() const& {
  AT_ASSERT(isSymFloat() || isDouble(), "Expected SymFloat or double but got ", tagKind());
  if (!isSymFloat()) {
    return c10::SymFloat(payload.u.as_double);
  }
  return c10::SymFloat(toIntrusivePtr<c10::SymNodeImpl>());
}
// A SymBool IValue may hold either a symbolic node or a plain bool stored
// inline in the payload; both representations are accepted by both
// overloads.
inline c10::SymBool IValue::toSymBool() && {
  AT_ASSERT(isSymBool() || isBool(), "Expected SymBool or boolean but got ", tagKind());
  if (isSymBool()) {
    return c10::SymBool(moveToIntrusivePtr<c10::SymNodeImpl>());
  } else {
    return c10::SymBool(payload.u.as_bool);
  }
}
inline c10::SymBool IValue::toSymBool() const& {
  AT_ASSERT(isSymBool() || isBool(), "Expected SymBool or boolean but got ", tagKind());
  if (isSymBool()) {
    return c10::SymBool(toIntrusivePtr<c10::SymNodeImpl>());
  } else {
    return c10::SymBool(payload.u.as_bool);
  }
}
namespace ivalue {
// Validates that `actual_type` matches the custom-class `expected_type`.
// Declaration only; the implementation lives out of line.
void TORCH_API
checkCustomClassType(const ClassType* expected_type, const Type* actual_type);
// Shorthand for refcounted ownership within the ivalue namespace.
template <typename T>
using Shared = c10::intrusive_ptr<T>;
// string
// Immutable, refcounted string payload used by string-typed IValues.
struct TORCH_API ConstantString final : c10::intrusive_ptr_target {
 private:
  // NOLINTNEXTLINE(cppcoreguidelines-avoid-const-or-ref-data-members)
  const std::string str_;
 public:
  ConstantString(std::string str) : str_(std::move(str)) {}
  ConstantString(std::string_view str) : str_(std::string(str)) {}
  // Factory helpers returning an intrusive_ptr-owned instance.
  static c10::intrusive_ptr<ConstantString> create(std::string str_);
  static c10::intrusive_ptr<ConstantString> create(std::string_view str_);
  static c10::intrusive_ptr<ConstantString> create(const char* str_);
  const std::string& string() const {
    return str_;
  }
  // View into the owned string; valid for the lifetime of this object.
  std::string_view string_view() const {
    return str_;
  }
  operator const std::string&() const {
    return string();
  }
  TORCH_API friend std::ostream& operator<<(
      std::ostream& out,
      const ConstantString& v);
};
struct Future;
// Storage for the elements of an ivalue::Tuple.
//
// Small tuples (up to 3 elements) are stored inline inside this object;
// anything larger falls back to a heap-allocated std::vector. The active
// union member is selected by inlineSize_: 0 means elementsVector_ is
// active, 1..3 means that many elements of elementsInline_ are alive.
struct TORCH_API TupleElements {
 private:
  // 0 => vector representation; 1..3 => count of live inline elements.
  size_t inlineSize_;
  // We represent TupleElements this way to save doing a heap
  // allocation in the common (at least for unpickling) case where we
  // have only 3 elements. We have our own union instead of
  // c10::SmallVector<IValue> because c10::SmallVector<IValue> always
  // stores the begin/end/capacity pointers, which would be a waste of
  // space in our use case.
  union {
    std::vector<IValue> elementsVector_;
    // Don't want to declare a std::array because the convenient
    // iteration and size members are a footgun in this case -- the
    // actual size of the array may be smaller than 3!
    // NOLINTNEXTLINE(*c-arrays*)
    IValue elementsInline_[3];
  };
  // Runs the destructor of each live inline element. Callers must only
  // invoke this when the inline representation is active.
  void destroyInline() {
   for (const auto ii : c10::irange(inlineSize_)) {
     elementsInline_[ii].~IValue();
   }
  }
 public:
  using iterator = IValue*;
  using const_iterator = const IValue*;
  // Default state is an empty vector representation (placement-new into
  // the union).
  TupleElements() : inlineSize_(0) {
    new (&elementsVector_) std::vector<IValue>();
  }
  explicit TupleElements(std::vector<IValue> elements)
      : inlineSize_(0), elementsVector_(std::move(elements)) {}
  // Copies up to 3 elements inline; larger inputs go to the vector.
  explicit TupleElements(c10::ArrayRef<IValue> elements)
      : inlineSize_(elements.size() <= 3 ? elements.size() : 0) {
    switch (inlineSize_) {
      case 3:
        new (&elementsInline_[2]) IValue(elements[2]);
        [[fallthrough]];
      case 2:
        new (&elementsInline_[1]) IValue(elements[1]);
        [[fallthrough]];
      case 1:
        new (&elementsInline_[0]) IValue(elements[0]);
        break;
      case 0:
        new (&elementsVector_) std::vector<IValue>(elements.begin(), elements.end());
        break;
    }
  }
  explicit TupleElements(IValue&& e1)
      : inlineSize_(1) {
    new (&elementsInline_[0]) IValue(std::move(e1));
  }
  explicit TupleElements(IValue&& e1, IValue&& e2)
      : inlineSize_(2) {
    new (&elementsInline_[0]) IValue(std::move(e1));
    new (&elementsInline_[1]) IValue(std::move(e2));
  }
  explicit TupleElements(IValue&& e1, IValue&& e2, IValue&& e3)
      : inlineSize_(3) {
    new (&elementsInline_[0]) IValue(std::move(e1));
    new (&elementsInline_[1]) IValue(std::move(e2));
    new (&elementsInline_[2]) IValue(std::move(e3));
  }
  // Destroys whichever union member is active.
  ~TupleElements() {
    if (inlineSize_) {
      destroyInline();
    } else {
      elementsVector_.~vector();
    }
  }
  // It would be nice to make this noncopyable to prevent people from
  // writing code like `auto output =
  // forward(...).toTupleRef().elements()` (which does refcount bumps on
  // each element, unlike the more efficient but verbose
  // ```
  // auto outputIntrusivePtr = forward(...).toTuple();
  // const auto& output = outputIntrusivePtr->elements();
  // ```
  // ), but there is simply an overwhelming amount of code that does
  // it the inefficient way.
  // See also operator std::vector below.
  TupleElements(const TupleElements& rhs)
      : inlineSize_(rhs.inlineSize_) {
    if (rhs.inlineSize_) {
      for (const auto ii : c10::irange(inlineSize_)) {
        new (&elementsInline_[ii]) IValue(rhs.elementsInline_[ii]);
      }
    } else {
      new (&elementsVector_) std::vector<IValue>(rhs.elementsVector_);
    }
  }
  // Copy assignment must handle all four inline/vector combinations;
  // switching representation requires destroying the active union member
  // and placement-newing the other one.
  TupleElements& operator=(const TupleElements& rhs) {
    if (inlineSize_) {
      if (rhs.inlineSize_) {
        // Both inline: assign over the overlap, then construct or destroy
        // the tail depending on which side is longer.
        for (const auto ii : c10::irange(std::min(inlineSize_, rhs.inlineSize_))) {
          elementsInline_[ii] = rhs.elementsInline_[ii];
        }
        if (rhs.inlineSize_ > inlineSize_) {
          for (const auto ii : c10::irange(inlineSize_, rhs.inlineSize_)) {
            new (&elementsInline_[ii]) IValue(rhs.elementsInline_[ii]);
          }
        } else {
          for (const auto ii : c10::irange(rhs.inlineSize_, inlineSize_)) {
            elementsInline_[ii].~IValue();
          }
        }
      } else {
        destroyInline();
        new (&elementsVector_) std::vector<IValue>(rhs.elementsVector_);
      }
    } else {
      if (rhs.inlineSize_) {
        elementsVector_.~vector();
        for (const auto ii : c10::irange(rhs.inlineSize_)) {
          new (&elementsInline_[ii]) IValue(rhs.elementsInline_[ii]);
        }
      } else {
        elementsVector_ = rhs.elementsVector_;
      }
    }
    inlineSize_ = rhs.inlineSize_;
    return *this;
  }
  // Move construction leaves rhs's elements moved-from but still alive.
  TupleElements(TupleElements&& rhs) noexcept
      : inlineSize_(rhs.inlineSize_) {
    if (inlineSize_) {
      for (const auto ii : c10::irange(inlineSize_)) {
        new (&elementsInline_[ii]) IValue(std::move(rhs.elementsInline_[ii]));
      }
    } else {
      new (&elementsVector_) std::vector<IValue>(std::move(rhs.elementsVector_));
    }
  }
  // Same four-way structure as copy assignment, but moving element-wise.
  TupleElements& operator=(TupleElements&& rhs) noexcept {
    if (inlineSize_) {
      if (rhs.inlineSize_) {
        for (const auto ii : c10::irange(std::min(inlineSize_, rhs.inlineSize_))) {
          elementsInline_[ii] = std::move(rhs.elementsInline_[ii]);
        }
        if (rhs.inlineSize_ > inlineSize_) {
          for (const auto ii : c10::irange(inlineSize_, rhs.inlineSize_)) {
            new (&elementsInline_[ii]) IValue(std::move(rhs.elementsInline_[ii]));
          }
        } else {
          for (const auto ii : c10::irange(rhs.inlineSize_, inlineSize_)) {
            elementsInline_[ii].~IValue();
          }
        }
      } else {
        destroyInline();
        new (&elementsVector_) std::vector<IValue>(std::move(rhs.elementsVector_));
      }
    } else {
      if (rhs.inlineSize_) {
        elementsVector_.~vector();
        for (const auto ii : c10::irange(rhs.inlineSize_)) {
          new (&elementsInline_[ii]) IValue(std::move(rhs.elementsInline_[ii]));
        }
      } else {
        elementsVector_ = std::move(rhs.elementsVector_);
      }
    }
    inlineSize_ = rhs.inlineSize_;
    return *this;
  }
  // Non-owning view over whichever representation is active.
  [[nodiscard]] c10::ArrayRef<IValue> asArrayRef() const {
    if (inlineSize_) {
      return c10::ArrayRef<IValue>(elementsInline_, inlineSize_);
    } else {
      return elementsVector_;
    }
  }
  // Mimic implicit conversion from std::vector to ArrayRef.
  operator c10::ArrayRef<IValue>() const {
    return asArrayRef();
  }
  static size_t hash(const TupleElements& v) {
    return c10::hash<c10::ArrayRef<IValue>>()(v.asArrayRef());
  }
  // Replace the contents wholesale, switching to the vector
  // representation if currently inline.
  void setContents(std::vector<IValue>&& contents) {
    if (inlineSize_) {
      destroyInline();
      new (&elementsVector_) std::vector<IValue>(std::move(contents));
      inlineSize_ = 0;
    } else {
      elementsVector_ = std::move(contents);
    }
  }
  [[nodiscard]] bool empty() const {
    return inlineSize_ ? false : elementsVector_.empty();
  }
  [[nodiscard]] size_t size() const {
    return inlineSize_ ? inlineSize_ : elementsVector_.size();
  }
  [[nodiscard]] IValue& operator[](size_t idx) {
    if (inlineSize_) {
      return elementsInline_[idx];
    } else {
      return elementsVector_[idx];
    }
  }
  [[nodiscard]] const IValue& operator[](size_t idx) const {
    if (inlineSize_) {
      return elementsInline_[idx];
    } else {
      return elementsVector_[idx];
    }
  }
  // Bounds-checked access (TORCH_CHECK / vector::at on failure).
  [[nodiscard]] IValue& at(size_t idx) {
    if (inlineSize_) {
      TORCH_INTERNAL_ASSERT_DEBUG_ONLY(inlineSize_ <= 3);
      TORCH_CHECK(idx < inlineSize_, "TupleElements: invalid index Index = ", idx, "; Length = ", inlineSize_);
      return elementsInline_[idx];
    } else {
      return elementsVector_.at(idx);
    }
  }
  [[nodiscard]] const IValue& at(size_t idx) const {
    if (inlineSize_) {
      TORCH_INTERNAL_ASSERT_DEBUG_ONLY(inlineSize_ <= 3);
      TORCH_CHECK(idx < inlineSize_, "TupleElements: invalid index Index = ", idx, "; Length = ", inlineSize_);
      return elementsInline_[idx];
    } else {
      TORCH_CHECK(idx < elementsVector_.size(), "TupleElements: invalid index Index = ", idx, "; Length = ", elementsVector_.size());
      return elementsVector_.at(idx);
    }
  }
  // Raw-pointer iteration works for both representations since both store
  // elements contiguously.
  [[nodiscard]] iterator begin() {
    if (inlineSize_) {
      return elementsInline_;
    } else {
      return elementsVector_.data();
    }
  }
  [[nodiscard]] iterator end() {
    if (inlineSize_) {
      return elementsInline_ + inlineSize_;
    } else {
      return elementsVector_.data() + elementsVector_.size();
    }
  }
  [[nodiscard]] const_iterator begin() const {
    if (inlineSize_) {
      return elementsInline_;
    } else {
      return elementsVector_.data();
    }
  }
  [[nodiscard]] const_iterator end() const {
    if (inlineSize_) {
      return elementsInline_ + inlineSize_;
    } else {
      return elementsVector_.data() + elementsVector_.size();
    }
  }
  [[nodiscard]] const_iterator cbegin() const {
    return begin();
  }
  [[nodiscard]] const_iterator cend() const {
    return end();
  }
  // Copies all elements into a fresh vector.
  [[nodiscard]] std::vector<IValue> vec() const& {
    return asArrayRef().vec();
  }
  [[nodiscard]] IValue& back() {
    return *(end() - 1);
  }
  [[nodiscard]] const IValue& back() const {
    return *(end() - 1);
  }
  // Rvalue overload: moves each element out instead of copying.
  [[nodiscard]] std::vector<IValue> vec() && {
    std::vector<IValue> result;
    result.reserve(size());
    for (auto&& iv : *this) {
      result.push_back(std::move(iv));
    }
    return result;
  }
  // More compatibility shims for the overwhelming amount of code that
  // likes to copy tuple elements into a vector; see comment above the
  // copy constructor.
  operator std::vector<IValue>() const & {
    return vec();
  }
  operator std::vector<IValue>() && {
    return vec();
  }
};
// Factory trait mapping a tuple-type flavor (TupleType or DynamicType) to
// the way its type object is created; specialized below.
template <typename T>
struct TupleTypeFactory {};
template <>
struct TORCH_API TupleTypeFactory<TupleType> {
  static TupleTypePtr create(std::vector<TypePtr> types) {
    return TupleType::create(std::move(types));
  }
  // Used when a cached type_ is not castable to TupleType; defined out of
  // line.
  static TupleTypePtr fallback(const Type& type);
};
template <>
struct TORCH_API TupleTypeFactory<c10::DynamicType> {
  static DynamicTypePtr create(const std::vector<TypePtr>& elemTypes);
  static DynamicTypePtr fallback(const Type& /*unused*/);
};
// Refcounted tuple value: a TupleElements payload plus a type object that,
// for unnamed tuples, is computed lazily on first access to type<T>().
struct TORCH_API Tuple : c10::intrusive_ptr_target {
 private:
  TupleElements elements_;
  mutable c10::TypePtr type_; // lazily computed for unnamed tuples
 public:
  // named tuples have additional type information, so we
  // directly create them tagged
  static c10::intrusive_ptr<Tuple> createNamed(
      std::vector<IValue> elements_,
      c10::TypePtr type_) {
    return c10::make_intrusive<Tuple>(std::move(elements_), std::move(type_));
  }
  static c10::intrusive_ptr<Tuple> createNamed(
      TupleElements elements_,
      std::shared_ptr<TupleType> type_) {
    return c10::make_intrusive<Tuple>(std::move(elements_), std::move(type_));
  }
  static c10::intrusive_ptr<Tuple> createNamed(
      std::initializer_list<IValue> elements_,
      std::shared_ptr<TupleType> type_) {
    return createNamed(TupleElements(c10::ArrayRef<IValue>(elements_)), std::move(type_));
  }
  // MSVC apparently can't disambiguate the other two overloads of
  // create when passed an initializer_list without this.
  static c10::intrusive_ptr<Tuple> create(std::initializer_list<IValue> elements_) {
    return create(c10::ArrayRef<IValue>(elements_));
  }
  static c10::intrusive_ptr<Tuple> create(std::vector<IValue> elements_) {
    return c10::make_intrusive<Tuple>(std::move(elements_));
  }
  static c10::intrusive_ptr<Tuple> create(TupleElements elements_) {
    return c10::make_intrusive<Tuple>(std::move(elements_));
  }
  static c10::intrusive_ptr<Tuple> create(c10::ArrayRef<IValue> elements_) {
    return create(TupleElements(elements_));
  }
  // 1-3 element overloads route to the inline TupleElements constructors.
  static c10::intrusive_ptr<Tuple> create(IValue e1) {
    return c10::make_intrusive<Tuple>(std::move(e1));
  }
  static c10::intrusive_ptr<Tuple> create(IValue e1, IValue e2) {
    return c10::make_intrusive<Tuple>(std::move(e1), std::move(e2));
  }
  static c10::intrusive_ptr<Tuple> create(IValue e1, IValue e2, IValue e3) {
    return c10::make_intrusive<Tuple>(std::move(e1), std::move(e2), std::move(e3));
  }
 private:
  // Workaround inability to use `>` operator in template argument list.
  template <typename... Args>
  static constexpr bool hasMoreThanThreeArgs() {
    return sizeof...(Args) > 3;
  }
 public:
  // Variadic create: small argument packs use the fixed-arity overloads,
  // larger ones go through a vector.
  template <typename... Args>
  static c10::intrusive_ptr<Tuple> create(Args&&... elements_) {
    switch (sizeof...(Args)) {
      case 1:
      case 2:
      case 3:
        return create(IValue(std::forward<Args>(elements_))...);
      default:
        return create(
            std::vector<IValue>{IValue(std::forward<Args>(elements_))...});
    }
  }
  // Again, it would be nice to make this noncopyable, but there's a
  // lot of extant code that copies Tuples.
  // Tuple(const Tuple& rhs) = delete;
  const TupleElements& elements() const& {
    return elements_;
  }
  // Rvalue overload moves the element storage out.
  TupleElements elements() && {
    return std::move(elements_);
  }
  void setElements(std::vector<IValue>&& elements) {
    elements_.setContents(std::move(elements));
  }
  void setElements(TupleElements&& elements) {
    elements_ = std::move(elements);
  }
  // "unsafe": no bounds check and no invalidation of the cached type_;
  // callers are responsible for keeping both consistent.
  void unsafeSetElement(size_t idx, const IValue& element) {
    elements_[idx] = element;
  }
  void unsafeSetElement(size_t idx, IValue&& element) {
    elements_[idx] = std::move(element);
  }
  size_t size() const {
    return elements_.size();
  }
  // Returns the tuple's type, computing and caching it on first call for
  // unnamed tuples; falls back when the cached type isn't castable to T.
  template <typename T = c10::TupleType>
  std::shared_ptr<T> type() const {
    if (!type_) {
      type_ = TupleTypeFactory<T>::create(fmap(elements(), [&](const IValue& v) {
        return v.type<typename T::ElementType>();
      }));
    }
    if (auto t = type_->cast<T>()) {
      return t;
    }
    return TupleTypeFactory<T>::fallback(*type_);
  }
  static size_t hash(const Tuple& t) {
    return c10::get_hash(t.elements());
  }
  TORCH_API friend bool operator==(
      const ivalue::Tuple& lhs,
      const ivalue::Tuple& rhs);
 private:
  // NOTE: If we try to avoid the overloads without
  // `std::shared_ptr<TupleType> type` by defaulting it to nullptr, we
  // end up having to call (part of) the shared_ptr destructor for
  // `type` even though we should know statically it won't do
  // anything.
  explicit Tuple(std::vector<IValue> elements)
    : elements_(std::move(elements)){}
  explicit Tuple(std::vector<IValue> elements, c10::TypePtr type)
    : elements_(std::move(elements)), type_(std::move(type)) {}
  explicit Tuple(TupleElements&& elements)
    : elements_(std::move(elements)) {}
  explicit Tuple(TupleElements&& elements, std::shared_ptr<TupleType> type)
    : elements_(std::move(elements)), type_(std::move(type)) {}
  explicit Tuple(IValue&& e1)
    : elements_(std::move(e1)) {}
  explicit Tuple(IValue&& e1, std::shared_ptr<TupleType> type)
    : elements_(std::move(e1)), type_(std::move(type)) {}
  explicit Tuple(IValue&& e1, IValue&& e2)
    : elements_(std::move(e1), std::move(e2)) {}
  explicit Tuple(IValue&& e1, IValue&& e2, std::shared_ptr<TupleType> type)
    : elements_(std::move(e1), std::move(e2)), type_(std::move(type)) {}
  explicit Tuple(IValue&& e1, IValue&& e2, IValue&& e3)
    : elements_(std::move(e1), std::move(e2), std::move(e3)) {}
  explicit Tuple(IValue&& e1, IValue&& e2, IValue&& e3, std::shared_ptr<TupleType> type)
    : elements_(std::move(e1), std::move(e2), std::move(e3)), type_(std::move(type)) {}
  // Constructors are private: instances must be created via make_intrusive
  // (through the create/createNamed factories above).
  friend class c10::intrusive_ptr<Tuple>;
};
// Forward declarations for types defined later in this header / elsewhere.
struct Object;
struct PyObjectHolder;
struct EnumHolder;
} // namespace ivalue
// Future
struct C10_EXPORT ivalue::Future final : c10::intrusive_ptr_target {
private:
// Keep this private in order to force users to go through make_intrusive and
// thus prevent creating a Future that's not held by an intrusive_ptr.
explicit Future(TypePtr type, std::vector<c10::Device> devices={})
: type_(std::move(type)),
impl_(getTypeOfDevices(devices)),
devices_(sortAndDeduplicateDevices(impl_, std::move(devices))) {}
friend c10::intrusive_ptr<Future>;
struct FutureCallback {
std::function<void(Future&)> callback;
bool uses_future; // whether the Future& passed in is actually used
template <typename T>
FutureCallback(T callback, bool uses_future)
: callback(std::move(callback)), uses_future(uses_future) {}
};
public:
Future(const Future&) = delete;
Future(Future&&) = delete;
Future& operator=(const Future&) = delete;
Future& operator=(Future&&) = delete;
// Destructor
// Explicitly destroy events under device guard, otherwise it can lead to
// extra context being created on device 0. Reason: python garbage collector
// calls this destructor, but python GC does not have a device context, so a
// "default" one (usually on device 0) could be created when we go down the
// line of event destroy.
~Future() override {
while (!events_.empty()) {
c10::OptionalDeviceGuard deviceGuard(events_.back().device());
events_.pop_back();
}
}
struct TORCH_API FutureError final : public std::exception {
explicit FutureError(std::string&& error_msg_)
: error_msg(std::move(error_msg_)) {}
FutureError() = default;
const char* what() const noexcept override {
return error_msg.c_str();
}
std::string error_msg;
};
/**
* Wait on the future until it completes.
*/
void wait() {
std::unique_lock<std::mutex> lock(mutex_);
finished_cv_.wait(lock, [&]() -> bool { return completed_; });
synchronizeWithCurrentStreams();
}
/**
* Wait on the future until it completes and throw an
* exception if an error exists.
*/
void waitAndThrow() {
wait();
if (eptr_) {
std::rethrow_exception(eptr_);
}
}
/**
* Explicitly mark the future as completed with the output value. Optionally,
* the storages for all tensors in IValue can be passed as well. The DataPtrs
* of these storages are used to synchronize CUDA streams. If storages isn't
* given we will attempt to extract it from the value, if we need to (this
* happens if a non-empty set of devices was given to the constructor). Thus
* one only needs to provide storages when 1) they cannot be extracted through
* IValue::getSubValues() or through pickling in case of Python object; or
* when 2) customized storage extraction is more efficient.
*/
using WeakStorage = c10::weak_intrusive_ptr<c10::StorageImpl>;
void markCompleted(
IValue value,
std::optional<std::vector<WeakStorage>> storages = std::nullopt) {
// Start by performing all steps that can throw, before setting any field.
// Do this before even acquiring the mutex, because extractStorages might
// acquire the GIL, which could lead to a lock inversion with our mutex.
// See https://github.com/pytorch/pytorch/issues/58239.
std::vector<WeakStorage> actualStorages;
std::vector<c10::Device> usedDevices;
try {
// FIXME We should always extract DataPtrs, in order to catch the case of
// users using CUDA values but forgetting to set devices, which currently
// leads to a silent synchronization/correctness issue. However, as this
// might worsen perf in CPU-only cases, we should only do so after careful
// benchmarks.
if (impl_.type() != c10::kCPU) {
actualStorages =
storages.has_value() ? std::move(*storages) : extractStorages(value);
usedDevices = getDevicesOfStorages(impl_, actualStorages);
ensureIsSubsetOfDevices(usedDevices, devices_);
}
} catch (const std::exception&) {
setError(std::current_exception());
return;
}
std::unique_lock<std::mutex> lock(mutex_);
TORCH_CHECK(
!completed(),
"Attempting to mark a completed Future as complete again. Note that "
"a Future can only be marked completed once.");
// Only set value_ and completed_ flag once all checks and preparation steps
// have returned successfully to allow for proper error propagation.
value_ = std::move(value);
completed_ = true;
currentDevice_ = impl_.getDevice();
storages_ = std::move(actualStorages);
for (const c10::Device& device : usedDevices) {
c10::Event event(impl_.type());
event.record(impl_.getStream(device));
events_.push_back(std::move(event));
}
std::vector<FutureCallback> cbs;
cbs.swap(callbacks_);
lock.unlock();
finished_cv_.notify_all();
for (const auto& callback : cbs) {
invokeCallback(callback.callback, callback.uses_future);
}
}
void markCompleted() {
markCompleted(IValue{});
}
void setError(std::exception_ptr eptr) {
std::unique_lock<std::mutex> lock(mutex_);
setErrorInternal(std::move(eptr), lock);
}
void setErrorIfNeeded(std::exception_ptr eptr) {
std::unique_lock<std::mutex> lock(mutex_);
if (completed_) {
// This should be rare and shouldn't cause log spew. Its important to
// log errors and that's why we have this log here.
std::string msg = c10::str(
"Skipping setting following error on the Future since "
"it is already marked completed (this is not necessarily "
"an error):\n",
tryRetrieveErrorMessageInternal(std::move(eptr)));
if (eptr_) {
msg += c10::str(
", \nOriginal exception:\n",
tryRetrieveErrorMessageInternal(eptr_));
}
LOG(INFO) << msg;
return;
} else {
setErrorInternal(std::move(eptr), lock);
}
}
// Get the result of the current future.
IValue value() {
std::unique_lock<std::mutex> lock(mutex_);
AT_ASSERT(completed());
if (eptr_) {
std::rethrow_exception(eptr_);
}
return value_;
}
// This accessor should only be used if we know that the future is
// completed() with no error.
const IValue& constValue() const {
std::unique_lock<std::mutex> lock(mutex_);
AT_ASSERT(completed());
TORCH_INTERNAL_ASSERT(
!eptr_,
"value() accessor should only be used when future is not completed with ",
"an error, but future had the following error: ",
tryRetrieveErrorMessageInternal(eptr_)
);
return value_;
}
// This accessor should only be used if we know that the future is
// completed() with no error.
const std::vector<WeakStorage>& storages() const {
std::unique_lock<std::mutex> lock(mutex_);
AT_ASSERT(completed());
AT_ASSERT(!eptr_);
return storages_;
}
/**
* Add a callback to the future.
* The callbacks will be executed once the future completes.
* If the future has already completed,
* this function will execute the callback immediately.
*/
template <typename T>
void addCallback(T callback, bool uses_future = true) {
static_assert(
std::is_invocable_r_v<void, T, Future&>,
"The callback must have signature void(Future&)");
std::unique_lock<std::mutex> lock(mutex_);
if (completed()) {
lock.unlock();
invokeCallback(callback, uses_future);
return;
}
callbacks_.emplace_back(std::move(callback), uses_future);
}
/**
* Add a callback to the future, and return another Future to hold the return
* value of the callback. This is necessary when the callback provider needs
* to know for sure when the callback has finished.
*/
template <typename T>
c10::intrusive_ptr<Future> then(T callback, TypePtr type) {
using IValueWithStorages = std::tuple<IValue, std::vector<WeakStorage>>;
static_assert(
std::disjunction_v<
std::is_invocable_r<IValue, T, Future&>,
std::is_invocable_r<IValueWithStorages, T, Future&>>,
"The callback must have signature IValue(Future&) or "
"std::tuple<IValue, std::vector<Storage>>(Future&)");
auto childFut = createInstance(::std::move(type));
addCallback([childFut,
cb = std::move(callback)](Future& parentFut) {
try {
if constexpr (::std::is_convertible_v<typename std::invoke_result_t<T &&, Future&>, IValueWithStorages>) {
auto [ivalue, storages] = cb(parentFut);
childFut->markCompleted(::std::move(ivalue), ::std::move(storages));
} else {
childFut->markCompleted(cb(parentFut));
}
} catch (std::exception&) {
childFut->setError(std::current_exception());
}
});
return childFut;
}
template <typename T>
c10::intrusive_ptr<Future> thenAsync(T callback, TypePtr type) {
static_assert(
std::is_invocable_r_v<c10::intrusive_ptr<Future>, T, Future&>,
"The callback must have signature c10::intrusive_ptr<Future>(Future&)");
auto childFut = createInstance(std::move(type));
addCallback(
[childFut, cb = std::move(callback)](Future& parentFut) mutable {
c10::intrusive_ptr<Future> intermediateFut;
try {
intermediateFut = cb(parentFut);
} catch (std::exception&) {
childFut->setError(std::current_exception());
return;
}
intermediateFut->addCallback(
[childFut = std::move(childFut)](Future& intermediateFut) {
if (intermediateFut.hasError()) {
childFut->setError(intermediateFut.exception_ptr());
} else {
childFut->markCompleted(
intermediateFut.value(), intermediateFut.storages());
}
});
});
return childFut;
}
// Tries to retrieve the error message from std::exception_ptr.
std::string tryRetrieveErrorMessage() const {
TORCH_CHECK(hasError(), "No error present on the future.");
std::unique_lock<std::mutex> lock(mutex_);
return tryRetrieveErrorMessageInternal(eptr_);
}
// Check if the current future has completed
bool completed() const {
return completed_;
}
bool hasValue() const {
std::unique_lock<std::mutex> lock(mutex_);
return completed_ && !eptr_;
}
bool hasError() const {
std::unique_lock<std::mutex> lock(mutex_);
return eptr_ ? true : false;
}
std::exception_ptr exception_ptr() const {
std::unique_lock<std::mutex> lock(mutex_);
return eptr_;
}
TORCH_API friend std::ostream& operator<<(
std::ostream& out,
const Future& v);
const TypePtr& elementType() const {
return type_;
}
const std::vector<c10::Device>& devices() const {
return devices_;
}
// This method should be used when one intends to manually create a child
// future, for example when implementing a customized version of then().
c10::intrusive_ptr<Future> createInstance(at::TypePtr type) {
return c10::make_intrusive<Future>(std::move(type), devices_);
}
private:
// This method should always be used when invoking a callback (regardless of
// how/when that happens) as it will ensure that the proper "environment" is
// set up before running the callback, as in, it will set up the CUDA streams,
// synchronize them with the value, and so on (if needed).
template<typename T>
void invokeCallback(T& callback, bool uses_future) {
static_assert(
std::is_invocable_r_v<void, T, Future&>,
"The callback must have signature void(Future&)");
// The synchronization performed below shouldn't be needed when the future
// is not used by the callback.
if (uses_future) {
c10::OptionalDeviceGuard deviceGuard(currentDevice_);
std::vector<c10::Stream> streams;
streams.reserve(devices_.size());
for (const c10::Device& device : devices_) {
streams.push_back(impl_.getStreamFromGlobalPool(device));
}
c10::MultiStreamGuard streamGuard(streams);
synchronizeWithCurrentStreams();
callback(*this);
} else {
callback(*this);
}
}
// This method should be called before this future's value is used, as it
// ensures that the CUDA streams that are "current" at the callsite properly
// synchronize with the value.
void synchronizeWithCurrentStreams() {
for (c10::Event& event : events_) {
event.block(impl_.getStream(event.device()));
}
for (const WeakStorage& weak_storage : storages_) {
c10::intrusive_ptr<c10::StorageImpl> storage = weak_storage.lock();
if (!storage) {
continue;
}
if (!storage->device().is_cpu()) {
impl_.recordDataPtrOnStream(
storage->data_ptr(), impl_.getStream(storage->device()));
}
}
}
void setErrorInternal(
std::exception_ptr eptr,
std::unique_lock<std::mutex>& lock) {
TORCH_CHECK(
!eptr_,
"Error already set on this Future: ",
tryRetrieveErrorMessageInternal(eptr_),
", trying to set error: ",
tryRetrieveErrorMessageInternal(eptr));
TORCH_INTERNAL_ASSERT(!completed(), "Future is already marked completed");
completed_ = true;
eptr_ = std::move(eptr);
std::vector<FutureCallback> cbs;
cbs.swap(callbacks_);
lock.unlock();
finished_cv_.notify_all();
for (const auto& callback : cbs) {
invokeCallback(callback.callback, callback.uses_future);
}
}
// Tries to retrieve the error message from std::exception_ptr.
std::string tryRetrieveErrorMessageInternal(std::exception_ptr eptr) const {
try {
std::rethrow_exception(std::move(eptr));
} catch (const std::exception& e) {
return e.what();
} catch (...) {
return "Unknown Exception Type";
}
}
// Defined in ivalue.cpp.
static std::vector<WeakStorage> extractStorages(
const at::IValue& value);
static std::vector<c10::Device> getDevicesOfStorages(
const c10::impl::VirtualGuardImpl& impl,
const std::vector<WeakStorage>& storages) {
c10::DeviceIndex deviceCount = impl.deviceCount();
std::vector<bool> isDeviceUsed(deviceCount, false);
for (const WeakStorage& weak_storage : storages) {
c10::intrusive_ptr<c10::StorageImpl> storage = weak_storage.lock();
if (!storage) {
continue;
}
c10::Device device = storage->device();
if (!device.is_cpu()) {
TORCH_CHECK_VALUE(
device.type() == impl.type(),
"Expected all data ptrs to be on a device of type ",
impl.type(),
", got one on device ",
device);
isDeviceUsed[device.index()] = true;
}
}
std::vector<c10::Device> devices;
for (c10::DeviceIndex idx = 0; idx < deviceCount; idx++) {
if (isDeviceUsed[idx]) {
devices.emplace_back(impl.type(), idx);
}
}
return devices;
}
static std::string formatSetOfDevices(
const std::vector<c10::Device>& devices) {
if (devices.empty()) {
return "(none)";
}
std::ostringstream oss;
oss << devices[0];
for (const auto idx : c10::irange(1, devices.size())) {
if (idx == devices.size() - 1) {
oss << " and ";
} else {
oss << ", ";
}
oss << devices[idx];
}
return oss.str();
}
static c10::DeviceType getTypeOfDevices(
const std::vector<c10::Device>& devices) {
if (devices.empty()) {
return c10::kCPU;
}
c10::DeviceType deviceType = devices[0].type();
for (const auto idx : c10::irange(1, devices.size())) {
TORCH_CHECK_VALUE(
devices[idx].type() == deviceType,
"Expected all devices to be of the same type, but got a mismatch between ",
devices[0],
" and ",
devices[idx]);
}
return deviceType;
}
// We need devices to be sorted in order to use ensureIsSubsetOfDevices.
static std::vector<c10::Device> sortAndDeduplicateDevices(
const c10::impl::VirtualGuardImpl& /*impl*/,
std::vector<c10::Device> devices) {
std::sort(
devices.begin(), devices.end(),
[](const c10::Device& a, const c10::Device& b) { return a.index() < b.index(); });
// Deduplicate by compacting.
size_t targetIdx = 0;
for (const auto sourceIdx : c10::irange(devices.size())) {
TORCH_CHECK_VALUE(
devices[sourceIdx].has_index(),
"Expected devices to have indices, got ", devices[sourceIdx]);
if (targetIdx > 0 && devices[targetIdx - 1].index() == devices[sourceIdx].index()) {
// It's a duplicate, skip it.
continue;
}
if (sourceIdx != targetIdx) {
devices[targetIdx] = devices[sourceIdx];
}
targetIdx++;
}
// If there were duplicates there's now a gap at the end: trim it. Resizing
// requires the item type to be default-constructible (which c10::Device is
// not) because in principle it could be required to create new items. Since
// we know we'll shrink the vector, we provide a custom dummy value instead.
devices.resize(targetIdx, c10::Device(c10::kCPU));
return devices;
}
static void ensureIsSubsetOfDevices(
const std::vector<c10::Device>& subset,
const std::vector<c10::Device>& superset) {
// We assume the devices in both vectors have the same consistent type, and
// their indices are unique and sorted.
std::vector<c10::Device> excessDevices;
std::set_difference(
subset.begin(),
subset.end(),
superset.begin(),
superset.end(),
std::back_inserter(excessDevices),
[](const c10::Device& a, const c10::Device& b) { return a.index() < b.index(); });
TORCH_CHECK_VALUE(
excessDevices.empty(),
"The result contained tensors residing on device(s) ",
formatSetOfDevices(excessDevices),
" which are not among the expected device(s) ",
formatSetOfDevices(superset));
}
  // Serializes access to the completion state below; helpers such as
  // setErrorInternal take (and release) a unique_lock on it.
  mutable std::mutex mutex_;
  std::atomic_bool completed_ = {false}; // is this future complete
  std::condition_variable finished_cv_;
  IValue value_; // when finished the value
  TypePtr type_;
  std::vector<FutureCallback> callbacks_;
  // Set (instead of value_) when the future completes with an error.
  std::exception_ptr eptr_;
  // An upcast pointer to a virtual class which allows us to manipulate events,
  // streams, ... in a generic way, without an explicit dependency on CUDA.
  // NOLINTNEXTLINE(cppcoreguidelines-avoid-const-or-ref-data-members)
  const c10::impl::VirtualGuardImpl impl_;
  // The device that was current when markCompleted was called, which we'll
  // restore when invoking callbacks. It's optional because we'll only store it
  // if the future completes successfully.
  std::optional<c10::Device> currentDevice_;
  // The events that correspond to the completion of the async I/O kernels. They
  // are recorded on the appropriate streams when the future is marked completed
  // and can then be queried/waited/blocked on. There is one event for each
  // distinct device on which the value's tensors reside.
  std::vector<c10::Event> events_;
  // A cached version of the storages extracted from the value when the future
  // is first marked completed.
  std::vector<WeakStorage> storages_;
  // The bounding set of devices that this future, and any of its children, is
  // allowed to use. This is a superset of the set of devices used by the events
  // above. We need this to know what streams (for which devices) to set as
  // current when invoking a callback, thus allowing the callback to use devices
  // that the parent future didn't use. This field is set to the value provided
  // in the constructor and will be "inherited" by all child futures.
  // NOLINTNEXTLINE(cppcoreguidelines-avoid-const-or-ref-data-members)
  const std::vector<c10::Device> devices_;
};
// ivalue::Await wraps a lazily-computed IValue: `fn_` is invoked at most once,
// on the first wait() call, after which the cached value is returned.
struct C10_EXPORT ivalue::Await final : c10::intrusive_ptr_target {
 private:
  // Constructors are private; instances are created via c10::make_intrusive
  // (hence the friend declaration below).
  explicit Await(TypePtr elType, std::function<IValue()> fn)
      : elType_(std::move(elType)), type_(AwaitType::create(elType_)), fn_(std::move(fn)) {}
  explicit Await(TypePtr elType) : elType_(std::move(elType)), type_(AwaitType::create(elType_)) { }
  friend c10::intrusive_ptr<Await>;
 public:
  // Non-copyable, non-movable: identity matters for a refcounted box.
  Await(const Await&) = delete;
  Await(Await&&) = delete;
  Await& operator=(const Await&) = delete;
  Await& operator=(Await&&) = delete;
  ~Await() override = default;
  // Returns the value, running fn_ on first use; once the computation has
  // run, the retained args are released.
  IValue wait() {
    if (!completed_) {
      TORCH_CHECK(fn_, "Incompleted Await: fn can't be None");
      value_ = fn_();
      completed_ = true;
      args_ = {};
    }
    return value_;
  }
  // Returns the cached value; requires the Await to already be completed.
  IValue value() {
    TORCH_CHECK(completed_, "Await must be completed");
    return value_;
  }
  void setFn(std::function<IValue()> fn) {
    fn_ = std::move(fn);
  }
  bool completed() {
    return completed_;
  }
  // Completes the Await with an externally-computed value (fn_ is bypassed).
  void markCompleted(IValue value) {
    value_ = std::move(value);
    completed_ = true;
  }
  TORCH_API friend std::ostream& operator<<(
      std::ostream& out,
      const Await& v);
  // Type of the wrapped value.
  const TypePtr& elementType() const {
    return elType_;
  }
  // The Await type itself (Await[elType]).
  const TypePtr& type() const {
    return type_;
  }
  void setArgs(std::vector<IValue> args) {
    args_ = std::move(args);
  }
  std::vector<IValue>& args() {
    return args_;
  }
 private:
  TypePtr elType_;
  TypePtr type_;
  // Arguments kept alive for fn_; cleared by wait() after it runs.
  std::vector<IValue> args_;
  std::function<IValue()> fn_;
  IValue value_;
  bool completed_{};
};
// Input is a list of Futures with the same target type.
// Output is a Future to the List of completed Futures, i.e. it completes
// once every input future has completed. Defined in ivalue.cpp.
TORCH_API intrusive_ptr<ivalue::Future> collectAll(
    const c10::List<c10::intrusive_ptr<ivalue::Future>>& srcs);
// Input is a List of Futures with the same target type.
// Output is a Future that will be updated with a seen value (presumably the
// first input to complete -- see the definition in ivalue.cpp).
TORCH_API intrusive_ptr<ivalue::Future> collectAny(
    const c10::List<c10::intrusive_ptr<ivalue::Future>>& srcs);
// User-defined object: an instance of a TorchScript class. Attribute values
// live in a flat slot vector; the attribute names/types live on the class
// type (type_).
struct C10_EXPORT ivalue::Object final : c10::intrusive_ptr_target {
 public:
  // In general, class types hold a shared_ptr to its owning CompilationUnit,
  // so that its type and methods do not get deallocated while the class exists.
  // However, the CompilationUnit holds ownership of the type's graphs, so
  // inserting a constant object into a Graph would create a reference cycle if
  // that constant object held a shared_ptr to its CU. For these objects we
  // instantiate them with non-owning references to its CU
  Object(WeakOrStrongTypePtr type, size_t numSlots) : type_(std::move(type)) {
    slots_.resize(numSlots);
  }
  Object(StrongTypePtr type, size_t numSlots)
      : type_(WeakOrStrongTypePtr(std::move(type))) {
    slots_.resize(numSlots);
  }
  static c10::intrusive_ptr<Object> create(
      WeakOrStrongTypePtr type,
      size_t numSlots) {
    return c10::make_intrusive<Object>(std::move(type), numSlots);
  }
  static c10::intrusive_ptr<Object> create(
      StrongTypePtr type,
      size_t numSlots) {
    return c10::make_intrusive<Object>(std::move(type), numSlots);
  }
  static c10::intrusive_ptr<Object> create(ClassTypePtr classType, size_t numSlots);
  /**
   * Slot API.
   *
   * Attributes are stored as a simple vector so that lookups are fast at
   * runtime. A "slot" is just an index into that vector, which can be computed
   * statically if you have access to the class type. Use this API if you are
   * writing compiler stuff.
   */
  void setSlot(size_t slot, IValue v) {
    if (slot >= slots_.size()) {
      // for module types, it is possible that the members of the class have
      // expanded after the object was created. In this case, we expand
      // the slots to the right size
      resizeObject(slot);
    }
    slots_[slot] = std::move(v);
  }
  const IValue& getSlot(size_t slot) const {
    TORCH_INTERNAL_ASSERT_DEBUG_ONLY(slot < slots_.size());
    // NOTE: This lookup is fairly hot, so we use unchecked access to the
    // vector. Errors should still be detectable with ASan.
    return slots_[slot];
  }
  // Caller must guarantee `slot` is in range; later slots shift down by one.
  void unsafeRemoveSlot(size_t slot) {
    TORCH_CHECK(slot < slots_.size());
    slots_.erase(slots_.begin() + static_cast<std::ptrdiff_t>(slot));
  }
  /**
   * Attribute API.
   *
   * Wrappers around the slot stuff so that users can access attributes
   * directly. Use this API if you are a user.
   *
   * Note: Unlike in Python, TorchScript must make a distinction between
   * attributes (which are IValues) and methods (which are Methods). If you
   * want a method, use `obj.type()->getMethod()`
   */
  IValue getAttr(const std::string& name) const;
  void setAttr(const std::string& name, IValue v);
  // Remove attribute by name, caller is responsible for
  // the safety of this operation
  // We didn't remove the attribute in the type because the type
  // might be shared by multiple objects.
  // Therefore after removing attribute, the object is in an inconsistent
  // state where it has more attribute types in its Type than
  // the attribute slots it has, user needs to make sure the object
  // has consistent by removing the attribute in type as well
  void unsafeRemoveAttr(const std::string& name);
  std::string name() const;
  const std::vector<IValue>& slots() const {
    return slots_;
  }
  std::shared_ptr<ClassType> type() const;
  // Returns the owning CompilationUnit, promoting the weak reference to a
  // shared_ptr when this object only holds a non-owning ref.
  std::shared_ptr<torch::jit::CompilationUnit> compilation_unit() {
    if (type_.holds_strong_ref()) {
      return type_.cu_.getStrongRefOrThrow();
    } else {
      auto weak_ptr = type_.cu_.getWeakRefOrThrow();
      return std::shared_ptr<torch::jit::CompilationUnit>(weak_ptr);
    }
  }
  c10::intrusive_ptr<Object> copy_to_weak_compilation_ref() const;
  // Downgrades this object's CU reference to non-owning (see class comment).
  void unsafe_make_weak_compilation_ref() {
    type_ = WeakOrStrongTypePtr(type_.asWeakTypePtr());
  }
  c10::intrusive_ptr<Object> copy() const;
  c10::intrusive_ptr<Object> deepcopy(
      std::optional<at::Device> device = std::nullopt) const;
  c10::intrusive_ptr<Object> deepcopy(
      IValue::HashIdentityIValueMap& memo,
      std::optional<at::Device> device = std::nullopt) const;
  bool is_weak_compilation_ref() const {
    return !type_.holds_strong_ref();
  }
  bool is_empty_strong_compilation_ref() const {
    return type_.holds_empty_strong_ref();
  }
 private:
  void resizeObject(size_t slot);
  WeakOrStrongTypePtr type_;
  std::vector<IValue> slots_;
};
// Virtual ivalue PyObjectHolder that holds a py::object. We make this virtual
// because the py::object and refcounting logic should happen in
// libtorch_python; see the concrete implementation in python_ivalue.h.
struct ivalue::PyObjectHolder : c10::intrusive_ptr_target {
 public:
  virtual PyObject* getPyObject() = 0;
  virtual c10::InferredType tryToInferType() = 0;
  // NOTE(review): N is presumably a fixed size hint for converting to
  // fixed-length container types -- confirm against python_ivalue.h.
  virtual IValue toIValue(const TypePtr& type, std::optional<int32_t> N = std::nullopt) = 0;
  virtual std::string toStr() = 0;
  virtual std::vector<at::Tensor> extractTensors() = 0;
  ~PyObjectHolder() override = default;
};
// Holder for a TorchScript enum member: its enum type, member name, and the
// underlying value.
struct ivalue::EnumHolder : c10::intrusive_ptr_target {
 public:
  EnumHolder(std::shared_ptr<EnumType> type, std::string name, IValue value)
      : type_(std::move(type)),
        name_(std::move(name)),
        value_(std::move(value)) {}
  // Python-style `is` check; delegates to operator== (defined out of line).
  bool is(const ivalue::EnumHolder& rhs) {
    return *this == rhs;
  }
  friend bool operator==(
      const ivalue::EnumHolder& lhs,
      const ivalue::EnumHolder& rhs);
  TORCH_API friend std::ostream& operator<<(
      std::ostream& out,
      const ivalue::EnumHolder& v);
  TORCH_API const std::string& qualifiedClassName() const;
  const std::string& unqualifiedClassName() const;
  const std::string& name() const {
    return name_;
  }
  const IValue& value() const {
    return value_;
  }
  std::shared_ptr<EnumType> type() const {
    return type_;
  }
 private:
  std::shared_ptr<EnumType> type_;
  std::string name_;
  IValue value_;
};
#undef TORCH_FORALL_TAGS
namespace detail {
// Dummy stand-in type used below so that, on platforms where `unsigned long`
// is the same type as uint32_t or uint64_t, the DEFINE_TO specialization for
// `unsigned long` does not collide with the uint32_t/uint64_t ones.
struct _guarded_unsigned_long_unique_dummy final {
  _guarded_unsigned_long_unique_dummy(int64_t /*unused*/){}
};
using _guarded_unsigned_long = std::conditional_t<
    std::is_same_v<unsigned long, uint32_t> ||
    std::is_same_v<unsigned long, uint64_t>,
    _guarded_unsigned_long_unique_dummy,
    unsigned long>;
} // namespace detail
// Returns a reference to the contained Object; the IValue retains ownership
// (no refcount bump), so the reference must not outlive this IValue.
inline ivalue::Object& IValue::toObjectRef() const {
  AT_ASSERT(isObject(), "Expected Object but got ", tagKind());
  TORCH_INTERNAL_ASSERT_DEBUG_ONLY(payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(), "Attempted to create null reference");
  return *static_cast<c10::ivalue::Object*>(payload.u.as_intrusive_ptr);
}
// note: when adding a DEFINE_TO case here you should also add a
// toX method to IValue. These named methods are much more discoverable
// than the to templated function.
// The macro emits both the rvalue (&&) and const-lvalue (const&) overloads of
// IValue::to<T>(), each forwarding to the named accessor with a static_cast.
#define DEFINE_TO(T, method_name)                          \
  template <>                                              \
  inline T IValue::to<T>()&& {                             \
    return static_cast<T>(std::move(*this).method_name()); \
  }                                                        \
  template <>                                              \
  inline c10::detail::ivalue_to_const_ref_overload_return<T>::type IValue::to<T>() const& { \
    typedef c10::detail::ivalue_to_const_ref_overload_return<T>::type return_type;          \
    return static_cast<return_type>(this->method_name());                                   \
  }
DEFINE_TO(at::Tensor, toTensor)
DEFINE_TO(at::Storage, toStorage)
DEFINE_TO(c10::Stream, toStream)
// Note: there is no toFloat -- `float` goes through toDouble and the
// macro's static_cast narrows the result.
DEFINE_TO(float, toDouble)
DEFINE_TO(double, toDouble)
DEFINE_TO(c10::complex<double>, toComplexDouble)
// All integral widths are stored as int64_t and retrieved via toInt.
DEFINE_TO(unsigned char, toInt)
DEFINE_TO(signed char, toInt)
DEFINE_TO(unsigned short, toInt)
DEFINE_TO(short, toInt)
DEFINE_TO(int, toInt)
DEFINE_TO(uint32_t, toInt)
DEFINE_TO(uint64_t, toInt)
// See detail::_guarded_unsigned_long: avoids a duplicate specialization when
// `unsigned long` aliases uint32_t or uint64_t.
DEFINE_TO(detail::_guarded_unsigned_long, toInt)
DEFINE_TO(int64_t, toInt)
DEFINE_TO(bool, toBool)
DEFINE_TO(c10::intrusive_ptr<caffe2::Blob>, toBlob)
DEFINE_TO(c10::intrusive_ptr<ivalue::ConstantString>, toString)
DEFINE_TO(c10::intrusive_ptr<ivalue::Object>, toObject)
DEFINE_TO(at::Scalar, toScalar)
DEFINE_TO(c10::List<int64_t>, toIntList)
DEFINE_TO(c10::List<c10::SymInt>, toSymIntList)
DEFINE_TO(c10::List<double>, toDoubleList)
DEFINE_TO(c10::List<c10::complex<double>>, toComplexDoubleList)
DEFINE_TO(c10::List<bool>, toBoolList)
DEFINE_TO(c10::List<at::Tensor>, toTensorList)
DEFINE_TO(c10::impl::GenericList, toList)
DEFINE_TO(c10::impl::GenericDict, toGenericDict)
DEFINE_TO(c10::intrusive_ptr<ivalue::Tuple>, toTuple)
DEFINE_TO(std::string, toStringRef)
DEFINE_TO(std::string_view, toStringView)
DEFINE_TO(c10::intrusive_ptr<ivalue::Future>, toFuture)
DEFINE_TO(c10::intrusive_ptr<ivalue::Await>, toAwait)
DEFINE_TO(c10::intrusive_ptr<c10::RRefInterface>, toRRef)
DEFINE_TO(c10::intrusive_ptr<at::Quantizer>, toQuantizer)
DEFINE_TO(IValue, toIValue)
DEFINE_TO(c10::Device, toDevice)
DEFINE_TO(at::ScalarType, toScalarType)
DEFINE_TO(at::Layout, toLayout)
DEFINE_TO(at::MemoryFormat, toMemoryFormat)
DEFINE_TO(at::QScheme, toQScheme)
DEFINE_TO(at::Dimname, toDimname)
DEFINE_TO(at::Generator, toGenerator)
DEFINE_TO(c10::SymInt, toSymInt)
DEFINE_TO(c10::SymFloat, toSymFloat)
DEFINE_TO(c10::SymBool, toSymBool)
// Tag type used purely to overload generic_to on the *return* type; carries
// no data.
template <class T>
struct _fake_type {};
// generic_to<T> converts an IValue from a generic list or generic dict
// to a concrete list/dict type like List<T>, Dict<...> or std::optional<T>.
// Note that in the case of lists, this only works for IValue-based lists,
// i.e. not for int64_t, double, ...
// generic_to<T> is an implementation detail of IValue::to<T> and not
// supposed to be called directly.
// The _fake_type<T> parameter allows us to overload
// based on the return type.
template <class Elem>
// TODO this is deprecated but we don't throw a warning because a lot of ops in
// native_functions.yaml still return std::vector.
// C10_DEPRECATED_MESSAGE("IValues based on std::vector<T> are potentially slow
// and deprecated. Please use torch::List<T> instead.")
std::vector<Elem> generic_to(IValue ivalue, _fake_type<std::vector<Elem>> /*unused*/) {
  // We need to do a deep copy of the vector because there might be other
  // references to this same IValue that also use the list. We can't just
  // move the elements out.
  auto list = std::move(ivalue).template to<List<Elem>>();
  std::vector<Elem> result;
  result.reserve(list.size());
  for (Elem v : list) {
    result.push_back(std::move(v));
  }
  return result;
}
// Unwraps a custom-class IValue to its user-defined type T. The custom class
// is represented as an Object with a single capsule slot holding the user
// object. The && and const& overloads below are intentionally identical in
// body and must be kept in sync.
template <typename T>
c10::intrusive_ptr<T> IValue::toCustomClass() && {
  static_assert(
      std::is_base_of_v<torch::CustomClassHolder, T> == true,
      "toCustomClass requires that template parameter T must inherit "
      "from torch::CustomClassHolder");
  auto obj = toObject();
  TORCH_CHECK(
      obj->slots().size() == 1,
      "Tried to cast IValue to custom class but it did "
      "not contain a custom class!");
  // Verify the runtime class type matches the registered type for T.
  const auto* expected_type = c10::getCustomClassType<c10::intrusive_ptr<T>>().get();
  ivalue::checkCustomClassType(expected_type, type().get());
  auto userObj =
      c10::static_intrusive_pointer_cast<T>(obj->getSlot(0).toCapsule());
  return userObj;
}
template <typename T>
c10::intrusive_ptr<T> IValue::toCustomClass() const& {
  static_assert(
      std::is_base_of_v<torch::CustomClassHolder, T> == true,
      "toCustomClass requires that template parameter T must inherit "
      "from torch::CustomClassHolder");
  auto obj = toObject();
  TORCH_CHECK(
      obj->slots().size() == 1,
      "Tried to cast IValue to custom class but it did "
      "not contain a custom class!");
  const auto* expected_type = c10::getCustomClassType<c10::intrusive_ptr<T>>().get();
  ivalue::checkCustomClassType(expected_type, type().get());
  auto userObj =
      c10::static_intrusive_pointer_cast<T>(obj->getSlot(0).toCapsule());
  return userObj;
}
// Catch-all overload: T is expected to be c10::intrusive_ptr<SomeCustomClass>;
// strip the pointer to recover the element type and unwrap the custom class.
template <typename T>
T generic_to(IValue ivalue, _fake_type<T> /*unused*/) {
  using ElemType = typename std::remove_pointer_t<T>::element_type;
  return std::move(ivalue).template toCustomClass<ElemType>();
}
// Wraps the IValue unconverted; the capsule is unpacked later.
template <typename T>
tagged_capsule<T> generic_to(IValue ivalue, _fake_type<tagged_capsule<T>> /*unused*/) {
  return tagged_capsule<T>{std::move(ivalue)};
}
// Re-types a generic list as a typed c10::List without copying elements.
template <typename Elem>
c10::List<Elem> generic_to(IValue ivalue, _fake_type<c10::List<Elem>> /*unused*/) {
  return impl::toTypedList<Elem>(std::move(ivalue).toList());
}
// Copies an IValue-backed ListImpl into any vector-like container T
// (std::vector, at::DimVector, ...), converting each element to T::value_type.
template <typename T>
static T createVectorLikeFromList(const c10::detail::ListImpl* impl) {
  T result;
  result.reserve(impl->list.size());
  for (const auto & i : impl->list) {
    result.push_back(i.to<typename T::value_type>());
  }
  return result;
}
template <typename T>
static std::vector<T> createVectorFromList(const c10::detail::ListImpl* impl) {
  return createVectorLikeFromList<std::vector<T>>(impl);
}
// Overload for typed c10::List: deep-copies the elements into a std::vector.
template <typename T>
std::vector<T> createVectorFromList(const c10::List<T>& impl) {
  std::vector<T> result;
  result.reserve(impl.size());
  for (size_t i = 0, N = impl.size(); i < N; ++i) {
    result.push_back(impl[i]);
  }
  return result;
}
// None maps to an empty OptionalArray; otherwise the list is deep-copied.
template <typename T>
OptionalArray<T> generic_to(IValue ivalue, _fake_type<OptionalArray<T>> /*unused*/) {
  if (ivalue.isNone()) {
    return {};
  }
  return createVectorFromList<T>(
    std::move(ivalue).template to<c10::List<T>>()
  );
}
namespace detail {
// Expands a List into a fixed-size std::array via an index pack; raises if
// the runtime length does not match the array size.
template <typename Elem, size_t... I>
std::array<Elem, sizeof...(I)> generic_to_array(
    IValue ivalue,
    _fake_type<std::array<Elem, sizeof...(I)>> /*unused*/,
    std::index_sequence<I...> /*unused*/) {
  // We need to do a deep copy of the array because there might be other
  // references to this same IValue that also use the list. We can't just
  // move the elements out.
  auto list = std::move(ivalue).template to<List<Elem>>();
  TORCH_CHECK(
      list.size() == sizeof...(I),
      "Tried to convert a List with ",
      list.size(),
      " elements to a fixed-size array of size ",
      sizeof...(I));
  return {list[I]...};
}
} // namespace detail
// Public entry: builds the index sequence and delegates to the helper above.
template <typename Elem, size_t N>
std::array<Elem, N> generic_to(
    IValue ivalue,
    _fake_type<std::array<Elem, N>> ft) {
  return detail::generic_to_array(ivalue, ft, std::make_index_sequence<N>());
}
// Re-types a generic dict as a typed c10::Dict without copying entries.
template <typename Key, typename Value>
c10::Dict<Key, Value> generic_to(
    IValue ivalue,
    _fake_type<c10::Dict<Key, Value>> /*unused*/) {
  return impl::toTypedDict<Key, Value>(std::move(ivalue).toGenericDict());
}
// Deep-copies the dict entry-by-entry; deprecated in favor of c10::Dict.
template <typename K, typename V>
C10_DEPRECATED_MESSAGE(
    "IValues based on std::unordered_map are slow and deprecated. Please use c10::Dict<K, V> instead.")
std::unordered_map<K, V> generic_to(
    IValue ivalue,
    _fake_type<std::unordered_map<K, V>> /*unused*/) {
  std::unordered_map<K, V> specialized_dict;
  for (const auto& item : std::move(ivalue).toGenericDict()) {
    specialized_dict[item.key().template to<K>()] = item.value().template to<V>();
  }
  return specialized_dict;
}
// None maps to std::nullopt; any other value is converted to T.
template <typename T>
std::optional<T> generic_to(IValue ivalue, _fake_type<std::optional<T>> /*unused*/) {
  if (ivalue.isNone()) {
    return std::nullopt;
  }
  return std::move(ivalue).template to<T>();
}
namespace detail {
// Unpacks TupleElements into a std::tuple via an index sequence, converting
// each element to the corresponding tuple element type.
template <typename Tuple, std::size_t... INDEX>
Tuple generic_to_tuple_impl(
    const ivalue::TupleElements& t,
    std::index_sequence<INDEX...> /*unused*/) {
  return std::make_tuple(
      t[INDEX].to<std::tuple_element_t<INDEX, Tuple>>()...);
}
} // namespace detail
// Enabled only for tuples whose element types are non-reference and
// constructible into an IValue.
template <
    typename... Args,
    typename Indices = std::make_index_sequence<sizeof...(Args)>,
    std::enable_if_t<
        !std::disjunction_v<
            std::is_lvalue_reference<Args>...,
            std::negation<std::is_constructible<IValue, Args>>...>,
        std::nullptr_t> = nullptr>
std::tuple<Args...> generic_to(const IValue& ivalue, _fake_type<std::tuple<Args...>> /*unused*/) {
  const auto& vals = ivalue.toTupleRef().elements();
  TORCH_CHECK(vals.size() == sizeof...(Args));
  return detail::generic_to_tuple_impl<std::tuple<Args...>>(vals, Indices{});
}
// Generic entry points for IValue::to<T>(); they dispatch to the generic_to
// overload selected by the _fake_type<T> tag.
template <typename T>
inline T IValue::to() && {
  return generic_to(std::move(*this), _fake_type<T>{});
}
template <>
inline std::optional<std::string_view> IValue::to() && {
  // In the default implementation, the IValue is destroyed with std::move.
  // But if the unboxed type is std::optional<string_view> we cannot destroy
  // the IValue (the returned view presumably aliases storage this IValue
  // owns).
  return generic_to(*this, _fake_type<std::optional<std::string_view>>{});
}
template <typename T>
inline typename c10::detail::ivalue_to_const_ref_overload_return<T>::type IValue::to() const& {
  return generic_to(*this, _fake_type<T>{});
}
// List accessors: the && overloads move the intrusive pointer out of the
// IValue, the const& overloads retain it (refcount bump), and the toXVector
// variants deep-copy the elements into a std::vector.
inline c10::List<int64_t> IValue::toIntList() && {
  AT_ASSERT(isIntList(), "Expected IntList but got ", tagKind());
  return c10::List<int64_t>(moveToIntrusivePtr<c10::detail::ListImpl>());
}
inline c10::List<int64_t> IValue::toIntList() const& {
  AT_ASSERT(isIntList(), "Expected IntList but got ", tagKind());
  return c10::List<int64_t>(toIntrusivePtr<c10::detail::ListImpl>());
}
inline std::vector<int64_t> IValue::toIntVector() const {
  AT_ASSERT(isIntList(), "Expected IntList but got ", tagKind());
  TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
      payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
      "called toIntVector on null intrusive_ptr IValue");
  return createVectorFromList<int64_t>(
      static_cast<const c10::detail::ListImpl*>(payload.u.as_intrusive_ptr));
}
// SymInt accessors also accept a plain IntList (ints embed into SymInts).
inline c10::List<c10::SymInt> IValue::toSymIntList() && {
  AT_ASSERT(
      isSymIntList() || isIntList(),
      "Expected SymIntList or IntList but got ",
      tagKind());
  return c10::List<c10::SymInt>(moveToIntrusivePtr<c10::detail::ListImpl>());
}
inline c10::List<c10::SymInt> IValue::toSymIntList() const& {
  AT_ASSERT(
      isSymIntList() || isIntList(),
      "Expected SymIntList or IntList but got ",
      tagKind());
  return c10::List<c10::SymInt>(toIntrusivePtr<c10::detail::ListImpl>());
}
inline std::vector<c10::SymInt> IValue::toSymIntVector() const {
  AT_ASSERT(isSymIntList() || isIntList(), "Expected SymIntList or IntList but got ", tagKind());
  TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
      payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
      "called toSymIntVector on null intrusive_ptr IValue");
  return createVectorFromList<c10::SymInt>(
      static_cast<const c10::detail::ListImpl*>(payload.u.as_intrusive_ptr));
}
inline at::DimVector IValue::toDimVector() const {
  AT_ASSERT(isIntList(), "Expected IntList but got ", tagKind());
  TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
      payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
      "called toDimVector on null intrusive_ptr IValue");
  return createVectorLikeFromList<at::DimVector>(
      static_cast<const c10::detail::ListImpl*>(payload.u.as_intrusive_ptr));
}
// Double / complex-double / bool list accessors; same &&/const&/vector
// pattern as the int-list accessors above.
inline c10::List<double> IValue::toDoubleList() && {
  AT_ASSERT(isDoubleList(), "Expected DoubleList but got ", tagKind());
  return c10::List<double>(moveToIntrusivePtr<c10::detail::ListImpl>());
}
inline c10::List<double> IValue::toDoubleList() const& {
  AT_ASSERT(isDoubleList(), "Expected DoubleList but got ", tagKind());
  return c10::List<double>(toIntrusivePtr<c10::detail::ListImpl>());
}
inline std::vector<double> IValue::toDoubleVector() const {
  AT_ASSERT(isDoubleList(), "Expected DoubleList but got ", tagKind());
  TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
      payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
      "called toDoubleVector on null intrusive_ptr IValue");
  return createVectorFromList<double>(
      static_cast<const c10::detail::ListImpl*>(payload.u.as_intrusive_ptr));
}
inline c10::List<c10::complex<double>> IValue::toComplexDoubleList() && {
  AT_ASSERT(isComplexDoubleList(), "Expected ComplexDoubleList but got ", tagKind());
  return c10::List<c10::complex<double>>(moveToIntrusivePtr<c10::detail::ListImpl>());
}
inline c10::List<c10::complex<double>> IValue::toComplexDoubleList() const& {
  AT_ASSERT(isComplexDoubleList(), "Expected ComplexDoubleList but got ", tagKind());
  return c10::List<c10::complex<double>>(toIntrusivePtr<c10::detail::ListImpl>());
}
inline std::vector<c10::complex<double>> IValue::toComplexDoubleVector() const {
  AT_ASSERT(isComplexDoubleList(), "Expected ComplexDoubleList but got ", tagKind());
  TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
      payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
      "called toComplexDoubleVector on null intrusive_ptr IValue");
  return createVectorFromList<c10::complex<double>>(
      static_cast<const c10::detail::ListImpl*>(payload.u.as_intrusive_ptr));
}
inline c10::List<bool> IValue::toBoolList() && {
  AT_ASSERT(isBoolList(), "Expected BoolList but got ", tagKind());
  return c10::List<bool>(moveToIntrusivePtr<c10::detail::ListImpl>());
}
inline c10::List<bool> IValue::toBoolList() const& {
  AT_ASSERT(isBoolList(), "Expected BoolList but got ", tagKind());
  return c10::List<bool>(toIntrusivePtr<c10::detail::ListImpl>());
}
// Tensor / optional-tensor / generic list accessors; same &&/const&/vector
// pattern as above.
inline c10::List<at::Tensor> IValue::toTensorList() && {
  AT_ASSERT(isTensorList(), "Expected TensorList but got ", tagKind());
  return c10::List<at::Tensor>(moveToIntrusivePtr<c10::detail::ListImpl>());
}
inline c10::List<at::Tensor> IValue::toTensorList() const& {
  AT_ASSERT(isTensorList(), "Expected TensorList but got ", tagKind());
  return c10::List<at::Tensor>(toIntrusivePtr<c10::detail::ListImpl>());
}
inline std::vector<at::Tensor> IValue::toTensorVector() const {
  AT_ASSERT(isTensorList(), "Expected TensorList but got ", tagKind());
  TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
      payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
      "called toTensorVector on null intrusive_ptr IValue");
  return createVectorFromList<at::Tensor>(
      static_cast<const c10::detail::ListImpl*>(payload.u.as_intrusive_ptr));
}
inline c10::List<std::optional<at::Tensor>> IValue::toOptionalTensorList() && {
  AT_ASSERT(isOptionalTensorList(), "Expected OptionalTensorList but got ", tagKind());
  return c10::List<std::optional<at::Tensor>>(moveToIntrusivePtr<c10::detail::ListImpl>());
}
inline c10::List<std::optional<at::Tensor>> IValue::toOptionalTensorList() const& {
  AT_ASSERT(isOptionalTensorList(), "Expected OptionalTensorList but got ", tagKind());
  return c10::List<std::optional<at::Tensor>>(toIntrusivePtr<c10::detail::ListImpl>());
}
inline std::vector<std::optional<at::Tensor>> IValue::toOptionalTensorVector() const {
  AT_ASSERT(isOptionalTensorList(), "Expected OptionalTensorList but got ", tagKind());
  TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
      payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
      "called toOptionalTensorVector on null intrusive_ptr IValue");
  return createVectorFromList<std::optional<at::Tensor>>(
      static_cast<const c10::detail::ListImpl*>(payload.u.as_intrusive_ptr));
}
inline c10::List<IValue> IValue::toList() && {
  AT_ASSERT(isList(), "Expected GenericList but got ", tagKind());
  return c10::List<IValue>(moveToIntrusivePtr<c10::detail::ListImpl>());
}
inline c10::List<IValue> IValue::toList() const& {
  AT_ASSERT(isList(), "Expected GenericList but got ", tagKind());
  return c10::List<IValue>(toIntrusivePtr<c10::detail::ListImpl>());
}
// Borrowing view of the underlying elements; no refcount bump, so the view
// must not outlive this IValue.
inline c10::ArrayRef<IValue> IValue::toListRef() const {
  AT_ASSERT(isList(), "Expected GenericList but got ", tagKind());
  TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
      payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
      "called toListRef on null intrusive_ptr IValue");
  return static_cast<const c10::detail::ListImpl*>(payload.u.as_intrusive_ptr)
      ->list;
}
// Dict and tuple accessors; same move/retain split as the list accessors.
inline c10::Dict<IValue, IValue> IValue::toGenericDict() && {
  AT_ASSERT(isGenericDict(), "Expected GenericDict but got ", tagKind());
  return c10::Dict<IValue, IValue>(moveToIntrusivePtr<c10::detail::DictImpl>());
}
inline c10::Dict<IValue, IValue> IValue::toGenericDict() const& {
  AT_ASSERT(isGenericDict(), "Expected GenericDict but got ", tagKind());
  return c10::Dict<IValue, IValue>(toIntrusivePtr<c10::detail::DictImpl>());
}
inline c10::intrusive_ptr<ivalue::Tuple> IValue::toTuple() && {
  AT_ASSERT(isTuple(), "Expected Tuple but got ", tagKind());
  return moveToIntrusivePtr<ivalue::Tuple>();
}
inline c10::intrusive_ptr<ivalue::Tuple> IValue::toTuple() const& {
  AT_ASSERT(isTuple(), "Expected Tuple but got ", tagKind());
  return toIntrusivePtr<ivalue::Tuple>();
}
// Borrowing reference to the tuple; no refcount bump, so it must not outlive
// this IValue.
inline ivalue::Tuple& IValue::toTupleRef() const {
  AT_ASSERT(isTuple(), "Expected Tuple but got ", tagKind());
  TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
      payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
      "called toTupleRef on null intrusive_ptr IValue");
  return *static_cast<c10::ivalue::Tuple*>(
      payload.u.as_intrusive_ptr);
}
// Takes ownership of the tuple; a null pointer is stored as the undefined-
// tensor sentinel.
inline IValue::IValue(c10::intrusive_ptr<ivalue::Tuple> v)
    : tag(Tag::Tuple) {
  payload.u.as_intrusive_ptr = null_to_undefined_tensor(v.release());
}
// std::tuple constructors: enabled only when every element type is a
// non-reference type from which an IValue can be constructed. The copy
// overload forwards const references; the move overload forwards rvalues.
template <
    typename... Args,
    std::enable_if_t<
        !std::disjunction_v<
            std::is_lvalue_reference<Args>...,
            std::negation<std::is_constructible<IValue, Args>>...>,
        std::nullptr_t>>
inline IValue::IValue(const std::tuple<Args...>& t)
    : IValue(std::apply(c10::ivalue::Tuple::create<const Args&...>, t)) {
}
template <
    typename... Args,
    std::enable_if_t<
        !std::disjunction_v<
            std::is_lvalue_reference<Args>...,
            std::negation<std::is_constructible<IValue, Args>>...>,
        std::nullptr_t>>
inline IValue::IValue(std::tuple<Args...>&& t)
    : IValue(std::apply(c10::ivalue::Tuple::create<Args&&...>, std::move(t))) {
}
// String and list constructors. Ownership of the intrusive pointer is taken;
// null pointers are stored as the undefined-tensor sentinel.
inline IValue::IValue(c10::intrusive_ptr<ivalue::ConstantString> v)
    : tag(Tag::String) {
  payload.u.as_intrusive_ptr = null_to_undefined_tensor(v.release());
}
inline IValue::IValue(std::string v)
    : IValue(ivalue::ConstantString::create(std::move(v))) {}
inline IValue::IValue(c10::impl::GenericList v)
    : tag(Tag::GenericList) {
  payload.u.as_intrusive_ptr = null_to_undefined_tensor(v.impl_.release());
}
template <class T, IValue::enable_if_list_is_ivalue_constructible<T>>
inline IValue::IValue(c10::List<T>&& v) : IValue(impl::toList<T>(std::move(v))) {}
template <class T, IValue::enable_if_list_is_ivalue_constructible<T>>
inline IValue::IValue(const c10::List<T>& v) : IValue(impl::toList<T>(v)) {}
// ArrayRef is copied element-by-element into a fresh typed list.
template <class T, IValue::enable_if_list_is_ivalue_constructible<T>>
inline IValue::IValue(at::ArrayRef<T> v) : IValue(c10::List<T>()) {
  auto list = to<c10::List<T>>();
  list.reserve(v.size());
  for (const auto& e : v) {
    list.push_back(e);
  }
}
// SymInt sequence constructors: when every element is a plain integer the
// value is typed as an IntList (so toIntList works); otherwise it becomes a
// SymInt list.
template <class T, IValue::enable_if_symint<T>>
inline IValue::IValue(at::ArrayRef<T> v) : IValue() {
  auto vi = c10::asIntArrayRefSlowOpt(v);
  if (vi.has_value()) {
    // This list is entirely integers; ensure it is typed as
    // an IntList so toIntList works
    *this = IValue(*vi);
  } else {
    // This list has SymInts; type it as a SymInt
    *this = IValue(impl::toList<c10::SymInt>(c10::List<c10::SymInt>()));
    auto list = to<c10::List<c10::SymInt>>();
    list.reserve(v.size());
    for (const auto& e : v) {
      list.push_back(e);
    }
  }
}
// nullopt maps to None (the default-constructed IValue).
template <class T, IValue::enable_if_symint<T>>
inline IValue::IValue(at::OptionalArrayRef<T> mb_v) : IValue() {
  if (!mb_v.has_value()) return;
  *this = IValue(*mb_v);
}
template <class T, IValue::enable_if_symint<T>>
inline IValue::IValue(const std::vector<T>& v) : IValue() {
  *this = IValue(at::ArrayRef<T>(v));
}
// Move overload: same int-vs-symint split, but moves the elements in.
template <class T, IValue::enable_if_symint<T>>
inline IValue::IValue(std::vector<T>&& v) : IValue() {
  auto vi = c10::asIntArrayRefSlowOpt(v);
  if (vi.has_value()) {
    // This list is entirely integers; ensure it is typed as
    // an IntList so toIntList works
    *this = IValue(*vi);
  } else {
    // This list has SymInts; type it as a SymInt
    *this = IValue(impl::toList<c10::SymInt>(c10::List<c10::SymInt>()));
    auto list = to<c10::List<c10::SymInt>>();
    list.reserve(v.size());
    for (auto&& e : std::move(v)) {
      list.push_back(std::move(e));
    }
  }
}
template <class T, IValue::enable_if_list_is_ivalue_constructible<T>>
inline IValue::IValue(const std::vector<T>& v) : IValue(c10::List<T>()) {
auto list = to<c10::List<T>>();
list.reserve(v.size());
for (const auto& e : v) {
list.push_back(e);
}
}
template <class T, IValue::enable_if_list_is_ivalue_constructible<T>>
inline IValue::IValue(std::vector<T>&& v) : IValue(c10::List<T>()) {
auto list = to<c10::List<T>>();
list.reserve(v.size());
if constexpr (std::is_same_v<T, bool>) {
for (auto e : v) {
list.push_back(e);
}
} else {
for (auto&& e : std::move(v)) {
list.push_back(std::move(e));
}
}
}
template <class T, IValue::enable_if_list_is_ivalue_constructible<T>>
inline IValue::IValue(c10::OptionalArrayRef<T> v) : IValue() {
if (v.has_value()) {
*this = IValue(std::move(*v));
}
}
template <class T, size_t N>
inline IValue::IValue(std::array<T, N> v) : IValue(c10::List<T>()) {
auto list = to<c10::List<T>>();
list.reserve(v.size());
for (auto& e : v) {
list.push_back(std::move(e));
}
}
template <class T, IValue::enable_if_ilist_is_ivalue_constructible<T>>
inline IValue::IValue(c10::IListRef<T> v) : IValue() {
constexpr bool boxed_type_constructs_ivalue =
std::is_constructible_v<IValue, typename c10::IListRef<T>::boxed_type>;
// First, we try to use the boxed value.
// If we fail (either it's not in the boxed state, or its boxed type
// can not construct an IValue), we fallback to copying the list.
if (boxed_type_constructs_ivalue && v.isBoxed()) {
*this = IValue(impl::toList(v.toBoxed()));
} else {
c10::List<T> list;
list.reserve(v.size());
for (const auto& t : v) {
list.push_back(t);
}
*this = IValue(impl::toList(std::move(list)));
}
}
inline IValue::IValue(c10::impl::GenericDict v)
: tag(Tag::GenericDict) {
payload.u.as_intrusive_ptr = null_to_undefined_tensor(v.impl_.release());
}
template <class Key, class Value>
inline IValue::IValue(c10::Dict<Key, Value> v)
: IValue(impl::toGenericDict(std::move(v))) {}
template <class Key, class Value>
inline IValue::IValue(std::unordered_map<Key, Value> v)
: IValue(Dict<Key, Value>()) {
auto dict = to<c10::Dict<Key, Value>>();
dict.reserve(v.size());
for (auto& e : v) {
dict.insert(std::move(e.first), std::move(e.second));
}
}
template <class T, IValue::enable_if_ivalue_constructible<T>>
inline IValue::IValue(std::optional<T> v) : IValue() {
if (v.has_value()) {
*this = IValue(std::move(*v));
}
}
inline IValue::IValue(std::nullopt_t /*unused*/) : IValue() {}
inline IValue::IValue(c10::intrusive_ptr<ivalue::Object> v)
: tag(Tag::Object) {
payload.u.as_intrusive_ptr = null_to_undefined_tensor(v.release());
}
inline IValue::IValue(c10::intrusive_ptr<ivalue::PyObjectHolder> v)
: tag(Tag::PyObject) {
payload.u.as_intrusive_ptr = null_to_undefined_tensor(v.release());
}
inline IValue::IValue(c10::intrusive_ptr<ivalue::EnumHolder> v)
: tag(Tag::Enum) {
payload.u.as_intrusive_ptr = null_to_undefined_tensor(v.release());
}
inline IValue IValue::make_capsule(
intrusive_ptr<torch::CustomClassHolder> blob) {
IValue iv;
iv.tag = Tag::Capsule;
iv.payload.u.as_intrusive_ptr = null_to_undefined_tensor(blob.release());
return iv;
}
template <
typename T,
std::enable_if_t<std::is_base_of_v<torch::CustomClassHolder, T>, int>>
IValue::IValue(c10::intrusive_ptr<T> custom_class) : tag(Tag::Object) {
auto classType = []() {
try {
return c10::getCustomClassType<c10::intrusive_ptr<T>>();
} catch (const c10::Error&) {
throw c10::Error(
"Trying to instantiate a class that isn't a registered custom class: " +
std::string(c10::util::get_fully_qualified_type_name<T>()));
}
}();
auto ivalue_obj = c10::ivalue::Object::create(std::move(classType), /* numSlots */1);
ivalue_obj->setSlot(0, IValue::make_capsule(std::move(custom_class)));
payload.u.as_intrusive_ptr = null_to_undefined_tensor(ivalue_obj.release());
}
inline IValue::IValue(c10::intrusive_ptr<ivalue::Future> v)
: tag(Tag::Future) {
payload.u.as_intrusive_ptr = null_to_undefined_tensor(v.release());
}
inline IValue::IValue(c10::intrusive_ptr<ivalue::Await> v)
: tag(Tag::Await) {
payload.u.as_intrusive_ptr = null_to_undefined_tensor(v.release());
}
inline IValue::IValue(c10::intrusive_ptr<c10::RRefInterface> v)
: tag(Tag::RRef) {
payload.u.as_intrusive_ptr = null_to_undefined_tensor(v.release());
}
inline IValue::IValue(c10::intrusive_ptr<at::Quantizer> v)
: tag(Tag::Quantizer) {
payload.u.as_intrusive_ptr = null_to_undefined_tensor(v.release());
}
template <typename T>
inline IValue::IValue(c10::complex<T> c)
: tag(Tag::ComplexDouble) {
auto v = c10::make_intrusive<ivalue::ComplexHolder>(c);
payload.u.as_intrusive_ptr = v.release();
}
inline const std::string& IValue::toStringRef() const {
AT_ASSERT(isString(), "Expected String but got ", tagKind());
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
"called toStringRef on null intrusive_ptr IValue");
return static_cast<const c10::ivalue::ConstantString*>(
payload.u.as_intrusive_ptr)
->string();
}
inline std::optional<std::reference_wrapper<const std::string>> IValue::
toOptionalStringRef() const {
if (isNone()) {
return std::nullopt;
}
AT_ASSERT(isString(), "Expected std::optional<string> but got ", tagKind());
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
"called toOptionalStringRef on null intrusive_ptr IValue");
return std::reference_wrapper<const std::string>(
static_cast<const c10::ivalue::ConstantString*>(payload.u.as_intrusive_ptr)
->string());
}
inline std::string_view IValue::toStringView() const {
AT_ASSERT(isString(), "Expected String but got ", tagKind());
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(
payload.u.as_intrusive_ptr != c10::UndefinedTensorImpl::singleton(),
"called toStringView on null intrusive_ptr IValue");
return static_cast<const c10::ivalue::ConstantString*>(
payload.u.as_intrusive_ptr)
->string_view();
}
inline PyObject* IValue::toPyObject() const {
return toPyObjectHolder()->getPyObject();
}
template <typename T>
inline std::optional<T> IValue::toOptional() {
  // None maps to an empty optional; any other tag is converted via to<T>().
  if (!this->isNone()) {
    return this->to<T>();
  }
  return std::nullopt;
}
template <typename T>
inline std::optional<T> IValue::toOptional() const {
  // Const overload: same contract as the non-const version above.
  if (!this->isNone()) {
    return this->to<T>();
  }
  return std::nullopt;
}
// Whether this IValue holds a registered torch custom class; the actual
// check is delegated entirely to torch::isCustomClass.
inline bool IValue::isCustomClass() const {
  return torch::isCustomClass(*this);
}
// Identity comparison (Python `is`-style semantics): value equality for
// immutable primitives, pointer equality for reference types. See the
// numbered semantics list below.
inline bool IValue::isSameIdentity(const IValue& rhs) const {
  // We choose to not use memcmp for payload check due to potential random
  // padding characters on union type
  // Semantics:
  // 1. Immutable primitive values of the same type (Int, Double, None, Bool,
  //    Str) return value equality
  // 2. If it is a tensor type, we need to take undefined tensor into account
  // 3. Undefined_tensor is None and vice versa should be true
  // 4. If it is a reference type (i.e. isIntrusivePtr()), then is True when
  //    the pointed-to object is the same.
  // 5. False for all other comparisons.
  if (this->isNone() && rhs.isNone()) {
    return true;
  } else if (this->isBool() && rhs.isBool()) {
    // for bool type, do equality check
    return this->toBool() == rhs.toBool();
  } else if (this->isTensor() && rhs.isTensor()) {
    return this->payload.as_tensor.is_same(rhs.payload.as_tensor);
  } else if (this->isTensor() && rhs.isNone()) {
    // special case: undefined tensor and None are the same identity
    return !this->payload.as_tensor.defined();
  } else if (this->isNone() && rhs.isTensor()) {
    // special case: undefined tensor and None are the same identity
    return !rhs.payload.as_tensor.defined();
  } else if (this->isInt() && rhs.isInt()) {
    return this->toInt() == rhs.toInt();
  } else if (this->isDouble() && rhs.isDouble()) {
    return this->toDouble() == rhs.toDouble();
  } else if (this->isString() && rhs.isString()) {
    return this->toStringRef() == rhs.toStringRef();
  } else {
    // for objects holding in IValue, do shallow compare on pointer address to
    // testify the identity
    return this->isIntrusivePtr() && rhs.isIntrusivePtr() &&
        this->payload.u.as_intrusive_ptr == rhs.payload.u.as_intrusive_ptr;
  }
}
namespace ivalue {
namespace detail {
template <typename T>
IValue from_(T&& x, std::true_type /*unused*/) {
return IValue(std::forward<T>(x));
}
template <typename T>
IValue from_(c10::intrusive_ptr<T> x, std::false_type /*unused*/) {
return IValue(std::move(x));
}
template <typename T>
IValue from_(T&& /*x*/, std::false_type /*unused*/) {
static_assert(
guts::false_t<T>::value,
"You are calling from with a type that it doesn't support, and isn't a potential custom class (ie: is an intrusive_ptr)");
return IValue();
}
} // namespace detail
template <typename T>
IValue from(T&& x) {
return detail::from_(
std::forward<T>(x), typename std::is_constructible<IValue, T>::type{});
}
} // namespace ivalue
// MaybeOwned borrow support for IValue.
// - Non-pointer IValues (ints, doubles, None, ...) are trivially copyable,
//   so a borrow is just a copy.
// - Tensors borrow through MaybeOwnedTraits<at::Tensor>.
// - Other intrusive-pointer types copy payload+tag directly. NOTE(review):
//   this presumably skips the refcount bump (which would explain why
//   destroyBorrow only clears to None rather than destructing) -- confirm
//   against the IValue(payload, tag) constructor, which is not visible here.
template <>
struct MaybeOwnedTraits<IValue> {
  using owned_type = IValue;
  using borrow_type = IValue;
  // Create a non-owning IValue referring to the same underlying data.
  static borrow_type createBorrow(const owned_type& from) {
    if (!from.isPtrType()) {
      return from;
    }
    if (from.isTensor()) {
      return IValue(MaybeOwnedTraits<at::Tensor>::createBorrow(from.toTensor()));
    } else {
      return IValue(from.payload, from.tag);
    }
  }
  // Rebind an existing borrow; the previous contents are cleared first.
  static void assignBorrow(borrow_type& lhs, const borrow_type& rhs) {
    lhs.clearToNone();
    if (!rhs.isPtrType()) {
      lhs = rhs;
    } else if (rhs.isTensor()) {
      lhs = IValue(MaybeOwnedTraits<at::Tensor>::createBorrow(rhs.toTensor()));
    } else {
      lhs = IValue(rhs.payload, rhs.tag);
    }
  }
  // Borrows are reset to None instead of running the owning destructor.
  static void destroyBorrow(borrow_type& toDestroy) {
    toDestroy.clearToNone();
  }
  static const owned_type& referenceFromBorrow(const borrow_type& borrow) {
    return borrow;
  }
  static const owned_type* pointerFromBorrow(const borrow_type& borrow) {
    return &borrow;
  }
  static bool debugBorrowIsValid(const borrow_type& /*unused*/) {
    return true;
  }
};
template <>
struct IValue::TagType<c10::Type> {
static TORCH_API c10::TypePtr get(const IValue& /*v*/);
};
template <>
struct IValue::TagType<c10::DynamicType> {
static TORCH_API c10::TypePtr get(const IValue& /*v*/);
};
template <typename T>
TypePtr IValue::type() const {
return IValue::TagType<T>::get(*this);
}
} // namespace c10
C10_DIAGNOSTIC_POP() | c | github | https://github.com/pytorch/pytorch | aten/src/ATen/core/ivalue_inl.h |
############################ Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2014 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2016 Jannis Gebauer <ja.geb@me.com> #
# Copyright 2016 Peter Buckley <dx-pbuckley@users.noreply.github.com> #
# Copyright 2018 Wan Liuyang <tsfdye@gmail.com> #
# Copyright 2018 sfdye <tsfdye@gmail.com> #
# #
# This file is part of PyGithub. #
# http://pygithub.readthedocs.io/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
import github.GithubObject
class GitAuthor(github.GithubObject.NonCompletableGithubObject):
    """
    This class represents GitAuthors: the author/committer entries attached
    to git commit objects. The base class name indicates it is
    non-completable, so all attributes come from the payload that created
    the object.
    """
    def __repr__(self):
        # The name is the most human-meaningful attribute to show.
        return self.get__repr__({"name": self._name.value})
    @property
    def date(self):
        """
        :type: datetime.datetime
        """
        return self._date.value
    @property
    def email(self):
        """
        :type: string
        """
        return self._email.value
    @property
    def name(self):
        """
        :type: string
        """
        return self._name.value
    def _initAttributes(self):
        # All attributes default to NotSet until _useAttributes() fills them.
        self._date = github.GithubObject.NotSet
        self._email = github.GithubObject.NotSet
        self._name = github.GithubObject.NotSet
    def _useAttributes(self, attributes):
        # Each attribute is only populated when present in the API payload.
        if "date" in attributes:  # pragma no branch
            self._date = self._makeDatetimeAttribute(attributes["date"])
        if "email" in attributes:  # pragma no branch
            self._email = self._makeStringAttribute(attributes["email"])
        if "name" in attributes:  # pragma no branch
            self._name = self._makeStringAttribute(attributes["name"])
#!/usr/bin/env python2.6
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2010,2011,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the generated profiles."""
import os
import unittest
from lxml import etree
import gzip
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
class TestProfile(TestBrokerCommand):
    """Tests inspecting the XML profiles generated by the broker.

    Fix: ``failUnless`` is a deprecated alias of ``assertTrue``; all uses
    are replaced with the canonical spelling (same semantics).
    """

    def load_profile(self, name):
        """Parse and return the profile for *name* as an lxml element tree.

        Asserts that the profile file exists on disk and transparently
        handles gzip-compressed profiles.
        """
        path = os.path.join(self.config.get("broker", "profilesdir"),
                            name + self.profile_suffix)
        self.assertTrue(os.path.exists(path),
                        "Profile %s does not exist" % path)
        if self.gzip_profiles:
            path = gzip.open(path)
        tree = etree.parse(path)
        return tree

    def testunittest00sysloc(self):
        """Verify campus and DNS search domains in hardware/sysloc."""
        tree = self.load_profile("unittest00.one-nyp.ms.com")
        sysloc = tree.xpath("nlist[@name='hardware']/nlist[@name='sysloc']")
        self.assertEqual(len(sysloc), 1, "Number of sysloc elements was %d "
                         "instead of 1" % len(sysloc))
        sysloc = sysloc[0]
        campus = sysloc.xpath("string[@name='campus']")
        self.assertTrue(campus, "No campus in sysloc")
        campus = campus[0]
        self.assertEqual(campus.text, "ny", "Campus value was '%s' instead of ny"
                         % campus.text)
        domains = sysloc.xpath("list[@name='dns_search_domains']/string")
        self.assertTrue(domains, "No DNS search domains set")
        searchlist = [e.text for e in domains]
        # DNS maps:
        # - aqd-unittest.ms.com comes from rack ut3
        # - utroom1 also has aqd-unittest.ms.com mapped _after_ td1 and td2,
        #   but the rack mapping is more specific, so aqd-unittest.ms.com
        #   remains at the beginning
        # - new-york.ms.com comes from the campus
        expect = ['aqd-unittest.ms.com', 'td1.aqd-unittest.ms.com',
                  'td2.aqd-unittest.ms.com', 'new-york.ms.com']
        self.assertEqual(searchlist, expect,
                         "dns_search_domains in sysloc was %s instead of %s" %
                         (repr(searchlist), repr(expect)))

    def testaquilon61sysloc(self):
        """A host outside utroom1 should only inherit the campus domain."""
        tree = self.load_profile("aquilon61.aqd-unittest.ms.com")
        sysloc = tree.xpath("nlist[@name='hardware']/nlist[@name='sysloc']")
        self.assertEqual(len(sysloc), 1, "Number of sysloc elements was %d "
                         "instead of 1" % len(sysloc))
        sysloc = sysloc[0]
        domains = sysloc.xpath("list[@name='dns_search_domains']/string")
        self.assertTrue(domains, "No DNS search domains set")
        searchlist = [e.text for e in domains]
        # Not in utroom1, so no (td[12].)?aqd-unittest.ms.com
        expect = ['new-york.ms.com']
        self.assertEqual(searchlist, expect,
                         "dns_search_domains in sysloc was %s instead of %s" %
                         (repr(searchlist), repr(expect)))

    def testresolver(self):
        """Verify the resolver component's search list, including ms.com."""
        tree = self.load_profile("unittest00.one-nyp.ms.com")
        rs = tree.xpath("nlist[@name='software']/nlist[@name='components']/nlist[@name='resolver']")
        self.assertEqual(len(rs), 1, "Number of resolver elements was %d "
                         "instead of 1" % len(rs))
        rs = rs[0]
        searchlist = [e.text for e in rs.xpath("list[@name='search']/string")]
        # DNS maps:
        # - aqd-unittest.ms.com comes from rack ut3
        # - utroom1 also has aqd-unittest.ms.com mapped _after_ td1 and td2,
        #   but the rack mapping is more specific, so aqd-unittest.ms.com
        #   remains at the beginning
        # - new-york.ms.com comes from the campus
        # - ms.com comes from DEFAULT_DOMAIN in aquilon/archetype/base.tpl
        expect = ['aqd-unittest.ms.com', 'td1.aqd-unittest.ms.com',
                  'td2.aqd-unittest.ms.com', 'new-york.ms.com', 'ms.com']
        self.assertEqual(searchlist, expect,
                         "search list in resolver was %s instead of %s" %
                         (repr(searchlist), repr(expect)))
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestProfile)
unittest.TextTestRunner(verbosity=2).run(suite) | unknown | codeparrot/codeparrot-clean | ||
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from collections import defaultdict, namedtuple
from pants.backend.jvm.targets.import_jars_mixin import ImportJarsMixin
class JarImportProducts(object):
  """Represents the products of jar import resolutions.
  Jar imports are jars containing source code to be unpacked and used locally.
  """
  JarImport = namedtuple('JarImport', ['coordinate', 'jar'])
  """Represents a jar containing source imports.
  Each jar import has a `coordinate` :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
  and a `jar` path that points to the resolved jar import for the `coordinate`.
  """
  def __init__(self):
    # Maps each ImportJarsMixin target to the list of JarImports resolved
    # for it; defaultdict so imports() can return [] for untouched targets.
    self._imports = defaultdict(list)
  def imported(self, target, coordinate, jar):
    """Registers a :class`JarImportProducts.JarImport` for the given target.
    :param target: The :class:`pants.backend.jvm.targets.import_jars_mixin.ImportJarsMixin` target
                   whose `imported_jar_library_specs` were resolved.
    :param coordinate: The maven coordinate of the import jar.
    :type coordinate: :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
    :param string jar: The path of the resolved import jar.
    :raises ValueError: If `target` is not an `ImportJarsMixin`.
    """
    if not isinstance(target, ImportJarsMixin):
      raise ValueError('The given target is not an `ImportJarsMixin`: {}'.format(target))
    self._imports[target].append(self.JarImport(coordinate, jar))
  def imports(self, target):
    """Returns a list of :class:`JarImportProducts.JarImport`s for the given target.
    Will be an empty list if the target has no jar imports.
    :rtype: list
    """
    return self._imports[target]
  def __repr__(self):
    return 'JarImportProducts({!r})'.format(self._imports)
#! /usr/bin/env python
# coding=UTF-8
import os
import re
import sys
import urllib2
class Downloader:
def __init__(self):
""" do nothing
"""
@staticmethod
def _file_size(file_name):
file_size = os.path.getsize(file_name)
if file_size < 1024:
return str(file_size) + 'bytes'
if file_size < 1024 * 1024:
return str(file_size / 1024) + "KB"
if file_size < 1024 * 1024 * 1024:
return str(file_size / 1024 / 1024) + "MB"
if file_size < 1024 * 1024 * 1024 * 1024:
return str(file_size / 1024 / 1024 / 1024) + "GB"
@staticmethod
def _name_from_url(url):
index = url.rfind('/')
name = url[index + 1:]
return name
def download(self, url):
if url is not None:
print 'trying to download ' + url
output = None
try:
result = urllib2.urlopen(url)
filename = self._name_from_url(url)
output = open("./" + filename, "wb")
while True:
data = result.read(1024 * 1024)
if len(data) != 0:
output.write(data)
print self._file_size(filename)
else:
break
print self._file_size(filename)
except Exception as exception:
print exception
finally:
if output is not None:
output.close()
return 0
def main(argv=sys.argv):
print argv
if len(argv) == 1:
print 'Usage: dl.py <url>'
dl = Downloader()
dl.download(argv[1])
sys.exit(main()) | unknown | codeparrot/codeparrot-clean | ||
# Abort the script immediately if any command fails.
set -o errexit
cd src
# Shallow-clone the pinned Jepsen branch, authenticating with the
# ${github_token} environment variable via the x-access-token scheme.
git clone --branch=v0.3.0-jepsen-mongodb-master --depth=1 https://x-access-token:${github_token}@github.com/10gen/jepsen.git jepsen-mongodb
cd jepsen-mongodb
# Build and install the Jepsen harness into the local Maven repository.
lein install
"""
=============
Masked Arrays
=============
Arrays sometimes contain invalid or missing data. When doing operations
on such arrays, we wish to suppress invalid values, which is the purpose masked
arrays fulfill (an example of typical use is given below).
For example, examine the following array:
>>> x = np.array([2, 1, 3, np.nan, 5, 2, 3, np.nan])
When we try to calculate the mean of the data, the result is undetermined:
>>> np.mean(x)
nan
The mean is calculated using roughly ``np.sum(x)/len(x)``, but since
any number added to ``NaN`` [1]_ produces ``NaN``, this doesn't work. Enter
masked arrays:
>>> m = np.ma.masked_array(x, np.isnan(x))
>>> m
masked_array(data = [2.0 1.0 3.0 -- 5.0 2.0 3.0 --],
mask = [False False False True False False False True],
fill_value=1e+20)
Here, we construct a masked array that suppress all ``NaN`` values. We
may now proceed to calculate the mean of the other values:
>>> np.mean(m)
2.6666666666666665
.. [1] Not-a-Number, a floating point value that is the result of an
invalid operation.
.. moduleauthor:: Pierre Gerard-Marchant
.. moduleauthor:: Jarrod Millman
"""
from . import core
from .core import *
from . import extras
from .extras import *
# Public API: the two submodule names plus everything they declare public.
__all__ = ['core', 'extras']
__all__ += core.__all__
__all__ += extras.__all__
from numpy._pytesttester import PytestTester
# Expose ``np.ma.test()`` and drop the helper class from the namespace.
test = PytestTester(__name__)
del PytestTester
/*
* Copyright 2014-2025 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.server.plugins.di
import io.ktor.server.config.*
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Deferred
import kotlin.reflect.KProperty
/**
* Functional interface for generating a fresh `DependencyResolver`.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.DependencyResolution)
*/
public fun interface DependencyResolution {
/**
* Resolves and creates a new instance of `DependencyResolver` using the provided `DependencyProvider`
* and `DependencyReflection`.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.DependencyResolution.resolve)
*
* @param provider The `DependencyProvider` instance responsible for managing dependency initializers
* and declarations.
* @param external A `DependencyMap` of externally provided dependencies available during resolution.
* @param reflection The `DependencyReflection` instance used for reflective creation of dependency
* instances.
* @return A new instance of `DependencyResolver` configured with the provided arguments
*/
public fun CoroutineScope.resolve(
provider: DependencyProvider,
external: DependencyMap,
reflection: DependencyReflection,
): DependencyResolver
}
/**
* A map of object instances.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.DependencyMap)
*/
public interface DependencyMap {
public companion object {
/**
* A predefined, immutable, and empty implementation of the `DependencyMap` interface.
*
* This object does not contain any dependencies and will always return `false` when checked
* for a dependency's presence using the `contains` method. Attempting to retrieve a dependency
* using the `get` method of this object will always throw a `MissingDependencyException`.
*
* Use this object as a default or placeholder implementation.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.DependencyMap.Companion.EMPTY)
*/
public val EMPTY: DependencyMap = object : DependencyMap {
override fun contains(key: DependencyKey): Boolean = false
override fun getInitializer(key: DependencyKey): DependencyInitializer =
throw MissingDependencyException(key)
}
public fun fromMap(map: Map<DependencyKey, Any>): DependencyMap =
fromLookup(map::get)
@Suppress("UNCHECKED_CAST")
public fun fromLookup(resolve: (DependencyKey) -> Any?): DependencyMap = object : DependencyMap {
override fun contains(key: DependencyKey): Boolean = resolve(key) != null
override fun getInitializer(key: DependencyKey): DependencyInitializer =
DependencyInitializer.Value(key, resolve(key))
}
}
/**
* Checks if the given dependency key is present in the dependency map.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.DependencyMap.contains)
*
* @param key The unique key that identifies the dependency to check.
* @return `true` if the dependency identified by the key is present in the map; otherwise `false`
*/
public operator fun contains(key: DependencyKey): Boolean
public fun getInitializer(key: DependencyKey): DependencyInitializer
}
/**
* Get an item from the dependency map synchronously.
*
* Unavailable on WASM / JS targets.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.getBlocking)
*
* @param key the unique key that identifies the dependency to retrieve
* @return the instance of the dependency associated with the given key
* @throws MissingDependencyException if no dependency is associated with the given key
*/
public expect fun <T> DependencyResolver.getBlocking(key: DependencyKey): T
/**
* A mutable extension of [DependencyMap] that allows for adding and retrieving dependencies.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.MutableDependencyMap)
*/
public interface MutableDependencyMap : DependencyMap {
/**
* Retrieves the value associated with the specified key if it exists. If the key does not already have an associated
* value, the result of invoking the [defaultValue] function will be stored and returned as the value for the given key.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.MutableDependencyMap.getOrPut)
*
* @param key the dependency key used to look up or store the value.
* @param defaultValue a lambda function that provides a default value to store and return if the key is not found.
* @return the value associated with the key, either retrieved from the existing association or newly computed and stored.
*/
public suspend fun <T> getOrPut(key: DependencyKey, defaultValue: suspend () -> T): T
}
/**
* Extends [DependencyMap] with reflection, allowing for the automatic injection of types.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.DependencyResolver)
*/
public interface DependencyResolver : MutableDependencyMap, CoroutineScope {
public val reflection: DependencyReflection
/**
* Decorates the dependency resolver with a qualified name for the expected type.
*
* Useful with delegation when used like: `val connection by dependencies.named("postgres")`
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.DependencyResolver.named)
*/
public fun named(key: String): DependencyResolverContext =
DependencyResolverContext(this, key)
@Suppress("UNCHECKED_CAST")
public fun <T> getDeferred(key: DependencyKey): Deferred<T> =
getInitializer(key).resolve(this) as Deferred<T>
/**
* Retrieves an instance of the dependency associated with the given key from the dependency map.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.DependencyResolver.get)
*
* @param key the unique key that identifies the dependency to retrieve
* @return the instance of the dependency associated with the given key
* @throws MissingDependencyException if no dependency is associated with the given key
*/
public suspend fun <T> get(key: DependencyKey): T =
getDeferred<T>(key).await()
}
@Suppress("UNCHECKED_CAST")
public class MapDependencyResolver(
    private val map: DependencyInitializerMap,
    private val extension: DependencyMap,
    override val reflection: DependencyReflection,
    private var waitForValues: Boolean = false,
    private val coroutineScope: CoroutineScope,
) : DependencyResolver, CoroutineScope by coroutineScope {
    /**
     * Updates the waitForValues flag so that future consumers will fail immediately when no initializer is found.
     *
     * This is called during application startup when all modules have either suspended or completed.
     *
     * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.MapDependencyResolver.stopWaiting)
     */
    public fun stopWaiting() {
        waitForValues = false
        // Cancel all pending deferred values
        map.values.filterIsInstance<DependencyInitializer.Missing>().forEach { placeholder ->
            placeholder.throwMissing()
        }
    }
    // A key is present if it was explicitly provided to the initializer map
    // or is available from the external (extension) dependency map.
    override fun contains(key: DependencyKey): Boolean =
        map.isProvided(key) || extension.contains(key)
    // Resolution order: explicit declarations first, then external maps,
    // then an implicit null for nullable keys, finally the missing policy.
    override fun getInitializer(key: DependencyKey): DependencyInitializer =
        map[key]
            ?: tryExternal(key)
            ?: tryNullable(key)
            ?: onMissing(key)
    override suspend fun <T> getOrPut(key: DependencyKey, defaultValue: suspend () -> T): T {
        val deferred = map.getOrPut(key) {
            DependencyInitializer.Explicit(key) {
                defaultValue()
            }
        }.resolve(this)
        return deferred.await() as T
    }
    private fun tryExternal(key: DependencyKey): DependencyInitializer? =
        if (extension.contains(key)) {
            extension.getInitializer(key)
        } else {
            null
        }
    // Nullable keys resolve to a null value rather than failing when absent.
    private fun tryNullable(key: DependencyKey): DependencyInitializer? =
        if (key.isNullable()) {
            DependencyInitializer.Null(key)
        } else {
            null
        }
    // While startup is still in progress (waitForValues), park a Missing
    // placeholder so resolution can wait; afterwards, fail fast.
    private fun onMissing(key: DependencyKey): DependencyInitializer =
        if (waitForValues) {
            map.getOrPut(key) { DependencyInitializer.Missing(key, this) }
        } else {
            throw MissingDependencyException(key)
        }
}
/**
* Combines two `DependencyMap`s into one.
*
* Where keys are common, precedence is given to the right-hand argument.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.plus)
*
* @param right The DependencyMap to merge with.
* @return A new DependencyMap instance that contains the keys of both.
*/
public operator fun DependencyMap.plus(right: DependencyMap): DependencyMap =
MergedDependencyMap(this, right)
/**
* Get the dependency from the map for the key represented by the type (and optionally, with the given name).
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.resolve)
*/
public suspend inline fun <reified T> DependencyResolver.resolve(key: String? = null): T =
get(DependencyKey<T>(key))
internal class MergedDependencyMap(
    private val left: DependencyMap,
    private val right: DependencyMap,
) : DependencyMap {
    // The right-hand map shadows the left wherever both contain a key.
    override fun contains(key: DependencyKey): Boolean =
        key in right || key in left

    override fun getInitializer(key: DependencyKey): DependencyInitializer =
        when {
            key in right -> right.getInitializer(key)
            else -> left.getInitializer(key)
        }
}
/**
* Qualifier for specifying when a dependency key maps to a property in the file configuration.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.PropertyQualifier)
*/
public data object PropertyQualifier
/**
* Implementation of [DependencyMap] for referencing items from the server's file configuration.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.ConfigurationDependencyMap)
*/
@Suppress("UNCHECKED_CAST")
public class ConfigurationDependencyMap(
    private val config: ApplicationConfig,
) : DependencyMap {
    // Only property-qualified, named keys can map to configuration entries.
    override fun contains(key: DependencyKey): Boolean {
        if (key.qualifier != PropertyQualifier) return false
        val name = key.name ?: return false
        return config.propertyOrNull(name) != null
    }

    override fun getInitializer(key: DependencyKey): DependencyInitializer =
        DependencyInitializer.Value(key, getPropertyValue(key))

    private fun getPropertyValue(key: DependencyKey): Any? {
        val name = key.name
        if (key.qualifier != PropertyQualifier || name == null) {
            throw MissingDependencyException(key)
        }
        // Convert the raw property to the requested type, or fail if absent.
        return config.propertyOrNull(name)?.getAs(key.type)
            ?: throw MissingDependencyException(key)
    }
}
/**
* Context for property delegation with chaining (i.e., `dependencies.named("foo")`)
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.DependencyResolverContext)
*/
public data class DependencyResolverContext(
val resolver: DependencyResolver,
val name: String,
) {
/**
* Property delegation for [DependencyResolverContext] for use with the `named` shorthand for string qualifiers.
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.DependencyResolverContext.getValue)
*/
public inline operator fun <reified T> getValue(thisRef: Any?, property: KProperty<*>): T =
resolver.getBlocking(DependencyKey<T>(name))
/**
* Get the dependency from the map for the key represented by the type (and optionally, with the given name).
*
* [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.server.plugins.di.DependencyResolverContext.resolve)
*/
public suspend inline fun <reified T> DependencyResolver.resolve(key: String? = null): T =
get(DependencyKey<T>(key))
} | kotlin | github | https://github.com/ktorio/ktor | ktor-server/ktor-server-plugins/ktor-server-di/common/src/io/ktor/server/plugins/di/DependencyResolution.kt |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Kenneth D. Evensen <kevensen@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
module: pamd
author:
- "Kenneth D. Evensen (@kevensen)"
short_description: Manage PAM Modules
description:
- Edit PAM service's type, control, module path and module arguments.
In order for a PAM rule to be modified, the type, control and
module_path must match an existing rule. See man(5) pam.d for details.
version_added: "2.3"
options:
name:
required: true
description:
- The name generally refers to the PAM service file to
change, for example system-auth.
type:
required: true
description:
- The type of the PAM rule being modified. The type, control
and module_path all must match a rule to be modified.
control:
required: true
description:
- The control of the PAM rule being modified. This may be a
complicated control with brackets. If this is the case, be
sure to put "[bracketed controls]" in quotes. The type,
control and module_path all must match a rule to be modified.
module_path:
required: true
description:
- The module path of the PAM rule being modified. The type,
control and module_path all must match a rule to be modified.
new_type:
description:
- The new type to assign to the new rule.
new_control:
description:
- The new control to assign to the new rule.
new_module_path:
description:
- The new module path to be assigned to the new rule.
module_arguments:
description:
- When state is 'updated', the module_arguments will replace existing
module_arguments. When state is 'args_absent' args matching those
listed in module_arguments will be removed. When state is
'args_present' any args listed in module_arguments are added if
missing from the existing rule. Furthermore, if the module argument
takes a value denoted by '=', the value will be changed to that specified
in module_arguments. Note that module_arguments is a list. Please see
the examples for usage.
state:
default: updated
choices:
- updated
- before
- after
- args_present
- args_absent
- absent
description:
- The default of 'updated' will modify an existing rule if type,
control and module_path all match an existing rule. With 'before',
the new rule will be inserted before a rule matching type, control
and module_path. Similarly, with 'after', the new rule will be inserted
after an existing rule matching type, control and module_path. With
either 'before' or 'after' new_type, new_control, and new_module_path
must all be specified. If state is 'args_absent' or 'args_present',
new_type, new_control, and new_module_path will be ignored. State
'absent' will remove the rule. The 'absent' state was added in version
2.4 and is only available in Ansible versions >= 2.4.
path:
default: /etc/pam.d/
description:
- This is the path to the PAM service files
"""
EXAMPLES = """
- name: Update pamd rule's control in /etc/pam.d/system-auth
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
new_control: sufficient
- name: Update pamd rule's complex control in /etc/pam.d/system-auth
pamd:
name: system-auth
type: session
control: '[success=1 default=ignore]'
module_path: pam_succeed_if.so
new_control: '[success=2 default=ignore]'
- name: Insert a new rule before an existing rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
new_type: auth
new_control: sufficient
new_module_path: pam_faillock.so
state: before
- name: Insert a new rule pam_wheel.so with argument 'use_uid' after an \
existing rule pam_rootok.so
pamd:
name: su
type: auth
control: sufficient
module_path: pam_rootok.so
new_type: auth
new_control: required
new_module_path: pam_wheel.so
module_arguments: 'use_uid'
state: after
- name: Remove module arguments from an existing rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
module_arguments: ''
state: updated
- name: Replace all module arguments in an existing rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
module_arguments: 'preauth
silent
deny=3
unlock_time=604800
fail_interval=900'
state: updated
- name: Remove specific arguments from a rule
pamd:
name: system-auth
type: session control='[success=1 default=ignore]'
module_path: pam_succeed_if.so
module_arguments: crond,quiet
state: args_absent
- name: Ensure specific arguments are present in a rule
pamd:
name: system-auth
type: session
control: '[success=1 default=ignore]'
module_path: pam_succeed_if.so
module_arguments: crond,quiet
state: args_present
- name: Ensure specific arguments are present in a rule (alternative)
pamd:
name: system-auth
type: session
control: '[success=1 default=ignore]'
module_path: pam_succeed_if.so
module_arguments:
- crond
- quiet
state: args_present
- name: Module arguments requiring commas must be listed as a Yaml list
pamd:
name: special-module
type: account
control: required
module_path: pam_access.so
module_arguments:
- listsep=,
state: args_present
- name: Update specific argument value in a rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
module_arguments: 'fail_interval=300'
state: args_present
- name: Add pam common-auth rule for duo
pamd:
name: common-auth
new_type: auth
new_control: '[success=1 default=ignore]'
new_module_path: '/lib64/security/pam_duo.so'
state: after
type: auth
module_path: pam_sss.so
control: 'requisite'
"""
RETURN = '''
change_count:
description: How many rules were changed
type: int
sample: 1
returned: success
version_added: 2.4
new_rule:
description: The changes to the rule
type: string
sample: None None None sha512 shadow try_first_pass use_authtok
returned: success
version_added: 2.4
updated_rule_(n):
description: The rule(s) that was/were changed
type: string
sample:
- password sufficient pam_unix.so sha512 shadow try_first_pass
use_authtok
returned: success
version_added: 2.4
action:
description:
- "That action that was taken and is one of: update_rule,
insert_before_rule, insert_after_rule, args_present, args_absent,
absent."
returned: always
type: string
sample: "update_rule"
version_added: 2.4
dest:
description:
- "Path to pam.d service that was changed. This is only available in
Ansible version 2.3 and was removed in 2.4."
returned: success
type: string
sample: "/etc/pam.d/system-auth"
...
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import os
import re
import time
# The PamdRule class encapsulates a rule in a pam.d service
class PamdRule(object):
def __init__(self, rule_type,
rule_control, rule_module_path,
rule_module_args=None):
self.rule_type = rule_type
self.rule_control = rule_control
self.rule_module_path = rule_module_path
try:
if (rule_module_args is not None and
type(rule_module_args) is list):
self.rule_module_args = rule_module_args
elif (rule_module_args is not None and
type(rule_module_args) is str):
self.rule_module_args = rule_module_args.split()
except AttributeError:
self.rule_module_args = []
@classmethod
def rulefromstring(cls, stringline):
pattern = None
rule_type = ''
rule_control = ''
rule_module_path = ''
rule_module_args = ''
complicated = False
if '[' in stringline:
pattern = re.compile(
r"""([\-A-Za-z0-9_]+)\s* # Rule Type
\[([A-Za-z0-9_=\s]+)\]\s* # Rule Control
([A-Za-z0-9/_\-\.]+)\s* # Rule Path
([A-Za-z0-9,_=<>\-\s\./]*)""", # Rule Args
re.X)
complicated = True
else:
pattern = re.compile(
r"""([@\-A-Za-z0-9_]+)\s* # Rule Type
([A-Za-z0-9_\-]+)\s* # Rule Control
([A-Za-z0-9/_\-\.]*)\s* # Rule Path
([A-Za-z0-9,_=<>\-\s\./]*)""", # Rule Args
re.X)
result = pattern.match(stringline)
rule_type = result.group(1)
if complicated:
rule_control = '[' + result.group(2) + ']'
else:
rule_control = result.group(2)
rule_module_path = result.group(3)
if result.group(4) is not None:
rule_module_args = result.group(4)
return cls(rule_type, rule_control, rule_module_path, rule_module_args)
def get_module_args_as_string(self):
try:
if self.rule_module_args is not None:
return ' '.join(self.rule_module_args)
except AttributeError:
pass
return ''
def __str__(self):
return "%-10s\t%s\t%s %s" % (self.rule_type,
self.rule_control,
self.rule_module_path,
self.get_module_args_as_string())
# PamdService encapsulates an entire service and contains one or more rules
class PamdService(object):
def __init__(self, ansible=None):
if ansible is not None:
self.check = ansible.check_mode
self.check = False
self.ansible = ansible
self.preamble = []
self.rules = []
self.fname = None
if ansible is not None:
self.path = self.ansible.params["path"]
self.name = self.ansible.params["name"]
def load_rules_from_file(self):
self.fname = os.path.join(self.path, self.name)
stringline = ''
try:
for line in open(self.fname, 'r'):
stringline += line.rstrip().lstrip()
stringline += '\n'
self.load_rules_from_string(stringline.replace("\\\n", ""))
except IOError as e:
self.ansible.fail_json(msg='Unable to open/read PAM module \
file %s with error %s. And line %s' %
(self.fname, to_native(e), stringline))
def load_rules_from_string(self, stringvalue):
for line in stringvalue.splitlines():
stringline = line.rstrip()
if line.startswith('#') and not line.isspace():
self.preamble.append(line.rstrip())
elif (not line.startswith('#') and
not line.isspace() and
len(line) != 0):
try:
self.ansible.log(msg="Creating rule from string %s" % stringline)
except AttributeError:
pass
self.rules.append(PamdRule.rulefromstring(stringline))
def write(self):
if self.fname is None:
self.fname = self.path + "/" + self.name
# If the file is a symbollic link, we'll write to the source.
pamd_file = os.path.realpath(self.fname)
temp_file = "/tmp/" + self.name + "_" + time.strftime("%y%m%d%H%M%S")
try:
f = open(temp_file, 'w')
f.write(str(self))
f.close()
except IOError:
self.ansible.fail_json(msg='Unable to create temporary \
file %s' % self.temp_file)
self.ansible.atomic_move(temp_file, pamd_file)
def __str__(self):
stringvalue = ''
previous_rule = None
for amble in self.preamble:
stringvalue += amble
stringvalue += '\n'
for rule in self.rules:
if (previous_rule is not None and
(previous_rule.rule_type.replace('-', '') !=
rule.rule_type.replace('-', ''))):
stringvalue += '\n'
stringvalue += str(rule).rstrip()
stringvalue += '\n'
previous_rule = rule
if stringvalue.endswith('\n'):
stringvalue = stringvalue[:-1]
return stringvalue
def update_rule(service, old_rule, new_rule):
changed = False
change_count = 0
result = {'action': 'update_rule'}
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
if (new_rule.rule_type is not None and
new_rule.rule_type != rule.rule_type):
rule.rule_type = new_rule.rule_type
changed = True
if (new_rule.rule_control is not None and
new_rule.rule_control != rule.rule_control):
rule.rule_control = new_rule.rule_control
changed = True
if (new_rule.rule_module_path is not None and
new_rule.rule_module_path != rule.rule_module_path):
rule.rule_module_path = new_rule.rule_module_path
changed = True
try:
if (new_rule.rule_module_args is not None and
new_rule.get_module_args_as_string() !=
rule.get_module_args_as_string()):
rule.rule_module_args = new_rule.rule_module_args
changed = True
except AttributeError:
pass
if changed:
result['updated_rule_' + str(change_count)] = str(rule)
result['new_rule'] = str(new_rule)
change_count += 1
result['change_count'] = change_count
return changed, result
def insert_before_rule(service, old_rule, new_rule):
index = 0
change_count = 0
result = {'action':
'insert_before_rule'}
changed = False
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
if index == 0:
service.rules.insert(0, new_rule)
changed = True
elif (new_rule.rule_type != service.rules[index - 1].rule_type or
new_rule.rule_control !=
service.rules[index - 1].rule_control or
new_rule.rule_module_path !=
service.rules[index - 1].rule_module_path):
service.rules.insert(index, new_rule)
changed = True
if changed:
result['new_rule'] = str(new_rule)
result['before_rule_' + str(change_count)] = str(rule)
change_count += 1
index += 1
result['change_count'] = change_count
return changed, result
def insert_after_rule(service, old_rule, new_rule):
index = 0
change_count = 0
result = {'action': 'insert_after_rule'}
changed = False
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
if (index == len(service.rules) - 1):
service.rules.insert(len(service.rules), new_rule)
changed = True
elif (new_rule.rule_type != service.rules[index + 1].rule_type or
new_rule.rule_control !=
service.rules[index + 1].rule_control or
new_rule.rule_module_path !=
service.rules[index + 1].rule_module_path):
service.rules.insert(index + 1, new_rule)
changed = True
if changed:
result['new_rule'] = str(new_rule)
result['after_rule_' + str(change_count)] = str(rule)
change_count += 1
index += 1
result['change_count'] = change_count
return changed, result
def remove_module_arguments(service, old_rule, module_args):
result = {'action': 'args_absent'}
changed = False
change_count = 0
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
for arg_to_remove in module_args:
for arg in rule.rule_module_args:
if arg == arg_to_remove:
rule.rule_module_args.remove(arg)
changed = True
result['removed_arg_' + str(change_count)] = arg
result['from_rule_' + str(change_count)] = str(rule)
change_count += 1
result['change_count'] = change_count
return changed, result
def add_module_arguments(service, old_rule, module_args):
result = {'action': 'args_present'}
changed = False
change_count = 0
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
for arg_to_add in module_args:
if "=" in arg_to_add:
pre_string = arg_to_add[:arg_to_add.index('=') + 1]
indicies = [i for i, arg
in enumerate(rule.rule_module_args)
if arg.startswith(pre_string)]
if len(indicies) == 0:
rule.rule_module_args.append(arg_to_add)
changed = True
result['added_arg_' + str(change_count)] = arg_to_add
result['to_rule_' + str(change_count)] = str(rule)
change_count += 1
else:
for i in indicies:
if rule.rule_module_args[i] != arg_to_add:
rule.rule_module_args[i] = arg_to_add
changed = True
result['updated_arg_' +
str(change_count)] = arg_to_add
result['in_rule_' +
str(change_count)] = str(rule)
change_count += 1
elif arg_to_add not in rule.rule_module_args:
rule.rule_module_args.append(arg_to_add)
changed = True
result['added_arg_' + str(change_count)] = arg_to_add
result['to_rule_' + str(change_count)] = str(rule)
change_count += 1
result['change_count'] = change_count
return changed, result
def remove_rule(service, old_rule):
result = {'action': 'absent'}
changed = False
change_count = 0
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
service.rules.remove(rule)
changed = True
return changed, result
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True, type='str'),
type=dict(required=True,
choices=['account', 'auth',
'password', 'session']),
control=dict(required=True, type='str'),
module_path=dict(required=True, type='str'),
new_type=dict(required=False,
choices=['account', 'auth',
'password', 'session']),
new_control=dict(required=False, type='str'),
new_module_path=dict(required=False, type='str'),
module_arguments=dict(required=False, type='list'),
state=dict(required=False, default="updated",
choices=['before', 'after', 'updated',
'args_absent', 'args_present', 'absent']),
path=dict(required=False, default='/etc/pam.d', type='str')
),
supports_check_mode=True,
required_if=[
("state", "args_present", ["module_arguments"]),
("state", "args_absent", ["module_arguments"]),
("state", "before", ["new_control"]),
("state", "before", ["new_type"]),
("state", "before", ["new_module_path"]),
("state", "after", ["new_control"]),
("state", "after", ["new_type"]),
("state", "after", ["new_module_path"])
]
)
service = module.params['name']
old_type = module.params['type']
old_control = module.params['control']
old_module_path = module.params['module_path']
new_type = module.params['new_type']
new_control = module.params['new_control']
new_module_path = module.params['new_module_path']
module_arguments = module.params['module_arguments']
state = module.params['state']
path = module.params['path']
pamd = PamdService(module)
pamd.load_rules_from_file()
old_rule = PamdRule(old_type,
old_control,
old_module_path)
new_rule = PamdRule(new_type,
new_control,
new_module_path,
module_arguments)
if state == 'updated':
change, result = update_rule(pamd,
old_rule,
new_rule)
elif state == 'before':
change, result = insert_before_rule(pamd,
old_rule,
new_rule)
elif state == 'after':
change, result = insert_after_rule(pamd,
old_rule,
new_rule)
elif state == 'args_absent':
change, result = remove_module_arguments(pamd,
old_rule,
module_arguments)
elif state == 'args_present':
change, result = add_module_arguments(pamd,
old_rule,
module_arguments)
elif state == 'absent':
change, result = remove_rule(pamd,
old_rule)
if not module.check_mode and change:
pamd.write()
facts = {}
facts['pamd'] = {'changed': change, 'result': result}
module.params['dest'] = pamd.fname
module.exit_json(changed=change, ansible_facts=facts)
if __name__ == '__main__':
main() | unknown | codeparrot/codeparrot-clean | ||
# Written to test interrupted system calls interfering with our many buffered
# IO implementations. http://bugs.python.org/issue12268
#
# This tests the '_io' module. Similar tests for Python 2.x's older
# default file I/O implementation exist within test_file2k.py.
#
# It was suggested that this code could be merged into test_io and the tests
# made to work using the same method as the existing signal tests in test_io.
# I was unable to get single process tests using alarm or setitimer that way
# to reproduce the EINTR problems. This process based test suite reproduces
# the problems prior to the issue12268 patch reliably on Linux and OSX.
# - gregory.p.smith
import os
import select
import signal
import subprocess
import sys
from test.test_support import run_unittest
import time
import unittest
# Test import all of the things we're about to try testing up front.
from _io import FileIO
@unittest.skipUnless(os.name == 'posix', 'tests requires a posix system.')
class TestFileIOSignalInterrupt(unittest.TestCase):
def setUp(self):
self._process = None
def tearDown(self):
if self._process and self._process.poll() is None:
try:
self._process.kill()
except OSError:
pass
def _generate_infile_setup_code(self):
"""Returns the infile = ... line of code for the reader process.
subclasseses should override this to test different IO objects.
"""
return ('import _io ;'
'infile = _io.FileIO(sys.stdin.fileno(), "rb")')
def fail_with_process_info(self, why, stdout=b'', stderr=b'',
communicate=True):
"""A common way to cleanup and fail with useful debug output.
Kills the process if it is still running, collects remaining output
and fails the test with an error message including the output.
Args:
why: Text to go after "Error from IO process" in the message.
stdout, stderr: standard output and error from the process so
far to include in the error message.
communicate: bool, when True we call communicate() on the process
after killing it to gather additional output.
"""
if self._process.poll() is None:
time.sleep(0.1) # give it time to finish printing the error.
try:
self._process.terminate() # Ensure it dies.
except OSError:
pass
if communicate:
stdout_end, stderr_end = self._process.communicate()
stdout += stdout_end
stderr += stderr_end
self.fail('Error from IO process %s:\nSTDOUT:\n%sSTDERR:\n%s\n' %
(why, stdout.decode(), stderr.decode()))
def _test_reading(self, data_to_write, read_and_verify_code):
"""Generic buffered read method test harness to validate EINTR behavior.
Also validates that Python signal handlers are run during the read.
Args:
data_to_write: String to write to the child process for reading
before sending it a signal, confirming the signal was handled,
writing a final newline and closing the infile pipe.
read_and_verify_code: Single "line" of code to read from a file
object named 'infile' and validate the result. This will be
executed as part of a python subprocess fed data_to_write.
"""
infile_setup_code = self._generate_infile_setup_code()
# Total pipe IO in this function is smaller than the minimum posix OS
# pipe buffer size of 512 bytes. No writer should block.
assert len(data_to_write) < 512, 'data_to_write must fit in pipe buf.'
# Start a subprocess to call our read method while handling a signal.
self._process = subprocess.Popen(
[sys.executable, '-u', '-c',
'import io, signal, sys ;'
'signal.signal(signal.SIGINT, '
'lambda s, f: sys.stderr.write("$\\n")) ;'
+ infile_setup_code + ' ;' +
'sys.stderr.write("Worm Sign!\\n") ;'
+ read_and_verify_code + ' ;' +
'infile.close()'
],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# Wait for the signal handler to be installed.
worm_sign = self._process.stderr.read(len(b'Worm Sign!\n'))
if worm_sign != b'Worm Sign!\n': # See also, Dune by Frank Herbert.
self.fail_with_process_info('while awaiting a sign',
stderr=worm_sign)
self._process.stdin.write(data_to_write)
signals_sent = 0
rlist = []
# We don't know when the read_and_verify_code in our child is actually
# executing within the read system call we want to interrupt. This
# loop waits for a bit before sending the first signal to increase
# the likelihood of that. Implementations without correct EINTR
# and signal handling usually fail this test.
while not rlist:
rlist, _, _ = select.select([self._process.stderr], (), (), 0.05)
self._process.send_signal(signal.SIGINT)
signals_sent += 1
if signals_sent > 200:
self._process.kill()
self.fail('reader process failed to handle our signals.')
# This assumes anything unexpected that writes to stderr will also
# write a newline. That is true of the traceback printing code.
signal_line = self._process.stderr.readline()
if signal_line != b'$\n':
self.fail_with_process_info('while awaiting signal',
stderr=signal_line)
# We append a newline to our input so that a readline call can
# end on its own before the EOF is seen and so that we're testing
# the read call that was interrupted by a signal before the end of
# the data stream has been reached.
stdout, stderr = self._process.communicate(input=b'\n')
if self._process.returncode:
self.fail_with_process_info(
'exited rc=%d' % self._process.returncode,
stdout, stderr, communicate=False)
# PASS!
# String format for the read_and_verify_code used by read methods.
_READING_CODE_TEMPLATE = (
'got = infile.{read_method_name}() ;'
'expected = {expected!r} ;'
'assert got == expected, ('
'"{read_method_name} returned wrong data.\\n"'
'"got data %r\\nexpected %r" % (got, expected))'
)
def test_readline(self):
"""readline() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello, world!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='readline',
expected=b'hello, world!\n'))
def test_readlines(self):
"""readlines() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='readlines',
expected=[b'hello\n', b'world!\n']))
def test_readall(self):
"""readall() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='readall',
expected=b'hello\nworld!\n'))
# read() is the same thing as readall().
self._test_reading(
data_to_write=b'hello\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='read',
expected=b'hello\nworld!\n'))
class TestBufferedIOSignalInterrupt(TestFileIOSignalInterrupt):
def _generate_infile_setup_code(self):
"""Returns the infile = ... line of code to make a BufferedReader."""
return ('infile = io.open(sys.stdin.fileno(), "rb") ;'
'import _io ;assert isinstance(infile, _io.BufferedReader)')
def test_readall(self):
"""BufferedReader.read() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='read',
expected=b'hello\nworld!\n'))
class TestTextIOSignalInterrupt(TestFileIOSignalInterrupt):
def _generate_infile_setup_code(self):
"""Returns the infile = ... line of code to make a TextIOWrapper."""
return ('infile = io.open(sys.stdin.fileno(), "rt", newline=None) ;'
'import _io ;assert isinstance(infile, _io.TextIOWrapper)')
def test_readline(self):
"""readline() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello, world!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='readline',
expected='hello, world!\n'))
def test_readlines(self):
"""readlines() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello\r\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='readlines',
expected=['hello\n', 'world!\n']))
def test_readall(self):
"""read() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='read',
expected="hello\nworld!\n"))
def test_main():
test_cases = [
tc for tc in globals().values()
if isinstance(tc, type) and issubclass(tc, unittest.TestCase)]
run_unittest(*test_cases)
if __name__ == '__main__':
test_main() | unknown | codeparrot/codeparrot-clean | ||
# Copyright (c) 2006-2008 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class InstanceInfo(object):
"""
Represents an EC2 Instance status response from CloudWatch
"""
def __init__(self, connection=None, id=None, state=None):
"""
:ivar str id: The instance's EC2 ID.
:ivar str state: Specifies the current status of the instance.
"""
self.connection = connection
self.id = id
self.state = state
def __repr__(self):
return 'InstanceInfo:%s' % self.id
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'instanceId' or name == 'InstanceId':
self.id = value
elif name == 'state':
self.state = value
else:
setattr(self, name, value) | unknown | codeparrot/codeparrot-clean | ||
name: check-style
# Get the repository with all commits to ensure that we can analyze
# all of the commits contributed via the Pull Request.
on:
pull_request:
types: [opened, synchronize]
# Avoid unnecessary builds. Unlike the main CI jobs, these are not
# ci-configurable (but could be).
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
check-style:
env:
CC: clang
jobname: ClangFormat
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
fetch-depth: 0
- run: ci/install-dependencies.sh
- name: git clang-format
continue-on-error: true
id: check_out
run: |
./ci/run-style-check.sh \
"${{github.event.pull_request.base.sha}}" | unknown | github | https://github.com/git/git | .github/workflows/check-style.yml |
#!/usr/bin/env python
'''
A PPP over MAVLink module
Andrew Tridgell
May 2012
'''
import time, os, fcntl, pty
from MAVProxy.modules.lib import mp_module
class PPPModule(mp_module.MPModule):
    """MAVProxy module that tunnels PPP traffic over MAVLink PPP packets.

    A pppd child process is attached via a pty; bytes read from the pty are
    forwarded as MAVLink PPP packets, and incoming PPP packets are written
    back to the pty.
    """

    def __init__(self, mpstate):
        super(PPPModule, self).__init__(mpstate, "ppp", "PPP link")
        # pppd options: no auth, stay in the foreground (nodetach), small
        # MTU so frames fit in MAVLink payloads, compression disabled
        self.command = "noauth nodefaultroute nodetach nodeflate nobsdcomp mtu 128".split()
        # NOTE(review): packet_count/byte_count are reported by "ppp status"
        # but never incremented anywhere in this module -- TODO confirm.
        self.packet_count = 0
        self.byte_count = 0
        self.ppp_fd = -1   # pty fd connected to the pppd child; -1 when down
        self.pid = -1      # pid of the pppd child; -1 when not running
        self.add_command('ppp', self.cmd_ppp, "ppp link control")

    def ppp_read(self, ppp_fd):
        '''called from main select loop in mavproxy when the pppd child
        sends us some data'''
        buf = os.read(ppp_fd, 100)
        if len(buf) == 0:
            # EOF on the child fd
            self.stop_ppp_link()
            return
        print("ppp packet len=%u" % len(buf))
        master = self.master
        master.mav.ppp_send(len(buf), buf)

    def start_ppp_link(self):
        '''startup the link'''
        cmd = ['pppd']
        cmd.extend(self.command)
        (self.pid, self.ppp_fd) = pty.fork()
        if self.pid == 0:
            # child process: become pppd; execvp only returns on failure
            os.execvp("pppd", cmd)
            raise RuntimeError("pppd exited")
        if self.ppp_fd == -1:
            print("Failed to create link fd")
            return
        # ensure fd is non-blocking
        fcntl.fcntl(self.ppp_fd, fcntl.F_SETFL,
                    fcntl.fcntl(self.ppp_fd, fcntl.F_GETFL) | os.O_NONBLOCK)
        self.byte_count = 0
        self.packet_count = 0
        # ask mavproxy to add us to the select loop
        # BUG FIX: the attribute provided by MPModule is self.mpstate;
        # self.mpself does not exist and raised AttributeError here.
        self.mpstate.select_extra[self.ppp_fd] = (self.ppp_read, self.ppp_fd)

    def stop_ppp_link(self):
        '''stop the link'''
        if self.ppp_fd == -1:
            return
        try:
            # BUG FIX: self.mpself -> self.mpstate (see start_ppp_link)
            self.mpstate.select_extra.pop(self.ppp_fd)
            os.close(self.ppp_fd)
            os.waitpid(self.pid, 0)
        except Exception:
            # best-effort teardown: the child may already be gone
            pass
        self.pid = -1
        self.ppp_fd = -1
        print("stopped ppp link")

    def cmd_ppp(self, args):
        '''set ppp parameters and start link'''
        usage = "ppp <command|start|stop>"
        if len(args) == 0:
            print(usage)
            return
        if args[0] == "command":
            if len(args) == 1:
                print("ppp.command=%s" % " ".join(self.command))
            else:
                self.command = args[1:]
        elif args[0] == "start":
            self.start_ppp_link()
        elif args[0] == "stop":
            self.stop_ppp_link()
        elif args[0] == "status":
            self.console.writeln("%u packets %u bytes" % (self.packet_count, self.byte_count))

    def unload(self):
        '''unload module'''
        self.stop_ppp_link()

    def mavlink_packet(self, m):
        '''handle an incoming mavlink packet'''
        if m.get_type() == 'PPP' and self.ppp_fd != -1:
            print("got ppp mavlink pkt len=%u" % m.length)
            os.write(self.ppp_fd, m.data[:m.length])
def init(mpstate):
    '''Entry point used by MAVProxy to instantiate this module.'''
    module = PPPModule(mpstate)
    return module
# -*- test-case-name: twisted.test.test_formmethod -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Form-based method objects.
This module contains support for descriptive method signatures that can be used
to format methods.
"""
import calendar
class FormException(Exception):
    """Raised when calling the form method fails.

    Positional arguments go to ``Exception`` unchanged; keyword arguments
    are recorded on ``self.descriptions``.
    """

    def __init__(self, *args, **kwargs):
        super(FormException, self).__init__(*args)
        self.descriptions = kwargs
class InputError(FormException):
    """
    An error occurred with some input.

    Raised by ``coerce`` implementations when a submitted value cannot be
    converted to the argument's type.
    """
class Argument:
    """Base class for form arguments.

    Carries a name, a default value, short/long descriptions, and a
    mapping of presentation hints.
    """

    # default value for argument, if no other default is given
    defaultDefault = None

    def __init__(self, name, default=None, shortDesc=None,
                 longDesc=None, hints=None, allowNone=1):
        self.name = name
        self.allowNone = allowNone
        self.default = self.defaultDefault if default is None else default
        self.shortDesc = shortDesc
        self.longDesc = longDesc
        self.hints = hints or {}

    def addHints(self, **kwargs):
        """Merge the given keyword arguments into the presentation hints."""
        self.hints.update(kwargs)

    def getHint(self, name, default=None):
        """Look up one hint, falling back to *default* when absent."""
        return self.hints.get(name, default)

    def getShortDescription(self):
        """Return the short description, or a capitalized name."""
        return self.shortDesc or self.name.capitalize()

    def getLongDescription(self):
        """Return the long description, or an empty string."""
        return self.longDesc or ''

    def coerce(self, val):
        """Convert the value to the correct format."""
        raise NotImplementedError("implement in subclass")
class String(Argument):
    """A single string, optionally length-constrained.

    @param min: minimum accepted length in characters.
    @param max: maximum accepted length, or C{None} for no limit.
    """
    defaultDefault = ''
    min = 0
    max = None

    def __init__(self, name, default=None, shortDesc=None,
                 longDesc=None, hints=None, allowNone=1, min=0, max=None):
        Argument.__init__(self, name, default=default, shortDesc=shortDesc,
                          longDesc=longDesc, hints=hints, allowNone=allowNone)
        self.min = min
        self.max = max

    def coerce(self, val):
        """Return C{val} as a string, enforcing the length bounds.

        @raise InputError: if the string violates C{min} or C{max}.
        """
        s = str(val)
        if len(s) < self.min:
            raise InputError("Value must be at least %s characters long" % self.min)
        # identity comparison with None is the correct idiom (was != None);
        # also return the already-computed string instead of str(val) again
        if self.max is not None and len(s) > self.max:
            raise InputError("Value must be at most %s characters long" % self.max)
        return s
class Text(String):
    """A long string.

    Identical to L{String}; a separate class so renderers can choose a
    multi-line widget for it.
    """
class Password(String):
    """A string which should be obscured when input.

    Identical to L{String}; the distinct type lets renderers choose an
    obscured input widget.
    """
class VerifiedPassword(String):
    """A string that should be obscured when input and needs verification.

    C{coerce} expects a two-item sequence containing the password and its
    confirmation; both must match and satisfy the length bounds.
    """
    def coerce(self, vals):
        """Return the verified password.

        @raise InputError: if the two entries differ or the password
            violates the C{min}/C{max} length bounds.
        """
        if len(vals) != 2 or vals[0] != vals[1]:
            raise InputError("Please enter the same password twice.")
        s = str(vals[0])
        if len(s) < self.min:
            raise InputError("Value must be at least %s characters long" % self.min)
        # identity comparison with None is the correct idiom (was != None)
        if self.max is not None and len(s) > self.max:
            raise InputError("Value must be at most %s characters long" % self.max)
        return s
class Hidden(String):
    """A string which is not displayed.
    The passed default is used as the value.
    """
class Integer(Argument):
    """A single integer.
    """
    defaultDefault = None

    def __init__(self, name, allowNone=1, default=None, shortDesc=None,
                 longDesc=None, hints=None):
        # allowNone historically comes first in this signature; the
        # positional order is kept stable so existing callers don't break.
        Argument.__init__(self, name, default, shortDesc, longDesc, hints,
                          allowNone)

    def coerce(self, val):
        """Convert *val* to an int, or None when blank and None is allowed.

        @raise InputError: if *val* is not a whole number.
        """
        if self.allowNone and not val.strip():
            return None
        try:
            return int(val)
        except ValueError:
            raise InputError(
                "%s is not valid, please enter a whole number, e.g. 10" % val)
class IntegerRange(Integer):
    """An integer restricted to the inclusive range [min, max]."""

    def __init__(self, name, min, max, allowNone=1, default=None, shortDesc=None,
                 longDesc=None, hints=None):
        self.min = min
        self.max = max
        Integer.__init__(self, name, allowNone=allowNone, default=default, shortDesc=shortDesc,
                         longDesc=longDesc, hints=hints)

    def coerce(self, val):
        """Coerce to int and enforce the [min, max] bounds.

        @raise InputError: if the value falls outside the range.
        """
        result = Integer.coerce(self, val)
        # identity comparison with None is the correct idiom (was == None)
        if self.allowNone and result is None:
            return result
        if result < self.min:
            raise InputError("Value %s is too small, it should be at least %s" % (result, self.min))
        if result > self.max:
            raise InputError("Value %s is too large, it should be at most %s" % (result, self.max))
        return result
class Float(Argument):
    """A single floating-point number."""
    defaultDefault = None

    def __init__(self, name, allowNone=1, default=None, shortDesc=None,
                 longDesc=None, hints=None):
        # allowNone historically comes first in this signature; the
        # positional order is kept stable so existing callers don't break.
        Argument.__init__(self, name, default, shortDesc, longDesc, hints,
                          allowNone)

    def coerce(self, val):
        """Convert *val* to a float, or None when blank and None is allowed.

        @raise InputError: if *val* is not a valid float.
        """
        if self.allowNone and not val.strip():
            return None
        try:
            return float(val)
        except ValueError:
            raise InputError("Invalid float: %s" % val)
class Choice(Argument):
    """
    The result of a choice between enumerated types.  The choices should
    be a list of tuples of tag, value, and description.  The tag will be
    the value returned if the user hits "Submit", and the description
    is the label for the enumerated type.  default is a list of all the
    values (second elements in choices).  If no defaults are specified,
    initially the first item will be selected.  Only one item can (should)
    be selected at once.
    """
    def __init__(self, name, choices=[], default=[], shortDesc=None,
                 longDesc=None, hints=None, allowNone=1):
        self.choices = choices
        if choices and not default:
            # BUG FIX: build a fresh list instead of default.append(...).
            # Appending mutated the shared mutable default argument, so the
            # first choice of one instance leaked into every later Choice
            # created without an explicit default.
            default = [choices[0][1]]
        Argument.__init__(self, name, default, shortDesc, longDesc, hints,
                          allowNone=allowNone)

    def coerce(self, inIdent):
        """Return the value whose tag equals C{inIdent}.

        @raise InputError: if no choice carries that tag.
        """
        for ident, val, desc in self.choices:
            if ident == inIdent:
                return val
        raise InputError("Invalid Choice: %s" % inIdent)
class Flags(Argument):
    """
    The result of a checkbox group or multi-menu.  The flags should be a
    list of tuples of tag, value, and description.  The tag will be
    the value returned if the user hits "Submit", and the description
    is the label for the enumerated type.  default is a list of all the
    values (second elements in flags).  If no defaults are specified,
    initially nothing will be selected.  Several items may be selected at
    once.
    """
    def __init__(self, name, flags=(), default=(), shortDesc=None,
                 longDesc=None, hints=None, allowNone=1):
        self.flags = flags
        Argument.__init__(self, name, default, shortDesc, longDesc, hints,
                          allowNone=allowNone)

    def coerce(self, inFlagKeys):
        """Map each submitted tag to its flag value.

        @raise InputError: if any tag is not a known flag.
        """
        if not inFlagKeys:
            return []
        missing = object()
        selected = []
        for inKey in inFlagKeys:
            # first flag whose tag matches, mirroring the original search order
            value = next(
                (flagVal for flagKey, flagVal, _desc in self.flags
                 if flagKey == inKey),
                missing)
            if value is missing:
                raise InputError("Invalid Flag: %s" % inKey)
            selected.append(value)
        return selected
class CheckGroup(Flags):
    """Identical to L{Flags}; a separate class so renderers can
    special-case it (presumably as a checkbox group -- name suggests)."""
    pass
class RadioGroup(Choice):
    """Identical to L{Choice}; a separate class so renderers can
    special-case it (presumably as a radio-button group -- name suggests)."""
    pass
class Boolean(Argument):
    """An argument coerced to 0 or 1 from common textual spellings."""

    # lower-cased spellings that count as "false"
    _FALSE_WORDS = ('no', 'n', 'f', 'false', '0')

    def coerce(self, inVal):
        """Return 0 for empty or false-like input, 1 otherwise."""
        if not inVal:
            return 0
        if str(inVal).lower() in self._FALSE_WORDS:
            return 0
        return 1
class File(Argument):
    """An uploaded-file argument; it never has a meaningful default."""

    def __init__(self, name, allowNone=1, shortDesc=None, longDesc=None,
                 hints=None):
        Argument.__init__(self, name, None, shortDesc, longDesc, hints,
                          allowNone=allowNone)

    def coerce(self, file):
        """Pass the file object through, or None when absent and allowed.

        @raise InputError: if no file was supplied and None is disallowed.
        """
        if file:
            return file
        if self.allowNone:
            return None
        raise InputError("Invalid File")
def positiveInt(x):
    """Return int(x), raising ValueError unless the result is > 0."""
    value = int(x)
    if value <= 0:
        raise ValueError
    return value
class Date(Argument):
    """A date -- (year, month, day) tuple."""
    defaultDefault = None
    def __init__(self, name, allowNone=1, default=None, shortDesc=None,
                 longDesc=None, hints=None):
        # allowNone is deliberately not forwarded to Argument; it is set
        # directly below, and a 1970-01-01 default substitutes for None
        # when None is disallowed.
        Argument.__init__(self, name, default, shortDesc, longDesc, hints)
        self.allowNone = allowNone
        if not allowNone:
            self.defaultDefault = (1970, 1, 1)
    def coerce(self, args):
        """Return tuple of ints (year, month, day)."""
        # Three empty strings means "no date entered".
        if tuple(args) == ("", "", "") and self.allowNone:
            return None
        try:
            year, month, day = map(positiveInt, args)
        except ValueError:
            raise InputError("Invalid date")
        # February 29 is valid only in leap years.
        if (month, day) == (2, 29):
            if not calendar.isleap(year):
                raise InputError("%d was not a leap year" % year)
            else:
                return year, month, day
        try:
            # calendar.mdays[m] is the day count of month m (index 0 unused);
            # months > 12 fall off the end and raise IndexError.
            mdays = calendar.mdays[month]
        except IndexError:
            raise InputError("Invalid date")
        if day > mdays:
            raise InputError("Invalid date")
        return year, month, day
class Submit(Choice):
    """Submit button or a reasonable facsimile thereof."""

    def __init__(self, name, choices=[("Submit", "submit", "Submit form")],
                 reset=0, shortDesc=None, longDesc=None, allowNone=0, hints=None):
        Choice.__init__(self, name, choices=choices, shortDesc=shortDesc,
                        longDesc=longDesc, hints=hints)
        self.allowNone = allowNone
        self.reset = reset

    def coerce(self, value):
        """Coerce via L{Choice}, but permit a missing value when allowed."""
        if self.allowNone and not value:
            return None
        return Choice.coerce(self, value)
class PresentationHint:
    """
    A hint to a particular system.

    Marker base class; hint objects presumably travel via each argument's
    ``hints`` mapping to renderer-specific code -- confirm with renderers.
    """
class MethodSignature:
    """An ordered collection of arguments describing a callable."""

    def __init__(self, *sigList):
        """Store the given arguments, in order, as this signature."""
        self.methodSignature = sigList

    def getArgument(self, name):
        """Return the argument named *name*, or None when absent."""
        for candidate in self.methodSignature:
            if candidate.name == name:
                return candidate
        return None

    def method(self, callable, takesRequest=False):
        """Bind *callable* to this signature as a L{FormMethod}."""
        return FormMethod(self, callable, takesRequest)
class FormMethod:
    """A callable object with a signature."""

    def __init__(self, signature, callable, takesRequest=False):
        self.signature = signature
        self.callable = callable
        self.takesRequest = takesRequest

    def getArgs(self):
        """Return the signature's arguments as a tuple."""
        return tuple(self.signature.methodSignature)

    def call(self, *args, **kw):
        """Invoke the wrapped callable with the given arguments."""
        return self.callable(*args, **kw)
# -*- coding: utf-8 -*-
from lxml import objectify
import urlparse
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment.tests.common import PaymentAcquirerCommon
from openerp.addons.payment_adyen.controllers.main import AdyenController
from openerp.osv.orm import except_orm
from openerp.tools import mute_logger
class AdyenCommon(PaymentAcquirerCommon):
    """Shared fixtures for the Adyen payment-acquirer tests."""
    def setUp(self):
        super(AdyenCommon, self).setUp()
        cr, uid = self.cr, self.uid
        # base URL the acquirer redirects back to after payment
        self.base_url = self.registry('ir.config_parameter').get_param(cr, uid, 'web.base.url')
        # get the adyen account
        model, self.adyen_id = self.registry('ir.model.data').get_object_reference(cr, uid, 'payment_adyen', 'payment_acquirer_adyen')
        # some CC (always use expiration date 06 / 2016, cvc 737, cid 7373 (amex))
        # NOTE(review): the single-entry attributes below use redundant
        # parentheses, so e.g. self.amex is one (number, cvc) tuple while
        # self.discover / self.mastercard / self.visa are tuples OF such
        # tuples -- confirm consumers handle both shapes.
        self.amex = (('370000000000002', '7373'))
        self.dinersclub = (('36006666333344', '737'))
        self.discover = (('6011601160116611', '737'), ('644564456445644', '737'))
        self.jcb = (('3530111333300000', '737'))
        self.mastercard = (('5555444433331111', '737'), ('5555555555554444', '737'))
        self.visa = (('4111 1111 1111 1111', '737'), ('4444333322221111', '737'))
        self.mcdebit = (('5500000000000004', '737'))
        self.visadebit = (('4400000000000008', '737'))
        self.maestro = (('6731012345678906', '737'))
        self.laser = (('630495060000000000', '737'))
        self.hipercard = (('6062828888666688', '737'))
        self.dsmastercard = (('521234567890 1234', '737', 'user', 'password'))
        self.dsvisa = (('4212345678901237', '737', 'user', 'password'))
        self.mistercash = (('6703444444444449', None, 'user', 'password'))
class AdyenServer2Server(AdyenCommon):
    """Placeholder for Adyen server-to-server transaction tests."""
    def test_00_tx_management(self):
        # TODO(review): this test body only unpacks the environment and
        # asserts nothing -- it appears to be an unfinished stub.
        cr, uid, context = self.cr, self.uid, {}
class AdyenForm(AdyenCommon):
    """Form-rendering tests for the Adyen acquirer."""
    def test_10_adyen_form_render(self):
        cr, uid, context = self.cr, self.uid, {}
        # be sure not to do stupid things
        adyen = self.payment_acquirer.browse(self.cr, self.uid, self.adyen_id, None)
        self.assertEqual(adyen.environment, 'test', 'test without test environment')
        # ----------------------------------------
        # Test: button direct rendering
        # ----------------------------------------
        # expected values of the hidden inputs in the rendered payment form
        form_values = {
            'merchantAccount': 'OpenERPCOM',
            'merchantReference': 'test_ref0',
            'skinCode': 'cbqYWvVL',
            'paymentAmount': '1',
            'currencyCode': 'EUR',
            'resURL': '%s' % urlparse.urljoin(self.base_url, AdyenController._return_url),
        }
        # render the button
        res = self.payment_acquirer.render(
            cr, uid, self.adyen_id,
            'test_ref0', 0.01, self.currency_euro_id,
            partner_id=None,
            partner_values=self.buyer_values,
            context=context)
        # check form result
        tree = objectify.fromstring(res)
        self.assertEqual(tree.get('action'), 'https://test.adyen.com/hpp/pay.shtml', 'adyen: wrong form POST url')
        for form_input in tree.input:
            # signed / time-dependent fields cannot be compared to fixed values
            if form_input.get('name') in ['submit', 'shipBeforeDate', 'sessionValidity', 'shopperLocale', 'merchantSig']:
                continue
            self.assertEqual(
                form_input.get('value'),
                form_values[form_input.get('name')],
                'adyen: wrong value for input %s: received %s instead of %s' % (form_input.get('name'), form_input.get('value'), form_values[form_input.get('name')])
            )
    # @mute_logger('openerp.addons.payment_adyen.models.adyen', 'ValidationError')
    # def test_20_paypal_form_management(self):
    #     cr, uid, context = self.cr, self.uid, {}
    #     # be sure not to do stupid things
    #     adyen = self.payment_acquirer.browse(self.cr, self.uid, self.adyen_id, None)
    #     self.assertEqual(adyen.env, 'test', 'test without test env')
    #     {'authResult': u'AUTHORISED',
    #      'merchantReference': u'SO014',
    #      'merchantReturnData': u'return_url=/shop/payment/validate',
    #      'merchantSig': u'GaLRO8aMHFaQX3gQ5BVP/YETzeA=',
    #      'paymentMethod': u'visa',
    #      'pspReference': u'8813859935907337',
    #      'shopperLocale': u'en_US',
    #      'skinCode': u'cbqYWvVL'}
import numpy as nm
import libxml2
from array import *
from libxml2 import xmlAttr
import matplotlib.pyplot as plt
# read data
# NOTE: Python 2 script (print statements, py2 map semantics).
# Extract volume / totalEnergy attribute pairs from eV.xml via XPath.
eVdoc = libxml2.parseFile("./eV.xml")
ctxt = eVdoc.xpathNewContext()
Volume=nm.array(map(float,map(xmlAttr.getContent,ctxt.xpathEval("//@volume"))))
totalEnergy=nm.array(map(float,map(xmlAttr.getContent,ctxt.xpathEval("//@totalEnergy"))))
# make quadratic fit -- a parabola E(V) is a local model near the minimum
p=nm.polyfit(Volume,totalEnergy,2)
curve=nm.poly1d(p)
# find root of derivative to get minimum
minv=nm.roots(nm.polyder(p))
print 'minimum Volume '+str(minv)
# scale ~ (V/2)^(1/3): assumes a 2-atom cell -- TODO confirm cell size
print 'minimum energy at scale '+str(pow(minv/2,1./3.))
# x values for plotting polynomial
xa = nm.linspace(Volume[0],Volume[-1],100)
# plot fitted curve plus the raw data points, annotating the minimum
plt.figure(1)
plt.title('Ag Volume')
plt.ylabel(r'total energy in $[Hartree]$')
plt.xlabel(r'volume in $[Bohr]^3$')
plt.plot(xa,curve(xa),'-')
plt.plot(Volume,totalEnergy,'o')
plt.annotate('minimum Volume '+str(minv), xy=(minv,curve(minv)), xycoords='data' ,
             xytext=(minv-7,curve(minv)+0.002) , arrowprops=dict(arrowstyle="->"))
plt.savefig('EV.png')
print 'plot saved as EV.png'
plt.show()
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_PLATFORM_CLOUD_OAUTH_CLIENT_H_
#define TENSORFLOW_CORE_PLATFORM_CLOUD_OAUTH_CLIENT_H_
#include <memory>
#include "json/json.h"
#include "xla/tsl/platform/cloud/oauth_client.h"
#include "tensorflow/core/platform/cloud/http_request.h"
#include "tensorflow/core/platform/env.h"
#include "tensorflow/core/platform/status.h"
namespace tensorflow {
// Forwarding alias: the implementation lives in TSL (see the xla/tsl include
// above); this header keeps tensorflow::OAuthClient compiling for existing
// includers of the old path.
using tsl::OAuthClient; // NOLINT(misc-unused-using-decls)
}  // namespace tensorflow
#endif  // TENSORFLOW_CORE_PLATFORM_CLOUD_OAUTH_CLIENT_H_
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package clistate
import (
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"sync"
"time"
"github.com/hashicorp/terraform/internal/command/workdir"
"github.com/hashicorp/terraform/internal/states/statemgr"
)
// LocalState manages a state storage that is local to the filesystem.
type LocalState struct {
	// mu guards every field below, including the file handle.
	mu sync.Mutex
	// Path is the path to read the state from. PathOut is the path to
	// write the state to. If PathOut is not specified, Path will be used.
	// If PathOut already exists, it will be overwritten.
	Path string
	PathOut string
	// the file handle corresponding to PathOut
	stateFileOut *os.File
	// While the stateFileOut will correspond to the lock directly,
	// store and check the lock ID to maintain a strict state.Locker
	// implementation.
	lockID string
	// created is set to true if stateFileOut didn't exist before we created it.
	// This is mostly so we can clean up empty files during tests, but doesn't
	// hurt to remove file we never wrote to.
	created bool
	// state is the live in-memory state; readState is the snapshot as last
	// read or set, kept separate for change detection.
	state *workdir.BackendStateFile
	readState *workdir.BackendStateFile
	// written records whether state has been written out at least once.
	written bool
}
// SetState will force a specific state in-memory for this local state.
func (s *LocalState) SetState(state *workdir.BackendStateFile) {
	s.mu.Lock()
	defer s.mu.Unlock()
	// Store independent copies so later caller mutations cannot alias the
	// snapshot used for change detection.
	s.state = state.DeepCopy()
	s.readState = state.DeepCopy()
}
// State returns a deep copy of the in-memory state, so callers cannot
// mutate the manager's copy through the returned pointer.
//
// StateReader impl.
func (s *LocalState) State() *workdir.BackendStateFile {
	return s.state.DeepCopy()
}
// WriteState for LocalState always persists the state as well.
// TODO: this should use a more robust method of writing state, by first
// writing to a temp file on the same filesystem, and renaming the file over
// the original.
//
// StateWriter impl.
func (s *LocalState) WriteState(state *workdir.BackendStateFile) error {
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.stateFileOut == nil {
		if err := s.createStateFiles(); err != nil {
			// BUG FIX: this previously returned nil, silently reporting
			// success even though the output file could not be created.
			return err
		}
	}
	defer s.stateFileOut.Sync()
	s.state = state.DeepCopy() // don't want mutations before we actually get this written to disk
	// Rewind and truncate so the new payload fully replaces the old content.
	if _, err := s.stateFileOut.Seek(0, io.SeekStart); err != nil {
		return err
	}
	if err := s.stateFileOut.Truncate(0); err != nil {
		return err
	}
	if state == nil {
		// if we have no state, don't write anything else.
		return nil
	}
	raw, err := workdir.EncodeBackendStateFile(state)
	if err != nil {
		return err
	}
	_, err = s.stateFileOut.Write(raw)
	if err != nil {
		return err
	}
	s.written = true
	return nil
}
// PersistState for LocalState is a no-op since WriteState always persists.
// It exists only to satisfy the state-persister interface.
//
// StatePersister impl.
func (s *LocalState) PersistState() error {
	return nil
}
// RefreshState reloads the in-memory state from disk, choosing the input
// file or the already-open output handle depending on what has happened so
// far (see the comment below for the Windows-specific reasoning).
//
// StateRefresher impl.
func (s *LocalState) RefreshState() error {
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.PathOut == "" {
		s.PathOut = s.Path
	}
	var reader io.Reader
	// The s.Path file is only OK to read if we have not written any state out
	// (in which case the same state needs to be read in), and no state output file
	// has been opened (possibly via a lock) or the input path is different
	// than the output path.
	// This is important for Windows, as if the input file is the same as the
	// output file, and the output file has been locked already, we can't open
	// the file again.
	if !s.written && (s.stateFileOut == nil || s.Path != s.PathOut) {
		// we haven't written a state file yet, so load from Path
		f, err := os.Open(s.Path)
		if err != nil {
			// It is okay if the file doesn't exist, we treat that as a nil state
			if !os.IsNotExist(err) {
				return err
			}
			// a nil reader means no state at all, handled below
			reader = nil
		} else {
			defer f.Close()
			reader = f
		}
	} else {
		// no state to refresh
		if s.stateFileOut == nil {
			return nil
		}
		// we have a state file, make sure we're at the start
		// NOTE(review): the Seek error is ignored here -- confirm intended.
		s.stateFileOut.Seek(0, io.SeekStart)
		reader = s.stateFileOut
	}
	var state *workdir.BackendStateFile
	if reader != nil { // otherwise we'll leave state as nil
		raw, err := io.ReadAll(reader)
		if err != nil {
			return err
		}
		state, err = workdir.ParseBackendStateFile(raw)
		if err != nil {
			return err
		}
	}
	s.state = state
	s.readState = s.state.DeepCopy()
	return nil
}
// Lock implements a local filesystem state.Locker.
func (s *LocalState) Lock(info *statemgr.LockInfo) (string, error) {
	s.mu.Lock()
	defer s.mu.Unlock()
	// Lazily open/create the output file; the OS-level lock is taken on it.
	if s.stateFileOut == nil {
		if err := s.createStateFiles(); err != nil {
			return "", err
		}
	}
	// A non-empty lockID means this manager already holds the lock.
	if s.lockID != "" {
		return "", fmt.Errorf("state %q already locked", s.stateFileOut.Name())
	}
	// On lock failure, enrich the error with whatever lock-info metadata
	// is already on disk so the holder can be reported.
	if err := s.lock(); err != nil {
		info, infoErr := s.lockInfo()
		if infoErr != nil {
			err = errors.Join(err, infoErr)
		}
		lockErr := &statemgr.LockError{
			Info: info,
			Err: err,
		}
		return "", lockErr
	}
	s.lockID = info.ID
	return s.lockID, s.writeLockInfo(info)
}
// Unlock releases the file lock identified by id, removes the lock-info
// metadata, closes the output handle, and cleans up an empty state file
// that this manager created but never wrote to.
func (s *LocalState) Unlock(id string) error {
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.lockID == "" {
		return fmt.Errorf("LocalState not locked")
	}
	// The caller must present the same ID that Lock returned.
	if id != s.lockID {
		idErr := fmt.Errorf("invalid lock id: %q. current id: %q", id, s.lockID)
		info, err := s.lockInfo()
		if err != nil {
			idErr = errors.Join(idErr, err)
		}
		return &statemgr.LockError{
			Err: idErr,
			Info: info,
		}
	}
	os.Remove(s.lockInfoPath())
	fileName := s.stateFileOut.Name()
	unlockErr := s.unlock()
	s.stateFileOut.Close()
	s.stateFileOut = nil
	s.lockID = ""
	// clean up the state file if we created it an never wrote to it
	stat, err := os.Stat(fileName)
	if err == nil && stat.Size() == 0 && s.created {
		os.Remove(fileName)
	}
	return unlockErr
}
// Open the state file, creating the directories and file as needed.
func (s *LocalState) createStateFiles() error {
	if s.PathOut == "" {
		s.PathOut = s.Path
	}
	// yes this could race, but we only use it to clean up empty files
	if _, err := os.Stat(s.PathOut); os.IsNotExist(err) {
		s.created = true
	}
	// Create all the directories
	if err := os.MkdirAll(filepath.Dir(s.PathOut), 0755); err != nil {
		return err
	}
	// Open read/write, creating the file if absent; the handle is held in
	// stateFileOut for subsequent writes and locking.
	f, err := os.OpenFile(s.PathOut, os.O_RDWR|os.O_CREATE, 0666)
	if err != nil {
		return err
	}
	s.stateFileOut = f
	return nil
}
// lockInfoPath returns the path of the lock-info metadata file that sits
// alongside the state file, as a dot-prefixed ".<name>.lock.info".
func (s *LocalState) lockInfoPath() string {
	dir, base := filepath.Split(s.Path)
	if base == "" {
		panic("empty state file path")
	}
	// strip a leading dot so we don't produce a double-dotted name
	if base[0] == '.' {
		base = base[1:]
	}
	return filepath.Join(dir, "."+base+".lock.info")
}
// lockInfo returns the data in a lock info file
func (s *LocalState) lockInfo() (*statemgr.LockInfo, error) {
	path := s.lockInfoPath()
	infoData, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, err
	}
	// The lock-info file is JSON produced by writeLockInfo.
	info := statemgr.LockInfo{}
	err = json.Unmarshal(infoData, &info)
	if err != nil {
		return nil, fmt.Errorf("state file %q locked, but could not unmarshal lock info: %s", s.Path, err)
	}
	return &info, nil
}
// write a new lock info file
func (s *LocalState) writeLockInfo(info *statemgr.LockInfo) error {
	path := s.lockInfoPath()
	// Stamp the info with the locked path and the current UTC time before
	// marshalling it to disk.
	info.Path = s.Path
	info.Created = time.Now().UTC()
	err := ioutil.WriteFile(path, info.Marshal(), 0600)
	if err != nil {
		return fmt.Errorf("could not write lock info for %q: %s", s.Path, err)
	}
	return nil
}
// Copyright 2025 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package auth
import (
"testing"
"go.etcd.io/etcd/client/pkg/v3/testutil"
)
// TestMain delegates to testutil so every test in this package runs with
// goroutine-leak detection enabled.
func TestMain(m *testing.M) {
	testutil.MustTestMainWithLeakDetection(m)
}
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from tempest.api.compute import base
from tempest import config
from tempest import test
CONF = config.CONF
class DeleteServersTestJSON(base.BaseV2ComputeTest):
    """Verify server deletion succeeds from every reachable VM state."""
    # NOTE: Server creations of each test class should be under 10
    # for preventing "Quota exceeded for instances"
    @classmethod
    def setup_clients(cls):
        super(DeleteServersTestJSON, cls).setup_clients()
        cls.client = cls.servers_client
    @test.idempotent_id('9e6e0c87-3352-42f7-9faf-5d6210dbd159')
    def test_delete_server_while_in_building_state(self):
        # Delete a server while it's VM state is Building
        server = self.create_test_server(wait_until='BUILD')
        self.client.delete_server(server['id'])
        self.client.wait_for_server_termination(server['id'])
    @test.idempotent_id('925fdfb4-5b13-47ea-ac8a-c36ae6fddb05')
    def test_delete_active_server(self):
        # Delete a server while it's VM state is Active
        server = self.create_test_server(wait_until='ACTIVE')
        self.client.delete_server(server['id'])
        self.client.wait_for_server_termination(server['id'])
    @test.idempotent_id('546d368c-bb6c-4645-979a-83ed16f3a6be')
    def test_delete_server_while_in_shutoff_state(self):
        # Delete a server while it's VM state is Shutoff
        server = self.create_test_server(wait_until='ACTIVE')
        self.client.stop(server['id'])
        self.client.wait_for_server_status(server['id'], 'SHUTOFF')
        self.client.delete_server(server['id'])
        self.client.wait_for_server_termination(server['id'])
    @test.idempotent_id('943bd6e8-4d7a-4904-be83-7a6cc2d4213b')
    @testtools.skipUnless(CONF.compute_feature_enabled.pause,
                          'Pause is not available.')
    def test_delete_server_while_in_pause_state(self):
        # Delete a server while it's VM state is Pause
        server = self.create_test_server(wait_until='ACTIVE')
        self.client.pause_server(server['id'])
        self.client.wait_for_server_status(server['id'], 'PAUSED')
        self.client.delete_server(server['id'])
        self.client.wait_for_server_termination(server['id'])
    @test.idempotent_id('1f82ebd3-8253-4f4e-b93f-de9b7df56d8b')
    @testtools.skipUnless(CONF.compute_feature_enabled.suspend,
                          'Suspend is not available.')
    def test_delete_server_while_in_suspended_state(self):
        # Delete a server while it's VM state is Suspended
        server = self.create_test_server(wait_until='ACTIVE')
        self.client.suspend_server(server['id'])
        self.client.wait_for_server_status(server['id'], 'SUSPENDED')
        self.client.delete_server(server['id'])
        self.client.wait_for_server_termination(server['id'])
    @test.idempotent_id('bb0cb402-09dd-4947-b6e5-5e7e1cfa61ad')
    @testtools.skipUnless(CONF.compute_feature_enabled.shelve,
                          'Shelve is not available.')
    def test_delete_server_while_in_shelved_state(self):
        # Delete a server while it's VM state is Shelved
        server = self.create_test_server(wait_until='ACTIVE')
        self.client.shelve_server(server['id'])
        # A non-negative shelved_offload_time means the compute service
        # offloads shelved servers automatically, so wait for the final
        # SHELVED_OFFLOADED state (allowing extra time for the offload).
        offload_time = CONF.compute.shelved_offload_time
        if offload_time >= 0:
            self.client.wait_for_server_status(server['id'],
                                               'SHELVED_OFFLOADED',
                                               extra_timeout=offload_time)
        else:
            self.client.wait_for_server_status(server['id'],
                                               'SHELVED')
        self.client.delete_server(server['id'])
        self.client.wait_for_server_termination(server['id'])
    @test.idempotent_id('ab0c38b4-cdd8-49d3-9b92-0cb898723c01')
    @testtools.skipIf(not CONF.compute_feature_enabled.resize,
                      'Resize not available.')
    def test_delete_server_while_in_verify_resize_state(self):
        # Delete a server while it's VM state is VERIFY_RESIZE
        server = self.create_test_server(wait_until='ACTIVE')
        self.client.resize(server['id'], self.flavor_ref_alt)
        self.client.wait_for_server_status(server['id'], 'VERIFY_RESIZE')
        self.client.delete_server(server['id'])
        self.client.wait_for_server_termination(server['id'])
    @test.idempotent_id('d0f3f0d6-d9b6-4a32-8da4-23015dcab23c')
    @test.services('volume')
    def test_delete_server_while_in_attached_volume(self):
        # Delete a server while a volume is attached to it
        volumes_client = self.volumes_extensions_client
        device = '/dev/%s' % CONF.compute.volume_device_name
        server = self.create_test_server(wait_until='ACTIVE')
        volume = volumes_client.create_volume()
        self.addCleanup(volumes_client.delete_volume, volume['id'])
        volumes_client.wait_for_volume_status(volume['id'], 'available')
        self.client.attach_volume(server['id'],
                                  volume['id'],
                                  device=device)
        volumes_client.wait_for_volume_status(volume['id'], 'in-use')
        self.client.delete_server(server['id'])
        self.client.wait_for_server_termination(server['id'])
        # Deleting the server should release the volume back to 'available'.
        volumes_client.wait_for_volume_status(volume['id'], 'available')
class DeleteServersAdminTestJSON(base.BaseV2ComputeAdminTest):
    """Verify deletion paths that require administrator privileges."""
    # NOTE: Server creations of each test class should be under 10
    # for preventing "Quota exceeded for instances".
    @classmethod
    def setup_clients(cls):
        super(DeleteServersAdminTestJSON, cls).setup_clients()
        cls.non_admin_client = cls.servers_client
        cls.admin_client = cls.os_adm.servers_client
    @test.idempotent_id('99774678-e072-49d1-9d2a-49a59bc56063')
    def test_delete_server_while_in_error_state(self):
        # Delete a server while it's VM state is error
        server = self.create_test_server(wait_until='ACTIVE')
        self.admin_client.reset_state(server['id'], state='error')
        # Verify server's state
        server = self.non_admin_client.show_server(server['id'])
        self.assertEqual(server['status'], 'ERROR')
        self.non_admin_client.delete_server(server['id'])
        # ignore_error: an ERROR-state server may vanish with an error code
        self.servers_client.wait_for_server_termination(server['id'],
                                                        ignore_error=True)
    @test.idempotent_id('73177903-6737-4f27-a60c-379e8ae8cf48')
    def test_admin_delete_servers_of_others(self):
        # Administrator can delete servers of others
        server = self.create_test_server(wait_until='ACTIVE')
        self.admin_client.delete_server(server['id'])
        self.servers_client.wait_for_server_termination(server['id'])
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Extended Ips API extension."""
import itertools
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
# Soft policy check: the extension only augments responses for requests the
# 'extended_ips_mac' policy authorizes.
authorize = extensions.soft_extension_authorizer('compute', 'extended_ips_mac')
class ExtendedIpsMacController(wsgi.Controller):
    """Controller extension adding a MAC-address entry to each server IP.

    NOTE: the previous no-op __init__ (which only delegated to the
    superclass with the same arguments) has been removed; the inherited
    constructor is sufficient.
    """

    def _extend_server(self, context, server, instance):
        """Annotate every visible address of *server* with its MAC.

        Adds an "OS-EXT-IPS-MAC:mac_addr" key to each entry of
        server['addresses'] for networks present on the server.
        """
        key = "%s:mac_addr" % Extended_ips_mac.alias
        networks = common.get_networks_for_instance(context, instance)
        for label, network in networks.items():
            # NOTE(vish): ips are hidden in some states via the
            # hide_server_addresses extension.
            if label in server['addresses']:
                all_ips = itertools.chain(network["ips"],
                                          network["floating_ips"])
                for i, ip in enumerate(all_ips):
                    server['addresses'][label][i][key] = ip['mac_address']

    @wsgi.extends
    def show(self, req, resp_obj, id):
        """Extend the 'show server' response with MAC addresses."""
        context = req.environ['nova.context']
        if authorize(context):
            server = resp_obj.obj['server']
            db_instance = req.get_db_instance(server['id'])
            # server['id'] is guaranteed to be in the cache due to
            # the core API adding it in its 'show' method.
            self._extend_server(context, server, db_instance)

    @wsgi.extends
    def detail(self, req, resp_obj):
        """Extend the 'list servers detail' response with MAC addresses."""
        context = req.environ['nova.context']
        if authorize(context):
            servers = list(resp_obj.obj['servers'])
            for server in servers:
                db_instance = req.get_db_instance(server['id'])
                # server['id'] is guaranteed to be in the cache due to
                # the core API adding it in its 'detail' method.
                self._extend_server(context, server, db_instance)
class Extended_ips_mac(extensions.ExtensionDescriptor):
    """Adds mac address parameter to the ip list."""

    name = "ExtendedIpsMac"
    alias = "OS-EXT-IPS-MAC"
    namespace = ("http://docs.openstack.org/compute/ext/"
                 "extended_ips_mac/api/v1.1")
    updated = "2013-03-07T00:00:00Z"

    def get_controller_extensions(self):
        # Attach the MAC-address controller to the core 'servers' resource.
        return [extensions.ControllerExtension(
            self, 'servers', ExtendedIpsMacController())]
// This file was automatically generated from channels.md by Knit tool. Do not edit.
package kotlinx.coroutines.guide.exampleChannel03
import kotlinx.coroutines.*
import kotlinx.coroutines.channels.*
// Launches a producer coroutine that emits the squares of 1..5 and then
// closes the channel.
fun CoroutineScope.produceSquares(): ReceiveChannel<Int> = produce {
    (1..5).forEach { n -> send(n * n) }
}
// Consumes every value from the producer, printing each one, then reports
// completion.
fun main() = runBlocking {
    val results = produceSquares()
    results.consumeEach { value -> println(value) }
    println("Done!")
}
{
"name": "angular.dev",
"version": "0.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "angular.dev",
"version": "0.0.0",
"dependencies": {
"@angular/common": "^21.1.0-next",
"@angular/compiler": "^21.1.0-next",
"@angular/core": "^21.1.0-next",
"@angular/forms": "^21.1.0-next",
"@angular/platform-browser": "^21.1.0-next",
"rxjs": "~7.8.0",
"tslib": "^2.3.0",
"zone.js": "~0.16.0"
},
"devDependencies": {
"@angular/build": "^21.1.0-next",
"@angular/cli": "^21.1.0-next",
"@angular/compiler-cli": "^21.1.0-next",
"typescript": "~5.9.2"
}
},
"node_modules/@algolia/abtesting": {
"version": "1.12.2",
"resolved": "https://registry.npmjs.org/@algolia/abtesting/-/abtesting-1.12.2.tgz",
"integrity": "sha512-oWknd6wpfNrmRcH0vzed3UPX0i17o4kYLM5OMITyMVM2xLgaRbIafoxL0e8mcrNNb0iORCJA0evnNDKRYth5WQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2",
"@algolia/requester-browser-xhr": "5.46.2",
"@algolia/requester-fetch": "5.46.2",
"@algolia/requester-node-http": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-abtesting": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/@algolia/client-abtesting/-/client-abtesting-5.46.2.tgz",
"integrity": "sha512-oRSUHbylGIuxrlzdPA8FPJuwrLLRavOhAmFGgdAvMcX47XsyM+IOGa9tc7/K5SPvBqn4nhppOCEz7BrzOPWc4A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2",
"@algolia/requester-browser-xhr": "5.46.2",
"@algolia/requester-fetch": "5.46.2",
"@algolia/requester-node-http": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-analytics": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-5.46.2.tgz",
"integrity": "sha512-EPBN2Oruw0maWOF4OgGPfioTvd+gmiNwx0HmD9IgmlS+l75DatcBkKOPNJN+0z3wBQWUO5oq602ATxIfmTQ8bA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2",
"@algolia/requester-browser-xhr": "5.46.2",
"@algolia/requester-fetch": "5.46.2",
"@algolia/requester-node-http": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-common": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.46.2.tgz",
"integrity": "sha512-Hj8gswSJNKZ0oyd0wWissqyasm+wTz1oIsv5ZmLarzOZAp3vFEda8bpDQ8PUhO+DfkbiLyVnAxsPe4cGzWtqkg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-insights": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/@algolia/client-insights/-/client-insights-5.46.2.tgz",
"integrity": "sha512-6dBZko2jt8FmQcHCbmNLB0kCV079Mx/DJcySTL3wirgDBUH7xhY1pOuUTLMiGkqM5D8moVZTvTdRKZUJRkrwBA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2",
"@algolia/requester-browser-xhr": "5.46.2",
"@algolia/requester-fetch": "5.46.2",
"@algolia/requester-node-http": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-personalization": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-5.46.2.tgz",
"integrity": "sha512-1waE2Uqh/PHNeDXGn/PM/WrmYOBiUGSVxAWqiJIj73jqPqvfzZgzdakHscIVaDl6Cp+j5dwjsZ5LCgaUr6DtmA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2",
"@algolia/requester-browser-xhr": "5.46.2",
"@algolia/requester-fetch": "5.46.2",
"@algolia/requester-node-http": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-query-suggestions": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/@algolia/client-query-suggestions/-/client-query-suggestions-5.46.2.tgz",
"integrity": "sha512-EgOzTZkyDcNL6DV0V/24+oBJ+hKo0wNgyrOX/mePBM9bc9huHxIY2352sXmoZ648JXXY2x//V1kropF/Spx83w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2",
"@algolia/requester-browser-xhr": "5.46.2",
"@algolia/requester-fetch": "5.46.2",
"@algolia/requester-node-http": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/client-search": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.46.2.tgz",
"integrity": "sha512-ZsOJqu4HOG5BlvIFnMU0YKjQ9ZI6r3C31dg2jk5kMWPSdhJpYL9xa5hEe7aieE+707dXeMI4ej3diy6mXdZpgA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2",
"@algolia/requester-browser-xhr": "5.46.2",
"@algolia/requester-fetch": "5.46.2",
"@algolia/requester-node-http": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/ingestion": {
"version": "1.46.2",
"resolved": "https://registry.npmjs.org/@algolia/ingestion/-/ingestion-1.46.2.tgz",
"integrity": "sha512-1Uw2OslTWiOFDtt83y0bGiErJYy5MizadV0nHnOoHFWMoDqWW0kQoMFI65pXqRSkVvit5zjXSLik2xMiyQJDWQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2",
"@algolia/requester-browser-xhr": "5.46.2",
"@algolia/requester-fetch": "5.46.2",
"@algolia/requester-node-http": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/monitoring": {
"version": "1.46.2",
"resolved": "https://registry.npmjs.org/@algolia/monitoring/-/monitoring-1.46.2.tgz",
"integrity": "sha512-xk9f+DPtNcddWN6E7n1hyNNsATBCHIqAvVGG2EAGHJc4AFYL18uM/kMTiOKXE/LKDPyy1JhIerrh9oYb7RBrgw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2",
"@algolia/requester-browser-xhr": "5.46.2",
"@algolia/requester-fetch": "5.46.2",
"@algolia/requester-node-http": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/recommend": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-5.46.2.tgz",
"integrity": "sha512-NApbTPj9LxGzNw4dYnZmj2BoXiAc8NmbbH6qBNzQgXklGklt/xldTvu+FACN6ltFsTzoNU6j2mWNlHQTKGC5+Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2",
"@algolia/requester-browser-xhr": "5.46.2",
"@algolia/requester-fetch": "5.46.2",
"@algolia/requester-node-http": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/requester-browser-xhr": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.46.2.tgz",
"integrity": "sha512-ekotpCwpSp033DIIrsTpYlGUCF6momkgupRV/FA3m62SreTSZUKjgK6VTNyG7TtYfq9YFm/pnh65bATP/ZWJEg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/requester-fetch": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.46.2.tgz",
"integrity": "sha512-gKE+ZFi/6y7saTr34wS0SqYFDcjHW4Wminv8PDZEi0/mE99+hSrbKgJWxo2ztb5eqGirQTgIh1AMVacGGWM1iw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@algolia/requester-node-http": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.46.2.tgz",
"integrity": "sha512-ciPihkletp7ttweJ8Zt+GukSVLp2ANJHU+9ttiSxsJZThXc4Y2yJ8HGVWesW5jN1zrsZsezN71KrMx/iZsOYpg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/client-common": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/@ampproject/remapping": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz",
"integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.5",
"@jridgewell/trace-mapping": "^0.3.24"
},
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@angular-devkit/architect": {
"version": "0.2101.0",
"resolved": "https://registry.npmjs.org/@angular-devkit/architect/-/architect-0.2101.0.tgz",
"integrity": "sha512-vnNAzWXwSRGTHk2K7woIQsj7WDYZp69Z3DBdlxkK0H08ymkJ/ELbhN0/AnIJNNtYCqEb57AH7Ro98n422beDuw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@angular-devkit/core": "21.1.0",
"rxjs": "7.8.2"
},
"bin": {
"architect": "bin/cli.js"
},
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=24.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
}
},
"node_modules/@angular-devkit/core": {
"version": "21.1.0",
"resolved": "https://registry.npmjs.org/@angular-devkit/core/-/core-21.1.0.tgz",
"integrity": "sha512-dPfVy0CictDjWffRv4pGTPOFjdlJL3ZkGUqxzaosUjMbJW+Ai9cNn1VNr7zxYZ4kem3BxLBh1thzDsCPrkXlZA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ajv": "8.17.1",
"ajv-formats": "3.0.1",
"jsonc-parser": "3.3.1",
"picomatch": "4.0.3",
"rxjs": "7.8.2",
"source-map": "0.7.6"
},
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=24.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
},
"peerDependencies": {
"chokidar": "^5.0.0"
},
"peerDependenciesMeta": {
"chokidar": {
"optional": true
}
}
},
"node_modules/@angular-devkit/schematics": {
"version": "21.1.0",
"resolved": "https://registry.npmjs.org/@angular-devkit/schematics/-/schematics-21.1.0.tgz",
"integrity": "sha512-sVgTntCZCOV7mOpHzj6V14KOAoy4B9Ur9yHNRFZVgL2yD77TYRrJ0qwq+l7Im9fSjMCar6csjboqCvyAEpfV1g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@angular-devkit/core": "21.1.0",
"jsonc-parser": "3.3.1",
"magic-string": "0.30.21",
"ora": "9.0.0",
"rxjs": "7.8.2"
},
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=24.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
}
},
"node_modules/@angular/build": {
"version": "21.1.0",
"resolved": "https://registry.npmjs.org/@angular/build/-/build-21.1.0.tgz",
"integrity": "sha512-ftms4F/TlkRNhf/4ykFO12zTG0f9sIRZ4fGFnaOVGmnKYkPKZklWvMCPoaoIligHS2pqKye1a5JSiTgTeUDp9w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@ampproject/remapping": "2.3.0",
"@angular-devkit/architect": "0.2101.0",
"@babel/core": "7.28.5",
"@babel/helper-annotate-as-pure": "7.27.3",
"@babel/helper-split-export-declaration": "7.24.7",
"@inquirer/confirm": "5.1.21",
"@vitejs/plugin-basic-ssl": "2.1.0",
"beasties": "0.3.5",
"browserslist": "^4.26.0",
"esbuild": "0.27.2",
"https-proxy-agent": "7.0.6",
"istanbul-lib-instrument": "6.0.3",
"jsonc-parser": "3.3.1",
"listr2": "9.0.5",
"magic-string": "0.30.21",
"mrmime": "2.0.1",
"parse5-html-rewriting-stream": "8.0.0",
"picomatch": "4.0.3",
"piscina": "5.1.4",
"rolldown": "1.0.0-beta.58",
"sass": "1.97.1",
"semver": "7.7.3",
"source-map-support": "0.5.21",
"tinyglobby": "0.2.15",
"undici": "7.18.0",
"vite": "7.3.0",
"watchpack": "2.5.0"
},
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=24.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
},
"optionalDependencies": {
"lmdb": "3.4.4"
},
"peerDependencies": {
"@angular/compiler": "^21.0.0",
"@angular/compiler-cli": "^21.0.0",
"@angular/core": "^21.0.0",
"@angular/localize": "^21.0.0",
"@angular/platform-browser": "^21.0.0",
"@angular/platform-server": "^21.0.0",
"@angular/service-worker": "^21.0.0",
"@angular/ssr": "^21.1.0",
"karma": "^6.4.0",
"less": "^4.2.0",
"ng-packagr": "^21.0.0",
"postcss": "^8.4.0",
"tailwindcss": "^2.0.0 || ^3.0.0 || ^4.0.0",
"tslib": "^2.3.0",
"typescript": ">=5.9 <6.0",
"vitest": "^4.0.8"
},
"peerDependenciesMeta": {
"@angular/core": {
"optional": true
},
"@angular/localize": {
"optional": true
},
"@angular/platform-browser": {
"optional": true
},
"@angular/platform-server": {
"optional": true
},
"@angular/service-worker": {
"optional": true
},
"@angular/ssr": {
"optional": true
},
"karma": {
"optional": true
},
"less": {
"optional": true
},
"ng-packagr": {
"optional": true
},
"postcss": {
"optional": true
},
"tailwindcss": {
"optional": true
},
"vitest": {
"optional": true
}
}
},
"node_modules/@angular/cli": {
"version": "21.1.0",
"resolved": "https://registry.npmjs.org/@angular/cli/-/cli-21.1.0.tgz",
"integrity": "sha512-kzk8du388x6EBybJeq5AB27qGm8oGC9HhvBJDbu8o+aBIOY+JwVON9m4SYLCzeT+EVK8sKA1NMVYi2CDerk6hA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@angular-devkit/architect": "0.2101.0",
"@angular-devkit/core": "21.1.0",
"@angular-devkit/schematics": "21.1.0",
"@inquirer/prompts": "7.10.1",
"@listr2/prompt-adapter-inquirer": "3.0.5",
"@modelcontextprotocol/sdk": "1.25.2",
"@schematics/angular": "21.1.0",
"@yarnpkg/lockfile": "1.1.0",
"algoliasearch": "5.46.2",
"ini": "6.0.0",
"jsonc-parser": "3.3.1",
"listr2": "9.0.5",
"npm-package-arg": "13.0.2",
"pacote": "21.0.4",
"parse5-html-rewriting-stream": "8.0.0",
"resolve": "1.22.11",
"semver": "7.7.3",
"yargs": "18.0.0",
"zod": "4.3.5"
},
"bin": {
"ng": "bin/ng.js"
},
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=24.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
}
},
"node_modules/@babel/code-frame": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.28.6.tgz",
"integrity": "sha512-JYgintcMjRiCvS8mMECzaEn+m3PfoQiyqukOMCCVQtoJGYJw8j/8LBJEiqkHLkfwCcs74E3pbAUFNg7d9VNJ+Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-validator-identifier": "^7.28.5",
"js-tokens": "^4.0.0",
"picocolors": "^1.1.1"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/compat-data": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.6.tgz",
"integrity": "sha512-2lfu57JtzctfIrcGMz992hyLlByuzgIk58+hhGCxjKZ3rWI82NnVLjXcaTqkI2NvlcvOskZaiZ5kjUALo3Lpxg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/core": {
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz",
"integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.27.1",
"@babel/generator": "^7.28.5",
"@babel/helper-compilation-targets": "^7.27.2",
"@babel/helper-module-transforms": "^7.28.3",
"@babel/helpers": "^7.28.4",
"@babel/parser": "^7.28.5",
"@babel/template": "^7.27.2",
"@babel/traverse": "^7.28.5",
"@babel/types": "^7.28.5",
"@jridgewell/remapping": "^2.3.5",
"convert-source-map": "^2.0.0",
"debug": "^4.1.0",
"gensync": "^1.0.0-beta.2",
"json5": "^2.2.3",
"semver": "^6.3.1"
},
"engines": {
"node": ">=6.9.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/babel"
}
},
"node_modules/@babel/core/node_modules/convert-source-map": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
"integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
"dev": true,
"license": "MIT"
},
"node_modules/@babel/core/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/@babel/generator": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.6.tgz",
"integrity": "sha512-lOoVRwADj8hjf7al89tvQ2a1lf53Z+7tiXMgpZJL3maQPDxh0DgLMN62B2MKUOFcoodBHLMbDM6WAbKgNy5Suw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/parser": "^7.28.6",
"@babel/types": "^7.28.6",
"@jridgewell/gen-mapping": "^0.3.12",
"@jridgewell/trace-mapping": "^0.3.28",
"jsesc": "^3.0.2"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-annotate-as-pure": {
"version": "7.27.3",
"resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz",
"integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.27.3"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-compilation-targets": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz",
"integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/compat-data": "^7.28.6",
"@babel/helper-validator-option": "^7.27.1",
"browserslist": "^4.24.0",
"lru-cache": "^5.1.1",
"semver": "^6.3.1"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-compilation-targets/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/@babel/helper-globals": {
"version": "7.28.0",
"resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
"integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-module-imports": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz",
"integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/traverse": "^7.28.6",
"@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-module-transforms": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz",
"integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-module-imports": "^7.28.6",
"@babel/helper-validator-identifier": "^7.28.5",
"@babel/traverse": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
},
"peerDependencies": {
"@babel/core": "^7.0.0"
}
},
"node_modules/@babel/helper-split-export-declaration": {
"version": "7.24.7",
"resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz",
"integrity": "sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.24.7"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-string-parser": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
"integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-validator-identifier": {
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz",
"integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-validator-option": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz",
"integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helpers": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz",
"integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/template": "^7.28.6",
"@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/parser": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.6.tgz",
"integrity": "sha512-TeR9zWR18BvbfPmGbLampPMW+uW1NZnJlRuuHso8i87QZNq2JRF9i6RgxRqtEq+wQGsS19NNTWr2duhnE49mfQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.28.6"
},
"bin": {
"parser": "bin/babel-parser.js"
},
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@babel/template": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz",
"integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.28.6",
"@babel/parser": "^7.28.6",
"@babel/types": "^7.28.6"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/traverse": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.6.tgz",
"integrity": "sha512-fgWX62k02qtjqdSNTAGxmKYY/7FSL9WAS1o2Hu5+I5m9T0yxZzr4cnrfXQ/MX0rIifthCSs6FKTlzYbJcPtMNg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.28.6",
"@babel/generator": "^7.28.6",
"@babel/helper-globals": "^7.28.0",
"@babel/parser": "^7.28.6",
"@babel/template": "^7.28.6",
"@babel/types": "^7.28.6",
"debug": "^4.3.1"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/types": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.6.tgz",
"integrity": "sha512-0ZrskXVEHSWIqZM/sQZ4EV3jZJXRkio/WCxaqKZP1g//CEWEPSfeZFcms4XeKBCHU0ZKnIkdJeU/kF+eRp5lBg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-string-parser": "^7.27.1",
"@babel/helper-validator-identifier": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@emnapi/core": {
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.8.1.tgz",
"integrity": "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"@emnapi/wasi-threads": "1.1.0",
"tslib": "^2.4.0"
}
},
"node_modules/@emnapi/runtime": {
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz",
"integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"tslib": "^2.4.0"
}
},
"node_modules/@emnapi/wasi-threads": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz",
"integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"tslib": "^2.4.0"
}
},
"node_modules/@esbuild/aix-ppc64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.2.tgz",
"integrity": "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"aix"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.2.tgz",
"integrity": "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.2.tgz",
"integrity": "sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.2.tgz",
"integrity": "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.2.tgz",
"integrity": "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.2.tgz",
"integrity": "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.2.tgz",
"integrity": "sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.2.tgz",
"integrity": "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.2.tgz",
"integrity": "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.2.tgz",
"integrity": "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.2.tgz",
"integrity": "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.2.tgz",
"integrity": "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.2.tgz",
"integrity": "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==",
"cpu": [
"mips64el"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.2.tgz",
"integrity": "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.2.tgz",
"integrity": "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.2.tgz",
"integrity": "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.2.tgz",
"integrity": "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-arm64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.2.tgz",
"integrity": "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.2.tgz",
"integrity": "sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-arm64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.2.tgz",
"integrity": "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.2.tgz",
"integrity": "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openharmony-arm64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.2.tgz",
"integrity": "sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openharmony"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.2.tgz",
"integrity": "sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"sunos"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.2.tgz",
"integrity": "sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.2.tgz",
"integrity": "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.2.tgz",
"integrity": "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@hono/node-server": {
"version": "1.19.9",
"resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.9.tgz",
"integrity": "sha512-vHL6w3ecZsky+8P5MD+eFfaGTyCeOHUIFYMGpQGbrBTSmNNoxv0if69rEZ5giu36weC5saFuznL411gRX7bJDw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18.14.1"
},
"peerDependencies": {
"hono": "^4"
}
},
"node_modules/@inquirer/ansi": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@inquirer/ansi/-/ansi-1.0.2.tgz",
"integrity": "sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
}
},
"node_modules/@inquirer/checkbox": {
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-4.3.2.tgz",
"integrity": "sha512-VXukHf0RR1doGe6Sm4F0Em7SWYLTHSsbGfJdS9Ja2bX5/D5uwVOEjr07cncLROdBvmnvCATYEWlHqYmXv2IlQA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/ansi": "^1.0.2",
"@inquirer/core": "^10.3.2",
"@inquirer/figures": "^1.0.15",
"@inquirer/type": "^3.0.10",
"yoctocolors-cjs": "^2.1.3"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/confirm": {
"version": "5.1.21",
"resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.21.tgz",
"integrity": "sha512-KR8edRkIsUayMXV+o3Gv+q4jlhENF9nMYUZs9PA2HzrXeHI8M5uDag70U7RJn9yyiMZSbtF5/UexBtAVtZGSbQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.3.2",
"@inquirer/type": "^3.0.10"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/core": {
"version": "10.3.2",
"resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.3.2.tgz",
"integrity": "sha512-43RTuEbfP8MbKzedNqBrlhhNKVwoK//vUFNW3Q3vZ88BLcrs4kYpGg+B2mm5p2K/HfygoCxuKwJJiv8PbGmE0A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/ansi": "^1.0.2",
"@inquirer/figures": "^1.0.15",
"@inquirer/type": "^3.0.10",
"cli-width": "^4.1.0",
"mute-stream": "^2.0.0",
"signal-exit": "^4.1.0",
"wrap-ansi": "^6.2.0",
"yoctocolors-cjs": "^2.1.3"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/editor": {
"version": "4.2.23",
"resolved": "https://registry.npmjs.org/@inquirer/editor/-/editor-4.2.23.tgz",
"integrity": "sha512-aLSROkEwirotxZ1pBaP8tugXRFCxW94gwrQLxXfrZsKkfjOYC1aRvAZuhpJOb5cu4IBTJdsCigUlf2iCOu4ZDQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.3.2",
"@inquirer/external-editor": "^1.0.3",
"@inquirer/type": "^3.0.10"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/expand": {
"version": "4.0.23",
"resolved": "https://registry.npmjs.org/@inquirer/expand/-/expand-4.0.23.tgz",
"integrity": "sha512-nRzdOyFYnpeYTTR2qFwEVmIWypzdAx/sIkCMeTNTcflFOovfqUk+HcFhQQVBftAh9gmGrpFj6QcGEqrDMDOiew==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.3.2",
"@inquirer/type": "^3.0.10",
"yoctocolors-cjs": "^2.1.3"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/external-editor": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/@inquirer/external-editor/-/external-editor-1.0.3.tgz",
"integrity": "sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==",
"dev": true,
"license": "MIT",
"dependencies": {
"chardet": "^2.1.1",
"iconv-lite": "^0.7.0"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/figures": {
"version": "1.0.15",
"resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.15.tgz",
"integrity": "sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
}
},
"node_modules/@inquirer/input": {
"version": "4.3.1",
"resolved": "https://registry.npmjs.org/@inquirer/input/-/input-4.3.1.tgz",
"integrity": "sha512-kN0pAM4yPrLjJ1XJBjDxyfDduXOuQHrBB8aLDMueuwUGn+vNpF7Gq7TvyVxx8u4SHlFFj4trmj+a2cbpG4Jn1g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.3.2",
"@inquirer/type": "^3.0.10"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/number": {
"version": "3.0.23",
"resolved": "https://registry.npmjs.org/@inquirer/number/-/number-3.0.23.tgz",
"integrity": "sha512-5Smv0OK7K0KUzUfYUXDXQc9jrf8OHo4ktlEayFlelCjwMXz0299Y8OrI+lj7i4gCBY15UObk76q0QtxjzFcFcg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.3.2",
"@inquirer/type": "^3.0.10"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/password": {
"version": "4.0.23",
"resolved": "https://registry.npmjs.org/@inquirer/password/-/password-4.0.23.tgz",
"integrity": "sha512-zREJHjhT5vJBMZX/IUbyI9zVtVfOLiTO66MrF/3GFZYZ7T4YILW5MSkEYHceSii/KtRk+4i3RE7E1CUXA2jHcA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/ansi": "^1.0.2",
"@inquirer/core": "^10.3.2",
"@inquirer/type": "^3.0.10"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/prompts": {
"version": "7.10.1",
"resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-7.10.1.tgz",
"integrity": "sha512-Dx/y9bCQcXLI5ooQ5KyvA4FTgeo2jYj/7plWfV5Ak5wDPKQZgudKez2ixyfz7tKXzcJciTxqLeK7R9HItwiByg==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@inquirer/checkbox": "^4.3.2",
"@inquirer/confirm": "^5.1.21",
"@inquirer/editor": "^4.2.23",
"@inquirer/expand": "^4.0.23",
"@inquirer/input": "^4.3.1",
"@inquirer/number": "^3.0.23",
"@inquirer/password": "^4.0.23",
"@inquirer/rawlist": "^4.1.11",
"@inquirer/search": "^3.2.2",
"@inquirer/select": "^4.4.2"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/rawlist": {
"version": "4.1.11",
"resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-4.1.11.tgz",
"integrity": "sha512-+LLQB8XGr3I5LZN/GuAHo+GpDJegQwuPARLChlMICNdwW7OwV2izlCSCxN6cqpL0sMXmbKbFcItJgdQq5EBXTw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.3.2",
"@inquirer/type": "^3.0.10",
"yoctocolors-cjs": "^2.1.3"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/search": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/@inquirer/search/-/search-3.2.2.tgz",
"integrity": "sha512-p2bvRfENXCZdWF/U2BXvnSI9h+tuA8iNqtUKb9UWbmLYCRQxd8WkvwWvYn+3NgYaNwdUkHytJMGG4MMLucI1kA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.3.2",
"@inquirer/figures": "^1.0.15",
"@inquirer/type": "^3.0.10",
"yoctocolors-cjs": "^2.1.3"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/select": {
"version": "4.4.2",
"resolved": "https://registry.npmjs.org/@inquirer/select/-/select-4.4.2.tgz",
"integrity": "sha512-l4xMuJo55MAe+N7Qr4rX90vypFwCajSakx59qe/tMaC1aEHWLyw68wF4o0A4SLAY4E0nd+Vt+EyskeDIqu1M6w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/ansi": "^1.0.2",
"@inquirer/core": "^10.3.2",
"@inquirer/figures": "^1.0.15",
"@inquirer/type": "^3.0.10",
"yoctocolors-cjs": "^2.1.3"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/type": {
"version": "3.0.10",
"resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.10.tgz",
"integrity": "sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@isaacs/balanced-match": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz",
"integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": "20 || >=22"
}
},
"node_modules/@isaacs/brace-expansion": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz",
"integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@isaacs/balanced-match": "^4.0.1"
},
"engines": {
"node": "20 || >=22"
}
},
"node_modules/@isaacs/fs-minipass": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz",
"integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^7.0.4"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@istanbuljs/schema": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz",
"integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/@jridgewell/gen-mapping": {
"version": "0.3.13",
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
"integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/sourcemap-codec": "^1.5.0",
"@jridgewell/trace-mapping": "^0.3.24"
}
},
"node_modules/@jridgewell/remapping": {
"version": "2.3.5",
"resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz",
"integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.5",
"@jridgewell/trace-mapping": "^0.3.24"
}
},
"node_modules/@jridgewell/resolve-uri": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/sourcemap-codec": {
"version": "1.5.5",
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
"integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
"dev": true,
"license": "MIT"
},
"node_modules/@jridgewell/trace-mapping": {
"version": "0.3.31",
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz",
"integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/resolve-uri": "^3.1.0",
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@listr2/prompt-adapter-inquirer": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/@listr2/prompt-adapter-inquirer/-/prompt-adapter-inquirer-3.0.5.tgz",
"integrity": "sha512-WELs+hj6xcilkloBXYf9XXK8tYEnKsgLj01Xl5ONUJpKjmT5hGVUzNUS5tooUxs7pGMrw+jFD/41WpqW4V3LDA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/type": "^3.0.8"
},
"engines": {
"node": ">=20.0.0"
},
"peerDependencies": {
"@inquirer/prompts": ">= 3 < 8",
"listr2": "9.0.5"
}
},
"node_modules/@lmdb/lmdb-darwin-arm64": {
"version": "3.4.4",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-darwin-arm64/-/lmdb-darwin-arm64-3.4.4.tgz",
"integrity": "sha512-XaKL705gDWd6XVls3ATDj13ZdML/LqSIxwgnYpG8xTzH2ifArx8fMMDdvqGE/Emd+W6R90W2fveZcJ0AyS8Y0w==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@lmdb/lmdb-darwin-x64": {
"version": "3.4.4",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-darwin-x64/-/lmdb-darwin-x64-3.4.4.tgz",
"integrity": "sha512-GPHGEVcwJlkD01GmIr7B4kvbIcUDS2+kBadVEd7lU4can1RZaZQLDDBJRrrNfS2Kavvl0VLI/cMv7UASAXGrww==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@lmdb/lmdb-linux-arm": {
"version": "3.4.4",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-arm/-/lmdb-linux-arm-3.4.4.tgz",
"integrity": "sha512-cmev5/dZr5ACKri9f6GU6lZCXTjMhV72xujlbOhFCgFXrt4W0TxGsmY8kA1BITvH60JBKE50cSxsiulybAbrrw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@lmdb/lmdb-linux-arm64": {
"version": "3.4.4",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-arm64/-/lmdb-linux-arm64-3.4.4.tgz",
"integrity": "sha512-mALqr7DE42HsiwVTKpQWxacjHoJk+e9p00RWIJqTACh/hpucxp/0lK/XMh5XzWnU/TDCZLukq1+vNqnNumTP/Q==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@lmdb/lmdb-linux-x64": {
"version": "3.4.4",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-x64/-/lmdb-linux-x64-3.4.4.tgz",
"integrity": "sha512-QjLs8OcmCNcraAcLoZyFlo0atzBJniQLLwhtR+ymQqS5kLYpV5RqwriL87BW+ZiR9ZiGgZx3evrz5vnWPtJ1fQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@lmdb/lmdb-win32-arm64": {
"version": "3.4.4",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-win32-arm64/-/lmdb-win32-arm64-3.4.4.tgz",
"integrity": "sha512-tr/pwHDlZ33forLGAr0tI04cRmP4SgF93yHbb+2zvZiDEyln5yMHhbKDySxY66aUOkhvBvTuHq9q/3YmTj6ZHQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@lmdb/lmdb-win32-x64": {
"version": "3.4.4",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-win32-x64/-/lmdb-win32-x64-3.4.4.tgz",
"integrity": "sha512-KRzfocJzB/mgoTCqnMawuLSKheHRVTqWfSmouIgYpFs6Hx4zvZSvsZKSCEb5gHmICy7qsx9l06jk3MFTtiFVAQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@modelcontextprotocol/sdk": {
"version": "1.25.2",
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.25.2.tgz",
"integrity": "sha512-LZFeo4F9M5qOhC/Uc1aQSrBHxMrvxett+9KLHt7OhcExtoiRN9DKgbZffMP/nxjutWDQpfMDfP3nkHI4X9ijww==",
"dev": true,
"license": "MIT",
"dependencies": {
"@hono/node-server": "^1.19.7",
"ajv": "^8.17.1",
"ajv-formats": "^3.0.1",
"content-type": "^1.0.5",
"cors": "^2.8.5",
"cross-spawn": "^7.0.5",
"eventsource": "^3.0.2",
"eventsource-parser": "^3.0.0",
"express": "^5.0.1",
"express-rate-limit": "^7.5.0",
"jose": "^6.1.1",
"json-schema-typed": "^8.0.2",
"pkce-challenge": "^5.0.0",
"raw-body": "^3.0.0",
"zod": "^3.25 || ^4.0",
"zod-to-json-schema": "^3.25.0"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@cfworker/json-schema": "^4.1.1",
"zod": "^3.25 || ^4.0"
},
"peerDependenciesMeta": {
"@cfworker/json-schema": {
"optional": true
},
"zod": {
"optional": false
}
}
},
"node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz",
"integrity": "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@msgpackr-extract/msgpackr-extract-darwin-x64": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz",
"integrity": "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@msgpackr-extract/msgpackr-extract-linux-arm": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz",
"integrity": "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@msgpackr-extract/msgpackr-extract-linux-arm64": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz",
"integrity": "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz",
"integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@msgpackr-extract/msgpackr-extract-win32-x64": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz",
"integrity": "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@napi-rs/nice": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice/-/nice-1.1.1.tgz",
"integrity": "sha512-xJIPs+bYuc9ASBl+cvGsKbGrJmS6fAKaSZCnT0lhahT5rhA2VVy9/EcIgd2JhtEuFOJNx7UHNn/qiTPTY4nrQw==",
"dev": true,
"license": "MIT",
"optional": true,
"engines": {
"node": ">= 10"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Brooooooklyn"
},
"optionalDependencies": {
"@napi-rs/nice-android-arm-eabi": "1.1.1",
"@napi-rs/nice-android-arm64": "1.1.1",
"@napi-rs/nice-darwin-arm64": "1.1.1",
"@napi-rs/nice-darwin-x64": "1.1.1",
"@napi-rs/nice-freebsd-x64": "1.1.1",
"@napi-rs/nice-linux-arm-gnueabihf": "1.1.1",
"@napi-rs/nice-linux-arm64-gnu": "1.1.1",
"@napi-rs/nice-linux-arm64-musl": "1.1.1",
"@napi-rs/nice-linux-ppc64-gnu": "1.1.1",
"@napi-rs/nice-linux-riscv64-gnu": "1.1.1",
"@napi-rs/nice-linux-s390x-gnu": "1.1.1",
"@napi-rs/nice-linux-x64-gnu": "1.1.1",
"@napi-rs/nice-linux-x64-musl": "1.1.1",
"@napi-rs/nice-openharmony-arm64": "1.1.1",
"@napi-rs/nice-win32-arm64-msvc": "1.1.1",
"@napi-rs/nice-win32-ia32-msvc": "1.1.1",
"@napi-rs/nice-win32-x64-msvc": "1.1.1"
}
},
"node_modules/@napi-rs/nice-android-arm-eabi": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-android-arm-eabi/-/nice-android-arm-eabi-1.1.1.tgz",
"integrity": "sha512-kjirL3N6TnRPv5iuHw36wnucNqXAO46dzK9oPb0wj076R5Xm8PfUVA9nAFB5ZNMmfJQJVKACAPd/Z2KYMppthw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-android-arm64": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-android-arm64/-/nice-android-arm64-1.1.1.tgz",
"integrity": "sha512-blG0i7dXgbInN5urONoUCNf+DUEAavRffrO7fZSeoRMJc5qD+BJeNcpr54msPF6qfDD6kzs9AQJogZvT2KD5nw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-darwin-arm64": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-darwin-arm64/-/nice-darwin-arm64-1.1.1.tgz",
"integrity": "sha512-s/E7w45NaLqTGuOjC2p96pct4jRfo61xb9bU1unM/MJ/RFkKlJyJDx7OJI/O0ll/hrfpqKopuAFDV8yo0hfT7A==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-darwin-x64": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-darwin-x64/-/nice-darwin-x64-1.1.1.tgz",
"integrity": "sha512-dGoEBnVpsdcC+oHHmW1LRK5eiyzLwdgNQq3BmZIav+9/5WTZwBYX7r5ZkQC07Nxd3KHOCkgbHSh4wPkH1N1LiQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-freebsd-x64": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-freebsd-x64/-/nice-freebsd-x64-1.1.1.tgz",
"integrity": "sha512-kHv4kEHAylMYmlNwcQcDtXjklYp4FCf0b05E+0h6nDHsZ+F0bDe04U/tXNOqrx5CmIAth4vwfkjjUmp4c4JktQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-arm-gnueabihf": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-arm-gnueabihf/-/nice-linux-arm-gnueabihf-1.1.1.tgz",
"integrity": "sha512-E1t7K0efyKXZDoZg1LzCOLxgolxV58HCkaEkEvIYQx12ht2pa8hoBo+4OB3qh7e+QiBlp1SRf+voWUZFxyhyqg==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-arm64-gnu": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-arm64-gnu/-/nice-linux-arm64-gnu-1.1.1.tgz",
"integrity": "sha512-CIKLA12DTIZlmTaaKhQP88R3Xao+gyJxNWEn04wZwC2wmRapNnxCUZkVwggInMJvtVElA+D4ZzOU5sX4jV+SmQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-arm64-musl": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-arm64-musl/-/nice-linux-arm64-musl-1.1.1.tgz",
"integrity": "sha512-+2Rzdb3nTIYZ0YJF43qf2twhqOCkiSrHx2Pg6DJaCPYhhaxbLcdlV8hCRMHghQ+EtZQWGNcS2xF4KxBhSGeutg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-ppc64-gnu": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-ppc64-gnu/-/nice-linux-ppc64-gnu-1.1.1.tgz",
"integrity": "sha512-4FS8oc0GeHpwvv4tKciKkw3Y4jKsL7FRhaOeiPei0X9T4Jd619wHNe4xCLmN2EMgZoeGg+Q7GY7BsvwKpL22Tg==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-riscv64-gnu": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-riscv64-gnu/-/nice-linux-riscv64-gnu-1.1.1.tgz",
"integrity": "sha512-HU0nw9uD4FO/oGCCk409tCi5IzIZpH2agE6nN4fqpwVlCn5BOq0MS1dXGjXaG17JaAvrlpV5ZeyZwSon10XOXw==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-s390x-gnu": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-s390x-gnu/-/nice-linux-s390x-gnu-1.1.1.tgz",
"integrity": "sha512-2YqKJWWl24EwrX0DzCQgPLKQBxYDdBxOHot1KWEq7aY2uYeX+Uvtv4I8xFVVygJDgf6/92h9N3Y43WPx8+PAgQ==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-x64-gnu": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-x64-gnu/-/nice-linux-x64-gnu-1.1.1.tgz",
"integrity": "sha512-/gaNz3R92t+dcrfCw/96pDopcmec7oCcAQ3l/M+Zxr82KT4DljD37CpgrnXV+pJC263JkW572pdbP3hP+KjcIg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-x64-musl": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-x64-musl/-/nice-linux-x64-musl-1.1.1.tgz",
"integrity": "sha512-xScCGnyj/oppsNPMnevsBe3pvNaoK7FGvMjT35riz9YdhB2WtTG47ZlbxtOLpjeO9SqqQ2J2igCmz6IJOD5JYw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-openharmony-arm64": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-openharmony-arm64/-/nice-openharmony-arm64-1.1.1.tgz",
"integrity": "sha512-6uJPRVwVCLDeoOaNyeiW0gp2kFIM4r7PL2MczdZQHkFi9gVlgm+Vn+V6nTWRcu856mJ2WjYJiumEajfSm7arPQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openharmony"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-win32-arm64-msvc": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-win32-arm64-msvc/-/nice-win32-arm64-msvc-1.1.1.tgz",
"integrity": "sha512-uoTb4eAvM5B2aj/z8j+Nv8OttPf2m+HVx3UjA5jcFxASvNhQriyCQF1OB1lHL43ZhW+VwZlgvjmP5qF3+59atA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-win32-ia32-msvc": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-win32-ia32-msvc/-/nice-win32-ia32-msvc-1.1.1.tgz",
"integrity": "sha512-CNQqlQT9MwuCsg1Vd/oKXiuH+TcsSPJmlAFc5frFyX/KkOh0UpBLEj7aoY656d5UKZQMQFP7vJNa1DNUNORvug==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-win32-x64-msvc": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-win32-x64-msvc/-/nice-win32-x64-msvc-1.1.1.tgz",
"integrity": "sha512-vB+4G/jBQCAh0jelMTY3+kgFy00Hlx2f2/1zjMoH821IbplbWZOkLiTYXQkygNTzQJTq5cvwBDgn2ppHD+bglQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/wasm-runtime": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.1.1.tgz",
"integrity": "sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"@emnapi/core": "^1.7.1",
"@emnapi/runtime": "^1.7.1",
"@tybys/wasm-util": "^0.10.1"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Brooooooklyn"
}
},
"node_modules/@npmcli/agent": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-4.0.0.tgz",
"integrity": "sha512-kAQTcEN9E8ERLVg5AsGwLNoFb+oEG6engbqAU2P43gD4JEIkNGMHdVQ096FsOAAYpZPB0RSt0zgInKIAS1l5QA==",
"dev": true,
"license": "ISC",
"dependencies": {
"agent-base": "^7.1.0",
"http-proxy-agent": "^7.0.0",
"https-proxy-agent": "^7.0.1",
"lru-cache": "^11.2.1",
"socks-proxy-agent": "^8.0.3"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@npmcli/agent/node_modules/lru-cache": {
"version": "11.2.4",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz",
"integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==",
"dev": true,
"license": "BlueOak-1.0.0",
"engines": {
"node": "20 || >=22"
}
},
"node_modules/@npmcli/fs": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-5.0.0.tgz",
"integrity": "sha512-7OsC1gNORBEawOa5+j2pXN9vsicaIOH5cPXxoR6fJOmH6/EXpJB2CajXOu1fPRFun2m1lktEFX11+P89hqO/og==",
"dev": true,
"license": "ISC",
"dependencies": {
"semver": "^7.3.5"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@npmcli/git": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/@npmcli/git/-/git-7.0.1.tgz",
"integrity": "sha512-+XTFxK2jJF/EJJ5SoAzXk3qwIDfvFc5/g+bD274LZ7uY7LE8sTfG6Z8rOanPl2ZEvZWqNvmEdtXC25cE54VcoA==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/promise-spawn": "^9.0.0",
"ini": "^6.0.0",
"lru-cache": "^11.2.1",
"npm-pick-manifest": "^11.0.1",
"proc-log": "^6.0.0",
"promise-retry": "^2.0.1",
"semver": "^7.3.5",
"which": "^6.0.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@npmcli/git/node_modules/isexe": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
"integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=16"
}
},
"node_modules/@npmcli/git/node_modules/lru-cache": {
"version": "11.2.4",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz",
"integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==",
"dev": true,
"license": "BlueOak-1.0.0",
"engines": {
"node": "20 || >=22"
}
},
"node_modules/@npmcli/git/node_modules/which": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/which/-/which-6.0.0.tgz",
"integrity": "sha512-f+gEpIKMR9faW/JgAgPK1D7mekkFoqbmiwvNzuhsHetni20QSgzg9Vhn0g2JSJkkfehQnqdUAx7/e15qS1lPxg==",
"dev": true,
"license": "ISC",
"dependencies": {
"isexe": "^3.1.1"
},
"bin": {
"node-which": "bin/which.js"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@npmcli/installed-package-contents": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-4.0.0.tgz",
"integrity": "sha512-yNyAdkBxB72gtZ4GrwXCM0ZUedo9nIbOMKfGjt6Cu6DXf0p8y1PViZAKDC8q8kv/fufx0WTjRBdSlyrvnP7hmA==",
"dev": true,
"license": "ISC",
"dependencies": {
"npm-bundled": "^5.0.0",
"npm-normalize-package-bin": "^5.0.0"
},
"bin": {
"installed-package-contents": "bin/index.js"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@npmcli/node-gyp": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-5.0.0.tgz",
"integrity": "sha512-uuG5HZFXLfyFKqg8QypsmgLQW7smiRjVc45bqD/ofZZcR/uxEjgQU8qDPv0s9TEeMUiAAU/GC5bR6++UdTirIQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@npmcli/package-json": {
"version": "7.0.4",
"resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-7.0.4.tgz",
"integrity": "sha512-0wInJG3j/K40OJt/33ax47WfWMzZTm6OQxB9cDhTt5huCP2a9g2GnlsxmfN+PulItNPIpPrZ+kfwwUil7eHcZQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/git": "^7.0.0",
"glob": "^13.0.0",
"hosted-git-info": "^9.0.0",
"json-parse-even-better-errors": "^5.0.0",
"proc-log": "^6.0.0",
"semver": "^7.5.3",
"validate-npm-package-license": "^3.0.4"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@npmcli/promise-spawn": {
"version": "9.0.1",
"resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-9.0.1.tgz",
"integrity": "sha512-OLUaoqBuyxeTqUvjA3FZFiXUfYC1alp3Sa99gW3EUDz3tZ3CbXDdcZ7qWKBzicrJleIgucoWamWH1saAmH/l2Q==",
"dev": true,
"license": "ISC",
"dependencies": {
"which": "^6.0.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@npmcli/promise-spawn/node_modules/isexe": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
"integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=16"
}
},
"node_modules/@npmcli/promise-spawn/node_modules/which": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/which/-/which-6.0.0.tgz",
"integrity": "sha512-f+gEpIKMR9faW/JgAgPK1D7mekkFoqbmiwvNzuhsHetni20QSgzg9Vhn0g2JSJkkfehQnqdUAx7/e15qS1lPxg==",
"dev": true,
"license": "ISC",
"dependencies": {
"isexe": "^3.1.1"
},
"bin": {
"node-which": "bin/which.js"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@npmcli/redact": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-4.0.0.tgz",
"integrity": "sha512-gOBg5YHMfZy+TfHArfVogwgfBeQnKbbGo3pSUyK/gSI0AVu+pEiDVcKlQb0D8Mg1LNRZILZ6XG8I5dJ4KuAd9Q==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@npmcli/run-script": {
"version": "10.0.3",
"resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-10.0.3.tgz",
"integrity": "sha512-ER2N6itRkzWbbtVmZ9WKaWxVlKlOeBFF1/7xx+KA5J1xKa4JjUwBdb6tDpk0v1qA+d+VDwHI9qmLcXSWcmi+Rw==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/node-gyp": "^5.0.0",
"@npmcli/package-json": "^7.0.0",
"@npmcli/promise-spawn": "^9.0.0",
"node-gyp": "^12.1.0",
"proc-log": "^6.0.0",
"which": "^6.0.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@npmcli/run-script/node_modules/isexe": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
"integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=16"
}
},
"node_modules/@npmcli/run-script/node_modules/which": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/which/-/which-6.0.0.tgz",
"integrity": "sha512-f+gEpIKMR9faW/JgAgPK1D7mekkFoqbmiwvNzuhsHetni20QSgzg9Vhn0g2JSJkkfehQnqdUAx7/e15qS1lPxg==",
"dev": true,
"license": "ISC",
"dependencies": {
"isexe": "^3.1.1"
},
"bin": {
"node-which": "bin/which.js"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@oxc-project/types": {
"version": "0.106.0",
"resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.106.0.tgz",
"integrity": "sha512-QdsH3rZq480VnOHSHgPYOhjL8O8LBdcnSjM408BpPCCUc0JYYZPG9Gafl9i3OcGk/7137o+gweb4cCv3WAUykg==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/Boshen"
}
},
"node_modules/@parcel/watcher": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.4.tgz",
"integrity": "sha512-WYa2tUVV5HiArWPB3ydlOc4R2ivq0IDrlqhMi3l7mVsFEXNcTfxYFPIHXHXIh/ca/y/V5N4E1zecyxdIBjYnkQ==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"dependencies": {
"detect-libc": "^2.0.3",
"is-glob": "^4.0.3",
"node-addon-api": "^7.0.0",
"picomatch": "^4.0.3"
},
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"optionalDependencies": {
"@parcel/watcher-android-arm64": "2.5.4",
"@parcel/watcher-darwin-arm64": "2.5.4",
"@parcel/watcher-darwin-x64": "2.5.4",
"@parcel/watcher-freebsd-x64": "2.5.4",
"@parcel/watcher-linux-arm-glibc": "2.5.4",
"@parcel/watcher-linux-arm-musl": "2.5.4",
"@parcel/watcher-linux-arm64-glibc": "2.5.4",
"@parcel/watcher-linux-arm64-musl": "2.5.4",
"@parcel/watcher-linux-x64-glibc": "2.5.4",
"@parcel/watcher-linux-x64-musl": "2.5.4",
"@parcel/watcher-win32-arm64": "2.5.4",
"@parcel/watcher-win32-ia32": "2.5.4",
"@parcel/watcher-win32-x64": "2.5.4"
}
},
"node_modules/@parcel/watcher-android-arm64": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.4.tgz",
"integrity": "sha512-hoh0vx4v+b3BNI7Cjoy2/B0ARqcwVNrzN/n7DLq9ZB4I3lrsvhrkCViJyfTj/Qi5xM9YFiH4AmHGK6pgH1ss7g==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-darwin-arm64": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.4.tgz",
"integrity": "sha512-kphKy377pZiWpAOyTgQYPE5/XEKVMaj6VUjKT5VkNyUJlr2qZAn8gIc7CPzx+kbhvqHDT9d7EqdOqRXT6vk0zw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-darwin-x64": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.4.tgz",
"integrity": "sha512-UKaQFhCtNJW1A9YyVz3Ju7ydf6QgrpNQfRZ35wNKUhTQ3dxJ/3MULXN5JN/0Z80V/KUBDGa3RZaKq1EQT2a2gg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-freebsd-x64": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.4.tgz",
"integrity": "sha512-Dib0Wv3Ow/m2/ttvLdeI2DBXloO7t3Z0oCp4bAb2aqyqOjKPPGrg10pMJJAQ7tt8P4V2rwYwywkDhUia/FgS+Q==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-arm-glibc": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.4.tgz",
"integrity": "sha512-I5Vb769pdf7Q7Sf4KNy8Pogl/URRCKu9ImMmnVKYayhynuyGYMzuI4UOWnegQNa2sGpsPSbzDsqbHNMyeyPCgw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-arm-musl": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.4.tgz",
"integrity": "sha512-kGO8RPvVrcAotV4QcWh8kZuHr9bXi9a3bSZw7kFarYR0+fGliU7hd/zevhjw8fnvIKG3J9EO5G6sXNGCSNMYPQ==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-arm64-glibc": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.4.tgz",
"integrity": "sha512-KU75aooXhqGFY2W5/p8DYYHt4hrjHZod8AhcGAmhzPn/etTa+lYCDB2b1sJy3sWJ8ahFVTdy+EbqSBvMx3iFlw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-arm64-musl": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.4.tgz",
"integrity": "sha512-Qx8uNiIekVutnzbVdrgSanM+cbpDD3boB1f8vMtnuG5Zau4/bdDbXyKwIn0ToqFhIuob73bcxV9NwRm04/hzHQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-x64-glibc": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.4.tgz",
"integrity": "sha512-UYBQvhYmgAv61LNUn24qGQdjtycFBKSK3EXr72DbJqX9aaLbtCOO8+1SkKhD/GNiJ97ExgcHBrukcYhVjrnogA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-x64-musl": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.4.tgz",
"integrity": "sha512-YoRWCVgxv8akZrMhdyVi6/TyoeeMkQ0PGGOf2E4omODrvd1wxniXP+DBynKoHryStks7l+fDAMUBRzqNHrVOpg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-win32-arm64": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.4.tgz",
"integrity": "sha512-iby+D/YNXWkiQNYcIhg8P5hSjzXEHaQrk2SLrWOUD7VeC4Ohu0WQvmV+HDJokZVJ2UjJ4AGXW3bx7Lls9Ln4TQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-win32-ia32": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.4.tgz",
"integrity": "sha512-vQN+KIReG0a2ZDpVv8cgddlf67J8hk1WfZMMP7sMeZmJRSmEax5xNDNWKdgqSe2brOKTQQAs3aCCUal2qBHAyg==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-win32-x64": {
"version": "2.5.4",
"resolved": "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.4.tgz",
"integrity": "sha512-3A6efb6BOKwyw7yk9ro2vus2YTt2nvcd56AuzxdMiVOxL9umDyN5PKkKfZ/gZ9row41SjVmTVQNWQhaRRGpOKw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher/node_modules/node-addon-api": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz",
"integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==",
"dev": true,
"license": "MIT",
"optional": true
},
"node_modules/@rolldown/binding-android-arm64": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-beta.58.tgz",
"integrity": "sha512-mWj5eE4Qc8TbPdGGaaLvBb9XfDPvE1EmZkJQgiGKwchkWH4oAJcRAKMTw7ZHnb1L+t7Ah41sBkAecaIsuUgsug==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/binding-darwin-arm64": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-beta.58.tgz",
"integrity": "sha512-wFxUymI/5R8bH8qZFYDfAxAN9CyISEIYke+95oZPiv6EWo88aa5rskjVcCpKA532R+klFmdqjbbaD56GNmTF4Q==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/binding-darwin-x64": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-beta.58.tgz",
"integrity": "sha512-ybp3MkPj23VDV9PhtRwdU5qrGhlViWRV5BjKwO6epaSlUD5lW0WyY+roN3ZAzbma/9RrMTgZ/a/gtQq8YXOcqw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/binding-freebsd-x64": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-beta.58.tgz",
"integrity": "sha512-Evxj3yh7FWvyklUYZa0qTVT9N2zX9TPDqGF056hl8hlCZ9/ndQ2xMv6uw9PD1VlLpukbsqL+/C6M0qwipL0QMg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/binding-linux-arm-gnueabihf": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-beta.58.tgz",
"integrity": "sha512-tYeXprDOrEgVHUbPXH6MPso4cM/c6RTkmJNICMQlYdki4hGMh92aj3yU6CKs+4X5gfG0yj5kVUw/L4M685SYag==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/binding-linux-arm64-gnu": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-beta.58.tgz",
"integrity": "sha512-N78vmZzP6zG967Ohr+MasCjmKtis0geZ1SOVmxrA0/bklTQSzH5kHEjW5Qn+i1taFno6GEre1E40v0wuWsNOQw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/binding-linux-arm64-musl": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-beta.58.tgz",
"integrity": "sha512-l+p4QVtG72C7wI2SIkNQw/KQtSjuYwS3rV6AKcWrRBF62ClsFUcif5vLaZIEbPrCXu5OFRXigXFJnxYsVVZqdQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/binding-linux-x64-gnu": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-beta.58.tgz",
"integrity": "sha512-urzJX0HrXxIh0FfxwWRjfPCMeInU9qsImLQxHBgLp5ivji1EEUnOfux8KxPPnRQthJyneBrN2LeqUix9DYrNaQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/binding-linux-x64-musl": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-beta.58.tgz",
"integrity": "sha512-7ijfVK3GISnXIwq/1FZo+KyAUJjL3kWPJ7rViAL6MWeEBhEgRzJ0yEd9I8N9aut8Y8ab+EKFJyRNMWZuUBwQ0A==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/binding-openharmony-arm64": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-beta.58.tgz",
"integrity": "sha512-/m7sKZCS+cUULbzyJTIlv8JbjNohxbpAOA6cM+lgWgqVzPee3U6jpwydrib328JFN/gF9A99IZEnuGYqEDJdww==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openharmony"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/binding-wasm32-wasi": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-beta.58.tgz",
"integrity": "sha512-6SZk7zMgv+y3wFFQ9qE5P9NnRHcRsptL1ypmudD26PDY+PvFCvfHRkJNfclWnvacVGxjowr7JOL3a9fd1wWhUw==",
"cpu": [
"wasm32"
],
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"@napi-rs/wasm-runtime": "^1.1.1"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/@rolldown/binding-win32-arm64-msvc": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-beta.58.tgz",
"integrity": "sha512-sFqfYPnBZ6xBhMkadB7UD0yjEDRvs7ipR3nCggblN+N4ODCXY6qhg/bKL39+W+dgQybL7ErD4EGERVbW9DAWvg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/binding-win32-x64-msvc": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.0.0-beta.58.tgz",
"integrity": "sha512-AnFWJdAqB8+IDPcGrATYs67Kik/6tnndNJV2jGRmwlbeNiQQ8GhRJU8ETRlINfII0pqi9k4WWLnb00p1QCxw/Q==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/@rolldown/pluginutils": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.58.tgz",
"integrity": "sha512-qWhDs6yFGR5xDfdrwiSa3CWGIHxD597uGE/A9xGqytBjANvh4rLCTTkq7szhMV4+Ygh+PMS90KVJ8xWG/TkX4w==",
"dev": true,
"license": "MIT"
},
"node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.55.2.tgz",
"integrity": "sha512-21J6xzayjy3O6NdnlO6aXi/urvSRjm6nCI6+nF6ra2YofKruGixN9kfT+dt55HVNwfDmpDHJcaS3JuP/boNnlA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
]
},
"node_modules/@rollup/rollup-android-arm64": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.55.2.tgz",
"integrity": "sha512-eXBg7ibkNUZ+sTwbFiDKou0BAckeV6kIigK7y5Ko4mB/5A1KLhuzEKovsmfvsL8mQorkoincMFGnQuIT92SKqA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.55.2.tgz",
"integrity": "sha512-UCbaTklREjrc5U47ypLulAgg4njaqfOVLU18VrCrI+6E5MQjuG0lSWaqLlAJwsD7NpFV249XgB0Bi37Zh5Sz4g==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@rollup/rollup-darwin-x64": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.55.2.tgz",
"integrity": "sha512-dP67MA0cCMHFT2g5XyjtpVOtp7y4UyUxN3dhLdt11at5cPKnSm4lY+EhwNvDXIMzAMIo2KU+mc9wxaAQJTn7sQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@rollup/rollup-freebsd-arm64": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.55.2.tgz",
"integrity": "sha512-WDUPLUwfYV9G1yxNRJdXcvISW15mpvod1Wv3ok+Ws93w1HjIVmCIFxsG2DquO+3usMNCpJQ0wqO+3GhFdl6Fow==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
]
},
"node_modules/@rollup/rollup-freebsd-x64": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.55.2.tgz",
"integrity": "sha512-Ng95wtHVEulRwn7R0tMrlUuiLVL/HXA8Lt/MYVpy88+s5ikpntzZba1qEulTuPnPIZuOPcW9wNEiqvZxZmgmqQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
]
},
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.55.2.tgz",
"integrity": "sha512-AEXMESUDWWGqD6LwO/HkqCZgUE1VCJ1OhbvYGsfqX2Y6w5quSXuyoy/Fg3nRqiwro+cJYFxiw5v4kB2ZDLhxrw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.55.2.tgz",
"integrity": "sha512-ZV7EljjBDwBBBSv570VWj0hiNTdHt9uGznDtznBB4Caj3ch5rgD4I2K1GQrtbvJ/QiB+663lLgOdcADMNVC29Q==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.55.2.tgz",
"integrity": "sha512-uvjwc8NtQVPAJtq4Tt7Q49FOodjfbf6NpqXyW/rjXoV+iZ3EJAHLNAnKT5UJBc6ffQVgmXTUL2ifYiLABlGFqA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.55.2.tgz",
"integrity": "sha512-s3KoWVNnye9mm/2WpOZ3JeUiediUVw6AvY/H7jNA6qgKA2V2aM25lMkVarTDfiicn/DLq3O0a81jncXszoyCFA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-loong64-gnu": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.55.2.tgz",
"integrity": "sha512-gi21faacK+J8aVSyAUptML9VQN26JRxe484IbF+h3hpG+sNVoMXPduhREz2CcYr5my0NE3MjVvQ5bMKX71pfVA==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-loong64-musl": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.55.2.tgz",
"integrity": "sha512-qSlWiXnVaS/ceqXNfnoFZh4IiCA0EwvCivivTGbEu1qv2o+WTHpn1zNmCTAoOG5QaVr2/yhCoLScQtc/7RxshA==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-ppc64-gnu": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.55.2.tgz",
"integrity": "sha512-rPyuLFNoF1B0+wolH277E780NUKf+KoEDb3OyoLbAO18BbeKi++YN6gC/zuJoPPDlQRL3fIxHxCxVEWiem2yXw==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-ppc64-musl": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.55.2.tgz",
"integrity": "sha512-g+0ZLMook31iWV4PvqKU0i9E78gaZgYpSrYPed/4Bu+nGTgfOPtfs1h11tSSRPXSjC5EzLTjV/1A7L2Vr8pJoQ==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.55.2.tgz",
"integrity": "sha512-i+sGeRGsjKZcQRh3BRfpLsM3LX3bi4AoEVqmGDyc50L6KfYsN45wVCSz70iQMwPWr3E5opSiLOwsC9WB4/1pqg==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-riscv64-musl": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.55.2.tgz",
"integrity": "sha512-C1vLcKc4MfFV6I0aWsC7B2Y9QcsiEcvKkfxprwkPfLaN8hQf0/fKHwSF2lcYzA9g4imqnhic729VB9Fo70HO3Q==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.55.2.tgz",
"integrity": "sha512-68gHUK/howpQjh7g7hlD9DvTTt4sNLp1Bb+Yzw2Ki0xvscm2cOdCLZNJNhd2jW8lsTPrHAHuF751BygifW4bkQ==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.55.2.tgz",
"integrity": "sha512-1e30XAuaBP1MAizaOBApsgeGZge2/Byd6wV4a8oa6jPdHELbRHBiw7wvo4dp7Ie2PE8TZT4pj9RLGZv9N4qwlw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.55.2.tgz",
"integrity": "sha512-4BJucJBGbuGnH6q7kpPqGJGzZnYrpAzRd60HQSt3OpX/6/YVgSsJnNzR8Ot74io50SeVT4CtCWe/RYIAymFPwA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-openbsd-x64": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.55.2.tgz",
"integrity": "sha512-cT2MmXySMo58ENv8p6/O6wI/h/gLnD3D6JoajwXFZH6X9jz4hARqUhWpGuQhOgLNXscfZYRQMJvZDtWNzMAIDw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
]
},
"node_modules/@rollup/rollup-openharmony-arm64": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.55.2.tgz",
"integrity": "sha512-sZnyUgGkuzIXaK3jNMPmUIyJrxu/PjmATQrocpGA1WbCPX8H5tfGgRSuYtqBYAvLuIGp8SPRb1O4d1Fkb5fXaQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openharmony"
]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.55.2.tgz",
"integrity": "sha512-sDpFbenhmWjNcEbBcoTV0PWvW5rPJFvu+P7XoTY0YLGRupgLbFY0XPfwIbJOObzO7QgkRDANh65RjhPmgSaAjQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.55.2.tgz",
"integrity": "sha512-GvJ03TqqaweWCigtKQVBErw2bEhu1tyfNQbarwr94wCGnczA9HF8wqEe3U/Lfu6EdeNP0p6R+APeHVwEqVxpUQ==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-x64-gnu": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.55.2.tgz",
"integrity": "sha512-KvXsBvp13oZz9JGe5NYS7FNizLe99Ny+W8ETsuCyjXiKdiGrcz2/J/N8qxZ/RSwivqjQguug07NLHqrIHrqfYw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.55.2.tgz",
"integrity": "sha512-xNO+fksQhsAckRtDSPWaMeT1uIM+JrDRXlerpnWNXhn1TdB3YZ6uKBMBTKP0eX9XtYEP978hHk1f8332i2AW8Q==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@schematics/angular": {
"version": "21.1.0",
"resolved": "https://registry.npmjs.org/@schematics/angular/-/angular-21.1.0.tgz",
"integrity": "sha512-gXf3gO5SeU+tiPHxXeQvdbua4C4/V+KH43JH2PYPxaNCD2HGo1uV0pfyNSNgcVF21voKlbAQ13YRrNDh7z5Kig==",
"dev": true,
"license": "MIT",
"dependencies": {
"@angular-devkit/core": "21.1.0",
"@angular-devkit/schematics": "21.1.0",
"jsonc-parser": "3.3.1"
},
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=24.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
}
},
"node_modules/@sigstore/bundle": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-4.0.0.tgz",
"integrity": "sha512-NwCl5Y0V6Di0NexvkTqdoVfmjTaQwoLM236r89KEojGmq/jMls8S+zb7yOwAPdXvbwfKDlP+lmXgAL4vKSQT+A==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@sigstore/protobuf-specs": "^0.5.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@sigstore/core": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@sigstore/core/-/core-3.1.0.tgz",
"integrity": "sha512-o5cw1QYhNQ9IroioJxpzexmPjfCe7gzafd2RY3qnMpxr4ZEja+Jad/U8sgFpaue6bOaF+z7RVkyKVV44FN+N8A==",
"dev": true,
"license": "Apache-2.0",
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@sigstore/protobuf-specs": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
"integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
"dev": true,
"license": "Apache-2.0",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@sigstore/sign": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-4.1.0.tgz",
"integrity": "sha512-Vx1RmLxLGnSUqx/o5/VsCjkuN5L7y+vxEEwawvc7u+6WtX2W4GNa7b9HEjmcRWohw/d6BpATXmvOwc78m+Swdg==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@sigstore/bundle": "^4.0.0",
"@sigstore/core": "^3.1.0",
"@sigstore/protobuf-specs": "^0.5.0",
"make-fetch-happen": "^15.0.3",
"proc-log": "^6.1.0",
"promise-retry": "^2.0.1"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@sigstore/tuf": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-4.0.1.tgz",
"integrity": "sha512-OPZBg8y5Vc9yZjmWCHrlWPMBqW5yd8+wFNl+thMdtcWz3vjVSoJQutF8YkrzI0SLGnkuFof4HSsWUhXrf219Lw==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@sigstore/protobuf-specs": "^0.5.0",
"tuf-js": "^4.1.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@sigstore/verify": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-3.1.0.tgz",
"integrity": "sha512-mNe0Iigql08YupSOGv197YdHpPPr+EzDZmfCgMc7RPNaZTw5aLN01nBl6CHJOh3BGtnMIj83EeN4butBchc8Ag==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@sigstore/bundle": "^4.0.0",
"@sigstore/core": "^3.1.0",
"@sigstore/protobuf-specs": "^0.5.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@standard-schema/spec": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz",
"integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==",
"license": "MIT"
},
"node_modules/@tufjs/canonical-json": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-2.0.0.tgz",
"integrity": "sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==",
"dev": true,
"license": "MIT",
"engines": {
"node": "^16.14.0 || >=18.0.0"
}
},
"node_modules/@tufjs/models": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@tufjs/models/-/models-4.1.0.tgz",
"integrity": "sha512-Y8cK9aggNRsqJVaKUlEYs4s7CvQ1b1ta2DVPyAimb0I2qhzjNk+A+mxvll/klL0RlfuIUei8BF7YWiua4kQqww==",
"dev": true,
"license": "MIT",
"dependencies": {
"@tufjs/canonical-json": "2.0.0",
"minimatch": "^10.1.1"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/@tybys/wasm-util": {
"version": "0.10.1",
"resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz",
"integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"tslib": "^2.4.0"
}
},
"node_modules/@types/estree": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
"integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
"dev": true,
"license": "MIT"
},
"node_modules/@vitejs/plugin-basic-ssl": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@vitejs/plugin-basic-ssl/-/plugin-basic-ssl-2.1.0.tgz",
"integrity": "sha512-dOxxrhgyDIEUADhb/8OlV9JIqYLgos03YorAueTIeOUskLJSEsfwCByjbu98ctXitUN3znXKp0bYD/WHSudCeA==",
"dev": true,
"license": "MIT",
"engines": {
"node": "^18.0.0 || ^20.0.0 || >=22.0.0"
},
"peerDependencies": {
"vite": "^6.0.0 || ^7.0.0"
}
},
"node_modules/@yarnpkg/lockfile": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz",
"integrity": "sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==",
"dev": true,
"license": "BSD-2-Clause"
},
"node_modules/abbrev": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-4.0.0.tgz",
"integrity": "sha512-a1wflyaL0tHtJSmLSOVybYhy22vRih4eduhhrkcjgrWGnRfrZtovJ2FRjxuTtkkj47O/baf0R86QU5OuYpz8fA==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/accepts": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz",
"integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==",
"dev": true,
"license": "MIT",
"dependencies": {
"mime-types": "^3.0.0",
"negotiator": "^1.0.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/agent-base": {
"version": "7.1.4",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz",
"integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 14"
}
},
"node_modules/ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"dev": true,
"license": "MIT",
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
"json-schema-traverse": "^1.0.0",
"require-from-string": "^2.0.2"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/ajv-formats": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz",
"integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"ajv": "^8.0.0"
},
"peerDependencies": {
"ajv": "^8.0.0"
},
"peerDependenciesMeta": {
"ajv": {
"optional": true
}
}
},
"node_modules/algoliasearch": {
"version": "5.46.2",
"resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-5.46.2.tgz",
"integrity": "sha512-qqAXW9QvKf2tTyhpDA4qXv1IfBwD2eduSW6tUEBFIfCeE9gn9HQ9I5+MaKoenRuHrzk5sQoNh1/iof8mY7uD6Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@algolia/abtesting": "1.12.2",
"@algolia/client-abtesting": "5.46.2",
"@algolia/client-analytics": "5.46.2",
"@algolia/client-common": "5.46.2",
"@algolia/client-insights": "5.46.2",
"@algolia/client-personalization": "5.46.2",
"@algolia/client-query-suggestions": "5.46.2",
"@algolia/client-search": "5.46.2",
"@algolia/ingestion": "1.46.2",
"@algolia/monitoring": "1.46.2",
"@algolia/recommend": "5.46.2",
"@algolia/requester-browser-xhr": "5.46.2",
"@algolia/requester-fetch": "5.46.2",
"@algolia/requester-node-http": "5.46.2"
},
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/ansi-escapes": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.2.0.tgz",
"integrity": "sha512-g6LhBsl+GBPRWGWsBtutpzBYuIIdBkLEvad5C/va/74Db018+5TZiyA26cZJAr3Rft5lprVqOIPxf5Vid6tqAw==",
"dev": true,
"license": "MIT",
"dependencies": {
"environment": "^1.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/ansi-regex": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
"integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
"node_modules/ansi-styles": {
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/baseline-browser-mapping": {
"version": "2.9.15",
"resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.15.tgz",
"integrity": "sha512-kX8h7K2srmDyYnXRIppo4AH/wYgzWVCs+eKr3RusRSQ5PvRYoEFmR/I0PbdTjKFAoKqp5+kbxnNTFO9jOfSVJg==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"baseline-browser-mapping": "dist/cli.js"
}
},
"node_modules/beasties": {
"version": "0.3.5",
"resolved": "https://registry.npmjs.org/beasties/-/beasties-0.3.5.tgz",
"integrity": "sha512-NaWu+f4YrJxEttJSm16AzMIFtVldCvaJ68b1L098KpqXmxt9xOLtKoLkKxb8ekhOrLqEJAbvT6n6SEvB/sac7A==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"css-select": "^6.0.0",
"css-what": "^7.0.0",
"dom-serializer": "^2.0.0",
"domhandler": "^5.0.3",
"htmlparser2": "^10.0.0",
"picocolors": "^1.1.1",
"postcss": "^8.4.49",
"postcss-media-query-parser": "^0.2.3"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/body-parser": {
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.2.tgz",
"integrity": "sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==",
"dev": true,
"license": "MIT",
"dependencies": {
"bytes": "^3.1.2",
"content-type": "^1.0.5",
"debug": "^4.4.3",
"http-errors": "^2.0.0",
"iconv-lite": "^0.7.0",
"on-finished": "^2.4.1",
"qs": "^6.14.1",
"raw-body": "^3.0.1",
"type-is": "^2.0.1"
},
"engines": {
"node": ">=18"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/boolbase": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
"integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==",
"dev": true,
"license": "ISC"
},
"node_modules/browserslist": {
"version": "4.28.1",
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz",
"integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/browserslist"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"baseline-browser-mapping": "^2.9.0",
"caniuse-lite": "^1.0.30001759",
"electron-to-chromium": "^1.5.263",
"node-releases": "^2.0.27",
"update-browserslist-db": "^1.2.0"
},
"bin": {
"browserslist": "cli.js"
},
"engines": {
"node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7"
}
},
"node_modules/buffer-from": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
"dev": true,
"license": "MIT"
},
"node_modules/bytes": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/cacache": {
"version": "20.0.3",
"resolved": "https://registry.npmjs.org/cacache/-/cacache-20.0.3.tgz",
"integrity": "sha512-3pUp4e8hv07k1QlijZu6Kn7c9+ZpWWk4j3F8N3xPuCExULobqJydKYOTj1FTq58srkJsXvO7LbGAH4C0ZU3WGw==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/fs": "^5.0.0",
"fs-minipass": "^3.0.0",
"glob": "^13.0.0",
"lru-cache": "^11.1.0",
"minipass": "^7.0.3",
"minipass-collect": "^2.0.1",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
"p-map": "^7.0.2",
"ssri": "^13.0.0",
"unique-filename": "^5.0.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/cacache/node_modules/lru-cache": {
"version": "11.2.4",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz",
"integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==",
"dev": true,
"license": "BlueOak-1.0.0",
"engines": {
"node": "20 || >=22"
}
},
"node_modules/call-bind-apply-helpers": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
"integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/call-bound": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
"integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.2",
"get-intrinsic": "^1.3.0"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/caniuse-lite": {
"version": "1.0.30001765",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001765.tgz",
"integrity": "sha512-LWcNtSyZrakjECqmpP4qdg0MMGdN368D7X8XvvAqOcqMv0RxnlqVKZl2V6/mBR68oYMxOZPLw/gO7DuisMHUvQ==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/caniuse-lite"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "CC-BY-4.0"
},
"node_modules/chalk": {
"version": "5.6.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz",
"integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==",
"dev": true,
"license": "MIT",
"engines": {
"node": "^12.17.0 || ^14.13 || >=16.0.0"
},
"funding": {
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
"node_modules/chardet": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/chardet/-/chardet-2.1.1.tgz",
"integrity": "sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==",
"dev": true,
"license": "MIT"
},
"node_modules/chokidar": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-5.0.0.tgz",
"integrity": "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"readdirp": "^5.0.0"
},
"engines": {
"node": ">= 20.19.0"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/chownr": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
"integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
"dev": true,
"license": "BlueOak-1.0.0",
"engines": {
"node": ">=18"
}
},
"node_modules/cli-cursor": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz",
"integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==",
"dev": true,
"license": "MIT",
"dependencies": {
"restore-cursor": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-spinners": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-3.4.0.tgz",
"integrity": "sha512-bXfOC4QcT1tKXGorxL3wbJm6XJPDqEnij2gQ2m7ESQuE+/z9YFIWnl/5RpTiKWbMq3EVKR4fRLJGn6DVfu0mpw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18.20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-truncate": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-5.1.1.tgz",
"integrity": "sha512-SroPvNHxUnk+vIW/dOSfNqdy1sPEFkrTk6TUtqLCnBlo3N7TNYYkzzN7uSD6+jVjrdO4+p8nH7JzH6cIvUem6A==",
"dev": true,
"license": "MIT",
"dependencies": {
"slice-ansi": "^7.1.0",
"string-width": "^8.0.0"
},
"engines": {
"node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-width": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz",
"integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">= 12"
}
},
"node_modules/cliui": {
"version": "9.0.1",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-9.0.1.tgz",
"integrity": "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==",
"dev": true,
"license": "ISC",
"dependencies": {
"string-width": "^7.2.0",
"strip-ansi": "^7.1.0",
"wrap-ansi": "^9.0.0"
},
"engines": {
"node": ">=20"
}
},
"node_modules/cliui/node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cliui/node_modules/wrap-ansi": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
"integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"color-name": "~1.1.4"
},
"engines": {
"node": ">=7.0.0"
}
},
"node_modules/color-name": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"dev": true,
"license": "MIT"
},
"node_modules/colorette": {
"version": "2.0.20",
"resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
"integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==",
"dev": true,
"license": "MIT"
},
"node_modules/content-disposition": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz",
"integrity": "sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/content-type": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/convert-source-map": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz",
"integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==",
"dev": true,
"license": "MIT"
},
"node_modules/cookie": {
"version": "0.7.2",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
"integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/cookie-signature": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz",
"integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.6.0"
}
},
"node_modules/cors": {
"version": "2.8.5",
"resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz",
"integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==",
"dev": true,
"license": "MIT",
"dependencies": {
"object-assign": "^4",
"vary": "^1"
},
"engines": {
"node": ">= 0.10"
}
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"dev": true,
"license": "MIT",
"dependencies": {
"path-key": "^3.1.0",
"shebang-command": "^2.0.0",
"which": "^2.0.1"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/css-select": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/css-select/-/css-select-6.0.0.tgz",
"integrity": "sha512-rZZVSLle8v0+EY8QAkDWrKhpgt6SA5OtHsgBnsj6ZaLb5dmDVOWUDtQitd9ydxxvEjhewNudS6eTVU7uOyzvXw==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"boolbase": "^1.0.0",
"css-what": "^7.0.0",
"domhandler": "^5.0.3",
"domutils": "^3.2.2",
"nth-check": "^2.1.1"
},
"funding": {
"url": "https://github.com/sponsors/fb55"
}
},
"node_modules/css-what": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/css-what/-/css-what-7.0.0.tgz",
"integrity": "sha512-wD5oz5xibMOPHzy13CyGmogB3phdvcDaB5t0W/Nr5Z2O/agcB8YwOz6e2Lsp10pNDzBoDO9nVa3RGs/2BttpHQ==",
"dev": true,
"license": "BSD-2-Clause",
"engines": {
"node": ">= 6"
},
"funding": {
"url": "https://github.com/sponsors/fb55"
}
},
"node_modules/debug": {
"version": "4.4.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/depd": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
"integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/detect-libc": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz",
"integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==",
"dev": true,
"license": "Apache-2.0",
"optional": true,
"engines": {
"node": ">=8"
}
},
"node_modules/dom-serializer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
"integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
"dev": true,
"license": "MIT",
"dependencies": {
"domelementtype": "^2.3.0",
"domhandler": "^5.0.2",
"entities": "^4.2.0"
},
"funding": {
"url": "https://github.com/cheeriojs/dom-serializer?sponsor=1"
}
},
"node_modules/domelementtype": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz",
"integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/fb55"
}
],
"license": "BSD-2-Clause"
},
"node_modules/domhandler": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
"integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"domelementtype": "^2.3.0"
},
"engines": {
"node": ">= 4"
},
"funding": {
"url": "https://github.com/fb55/domhandler?sponsor=1"
}
},
"node_modules/domutils": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz",
"integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"dom-serializer": "^2.0.0",
"domelementtype": "^2.3.0",
"domhandler": "^5.0.3"
},
"funding": {
"url": "https://github.com/fb55/domutils?sponsor=1"
}
},
"node_modules/dunder-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.1",
"es-errors": "^1.3.0",
"gopd": "^1.2.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/ee-first": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
"integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==",
"dev": true,
"license": "MIT"
},
"node_modules/electron-to-chromium": {
"version": "1.5.267",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz",
"integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==",
"dev": true,
"license": "ISC"
},
"node_modules/emoji-regex": {
"version": "10.6.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
"integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==",
"dev": true,
"license": "MIT"
},
"node_modules/encodeurl": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz",
"integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/encoding": {
"version": "0.1.13",
"resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz",
"integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"iconv-lite": "^0.6.2"
}
},
"node_modules/encoding/node_modules/iconv-lite": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/entities": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
"dev": true,
"license": "BSD-2-Clause",
"engines": {
"node": ">=0.12"
},
"funding": {
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/env-paths": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz",
"integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/environment": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz",
"integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/err-code": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz",
"integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==",
"dev": true,
"license": "MIT"
},
"node_modules/es-define-property": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-errors": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-object-atoms": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
"integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/esbuild": {
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.2.tgz",
"integrity": "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
"node": ">=18"
},
"optionalDependencies": {
"@esbuild/aix-ppc64": "0.27.2",
"@esbuild/android-arm": "0.27.2",
"@esbuild/android-arm64": "0.27.2",
"@esbuild/android-x64": "0.27.2",
"@esbuild/darwin-arm64": "0.27.2",
"@esbuild/darwin-x64": "0.27.2",
"@esbuild/freebsd-arm64": "0.27.2",
"@esbuild/freebsd-x64": "0.27.2",
"@esbuild/linux-arm": "0.27.2",
"@esbuild/linux-arm64": "0.27.2",
"@esbuild/linux-ia32": "0.27.2",
"@esbuild/linux-loong64": "0.27.2",
"@esbuild/linux-mips64el": "0.27.2",
"@esbuild/linux-ppc64": "0.27.2",
"@esbuild/linux-riscv64": "0.27.2",
"@esbuild/linux-s390x": "0.27.2",
"@esbuild/linux-x64": "0.27.2",
"@esbuild/netbsd-arm64": "0.27.2",
"@esbuild/netbsd-x64": "0.27.2",
"@esbuild/openbsd-arm64": "0.27.2",
"@esbuild/openbsd-x64": "0.27.2",
"@esbuild/openharmony-arm64": "0.27.2",
"@esbuild/sunos-x64": "0.27.2",
"@esbuild/win32-arm64": "0.27.2",
"@esbuild/win32-ia32": "0.27.2",
"@esbuild/win32-x64": "0.27.2"
}
},
"node_modules/escalade": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
"integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/escape-html": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
"integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==",
"dev": true,
"license": "MIT"
},
"node_modules/etag": {
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
"integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/eventemitter3": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.4.tgz",
"integrity": "sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==",
"dev": true,
"license": "MIT"
},
"node_modules/eventsource": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz",
"integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==",
"dev": true,
"license": "MIT",
"dependencies": {
"eventsource-parser": "^3.0.1"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/eventsource-parser": {
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.6.tgz",
"integrity": "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/exponential-backoff": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.3.tgz",
"integrity": "sha512-ZgEeZXj30q+I0EN+CbSSpIyPaJ5HVQD18Z1m+u1FXbAeT94mr1zw50q4q6jiiC447Nl/YTcIYSAftiGqetwXCA==",
"dev": true,
"license": "Apache-2.0"
},
"node_modules/express": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/express/-/express-5.2.1.tgz",
"integrity": "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"accepts": "^2.0.0",
"body-parser": "^2.2.1",
"content-disposition": "^1.0.0",
"content-type": "^1.0.5",
"cookie": "^0.7.1",
"cookie-signature": "^1.2.1",
"debug": "^4.4.0",
"depd": "^2.0.0",
"encodeurl": "^2.0.0",
"escape-html": "^1.0.3",
"etag": "^1.8.1",
"finalhandler": "^2.1.0",
"fresh": "^2.0.0",
"http-errors": "^2.0.0",
"merge-descriptors": "^2.0.0",
"mime-types": "^3.0.0",
"on-finished": "^2.4.1",
"once": "^1.4.0",
"parseurl": "^1.3.3",
"proxy-addr": "^2.0.7",
"qs": "^6.14.0",
"range-parser": "^1.2.1",
"router": "^2.2.0",
"send": "^1.1.0",
"serve-static": "^2.2.0",
"statuses": "^2.0.1",
"type-is": "^2.0.1",
"vary": "^1.1.2"
},
"engines": {
"node": ">= 18"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/express-rate-limit": {
"version": "7.5.1",
"resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz",
"integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 16"
},
"funding": {
"url": "https://github.com/sponsors/express-rate-limit"
},
"peerDependencies": {
"express": ">= 4.11"
}
},
"node_modules/fast-deep-equal": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
"dev": true,
"license": "MIT"
},
"node_modules/fast-uri": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz",
"integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/fastify"
},
{
"type": "opencollective",
"url": "https://opencollective.com/fastify"
}
],
"license": "BSD-3-Clause"
},
"node_modules/fdir": {
"version": "6.5.0",
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
"integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12.0.0"
},
"peerDependencies": {
"picomatch": "^3 || ^4"
},
"peerDependenciesMeta": {
"picomatch": {
"optional": true
}
}
},
"node_modules/finalhandler": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.1.tgz",
"integrity": "sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA==",
"dev": true,
"license": "MIT",
"dependencies": {
"debug": "^4.4.0",
"encodeurl": "^2.0.0",
"escape-html": "^1.0.3",
"on-finished": "^2.4.1",
"parseurl": "^1.3.3",
"statuses": "^2.0.1"
},
"engines": {
"node": ">= 18.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/forwarded": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
"integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/fresh": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz",
"integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/fs-minipass": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.3.tgz",
"integrity": "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^7.0.3"
},
"engines": {
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
}
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/function-bind": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/gensync": {
"version": "1.0.0-beta.2",
"resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
"integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/get-caller-file": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
"dev": true,
"license": "ISC",
"engines": {
"node": "6.* || 8.* || >= 10.*"
}
},
"node_modules/get-east-asian-width": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz",
"integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/get-intrinsic": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
"integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.2",
"es-define-property": "^1.0.1",
"es-errors": "^1.3.0",
"es-object-atoms": "^1.1.1",
"function-bind": "^1.1.2",
"get-proto": "^1.0.1",
"gopd": "^1.2.0",
"has-symbols": "^1.1.0",
"hasown": "^2.0.2",
"math-intrinsics": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/get-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
"integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
"dev": true,
"license": "MIT",
"dependencies": {
"dunder-proto": "^1.0.1",
"es-object-atoms": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/glob": {
"version": "13.0.0",
"resolved": "https://registry.npmjs.org/glob/-/glob-13.0.0.tgz",
"integrity": "sha512-tvZgpqk6fz4BaNZ66ZsRaZnbHvP/jG3uKJvAZOwEVUL4RTA5nJeeLYfyN9/VA8NX/V3IBG+hkeuGpKjvELkVhA==",
"dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
"minimatch": "^10.1.1",
"minipass": "^7.1.2",
"path-scurry": "^2.0.0"
},
"engines": {
"node": "20 || >=22"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/glob-to-regexp": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz",
"integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==",
"dev": true,
"license": "BSD-2-Clause"
},
"node_modules/gopd": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
"dev": true,
"license": "ISC"
},
"node_modules/has-symbols": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/hono": {
"version": "4.11.4",
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.4.tgz",
"integrity": "sha512-U7tt8JsyrxSRKspfhtLET79pU8K+tInj5QZXs1jSugO1Vq5dFj3kmZsRldo29mTBfcjDRVRXrEZ6LS63Cog9ZA==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": ">=16.9.0"
}
},
"node_modules/hosted-git-info": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.2.tgz",
"integrity": "sha512-M422h7o/BR3rmCQ8UHi7cyyMqKltdP9Uo+J2fXK+RSAY+wTcKOIRyhTuKv4qn+DJf3g+PL890AzId5KZpX+CBg==",
"dev": true,
"license": "ISC",
"dependencies": {
"lru-cache": "^11.1.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/hosted-git-info/node_modules/lru-cache": {
"version": "11.2.4",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz",
"integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==",
"dev": true,
"license": "BlueOak-1.0.0",
"engines": {
"node": "20 || >=22"
}
},
"node_modules/htmlparser2": {
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-10.0.0.tgz",
"integrity": "sha512-TwAZM+zE5Tq3lrEHvOlvwgj1XLWQCtaaibSN11Q+gGBAS7Y1uZSWwXXRe4iF6OXnaq1riyQAPFOBtYc77Mxq0g==",
"dev": true,
"funding": [
"https://github.com/fb55/htmlparser2?sponsor=1",
{
"type": "github",
"url": "https://github.com/sponsors/fb55"
}
],
"license": "MIT",
"dependencies": {
"domelementtype": "^2.3.0",
"domhandler": "^5.0.3",
"domutils": "^3.2.1",
"entities": "^6.0.0"
}
},
"node_modules/htmlparser2/node_modules/entities": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz",
"integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==",
"dev": true,
"license": "BSD-2-Clause",
"engines": {
"node": ">=0.12"
},
"funding": {
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/http-cache-semantics": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz",
"integrity": "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==",
"dev": true,
"license": "BSD-2-Clause"
},
"node_modules/http-errors": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
"integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"depd": "~2.0.0",
"inherits": "~2.0.4",
"setprototypeof": "~1.2.0",
"statuses": "~2.0.2",
"toidentifier": "~1.0.1"
},
"engines": {
"node": ">= 0.8"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/http-proxy-agent": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz",
"integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==",
"dev": true,
"license": "MIT",
"dependencies": {
"agent-base": "^7.1.0",
"debug": "^4.3.4"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/https-proxy-agent": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
"integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
"dev": true,
"license": "MIT",
"dependencies": {
"agent-base": "^7.1.2",
"debug": "4"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/iconv-lite": {
"version": "0.7.2",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.2.tgz",
"integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==",
"dev": true,
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/ignore-walk": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-8.0.0.tgz",
"integrity": "sha512-FCeMZT4NiRQGh+YkeKMtWrOmBgWjHjMJ26WQWrRQyoyzqevdaGSakUaJW5xQYmjLlUVk2qUnCjYVBax9EKKg8A==",
"dev": true,
"license": "ISC",
"dependencies": {
"minimatch": "^10.0.3"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/immutable": {
"version": "5.1.4",
"resolved": "https://registry.npmjs.org/immutable/-/immutable-5.1.4.tgz",
"integrity": "sha512-p6u1bG3YSnINT5RQmx/yRZBpenIl30kVxkTLDyHLIMk0gict704Q9n+thfDI7lTRm9vXdDYutVzXhzcThxTnXA==",
"dev": true,
"license": "MIT"
},
"node_modules/imurmurhash": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
"integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.8.19"
}
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
"dev": true,
"license": "ISC"
},
"node_modules/ini": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/ini/-/ini-6.0.0.tgz",
"integrity": "sha512-IBTdIkzZNOpqm7q3dRqJvMaldXjDHWkEDfrwGEQTs5eaQMWV+djAhR+wahyNNMAa+qpbDUhBMVt4ZKNwpPm7xQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/ip-address": {
"version": "10.1.0",
"resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz",
"integrity": "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 12"
}
},
"node_modules/ipaddr.js": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
"integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.10"
}
},
"node_modules/is-core-module": {
"version": "2.16.1",
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
"integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==",
"dev": true,
"license": "MIT",
"dependencies": {
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/is-extglob": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
"dev": true,
"license": "MIT",
"optional": true,
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-fullwidth-code-point": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.1.0.tgz",
"integrity": "sha512-5XHYaSyiqADb4RnZ1Bdad6cPp8Toise4TzEjcOYDHZkTCbKgiUl7WTUCpNWHuxmDt91wnsZBc9xinNzopv3JMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"get-east-asian-width": "^1.3.1"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/is-glob": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"is-extglob": "^2.1.1"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-interactive": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-2.0.0.tgz",
"integrity": "sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/is-promise": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz",
"integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==",
"dev": true,
"license": "MIT"
},
"node_modules/is-unicode-supported": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz",
"integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/isexe": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
"dev": true,
"license": "ISC"
},
"node_modules/istanbul-lib-coverage": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz",
"integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=8"
}
},
"node_modules/istanbul-lib-instrument": {
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz",
"integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"@babel/core": "^7.23.9",
"@babel/parser": "^7.23.9",
"@istanbuljs/schema": "^0.1.3",
"istanbul-lib-coverage": "^3.2.0",
"semver": "^7.5.4"
},
"engines": {
"node": ">=10"
}
},
"node_modules/jose": {
"version": "6.1.3",
"resolved": "https://registry.npmjs.org/jose/-/jose-6.1.3.tgz",
"integrity": "sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/panva"
}
},
"node_modules/js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
"dev": true,
"license": "MIT"
},
"node_modules/jsesc": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
"integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
"dev": true,
"license": "MIT",
"bin": {
"jsesc": "bin/jsesc"
},
"engines": {
"node": ">=6"
}
},
"node_modules/json-parse-even-better-errors": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-5.0.0.tgz",
"integrity": "sha512-ZF1nxZ28VhQouRWhUcVlUIN3qwSgPuswK05s/HIaoetAoE/9tngVmCHjSxmSQPav1nd+lPtTL0YZ/2AFdR/iYQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"dev": true,
"license": "MIT"
},
"node_modules/json-schema-typed": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/json-schema-typed/-/json-schema-typed-8.0.2.tgz",
"integrity": "sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==",
"dev": true,
"license": "BSD-2-Clause"
},
"node_modules/json5": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
"integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
"dev": true,
"license": "MIT",
"bin": {
"json5": "lib/cli.js"
},
"engines": {
"node": ">=6"
}
},
"node_modules/jsonc-parser": {
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.3.1.tgz",
"integrity": "sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ==",
"dev": true,
"license": "MIT"
},
"node_modules/jsonparse": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz",
"integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==",
"dev": true,
"engines": [
"node >= 0.2.0"
],
"license": "MIT"
},
"node_modules/listr2": {
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/listr2/-/listr2-9.0.5.tgz",
"integrity": "sha512-ME4Fb83LgEgwNw96RKNvKV4VTLuXfoKudAmm2lP8Kk87KaMK0/Xrx/aAkMWmT8mDb+3MlFDspfbCs7adjRxA2g==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"cli-truncate": "^5.0.0",
"colorette": "^2.0.20",
"eventemitter3": "^5.0.1",
"log-update": "^6.1.0",
"rfdc": "^1.4.1",
"wrap-ansi": "^9.0.0"
},
"engines": {
"node": ">=20.0.0"
}
},
"node_modules/listr2/node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/listr2/node_modules/wrap-ansi": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
"integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/lmdb": {
"version": "3.4.4",
"resolved": "https://registry.npmjs.org/lmdb/-/lmdb-3.4.4.tgz",
"integrity": "sha512-+Y2DqovevLkb6DrSQ6SXTYLEd6kvlRbhsxzgJrk7BUfOVA/mt21ak6pFDZDKxiAczHMWxrb02kXBTSTIA0O94A==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"dependencies": {
"msgpackr": "^1.11.2",
"node-addon-api": "^6.1.0",
"node-gyp-build-optional-packages": "5.2.2",
"ordered-binary": "^1.5.3",
"weak-lru-cache": "^1.2.2"
},
"bin": {
"download-lmdb-prebuilds": "bin/download-prebuilds.js"
},
"optionalDependencies": {
"@lmdb/lmdb-darwin-arm64": "3.4.4",
"@lmdb/lmdb-darwin-x64": "3.4.4",
"@lmdb/lmdb-linux-arm": "3.4.4",
"@lmdb/lmdb-linux-arm64": "3.4.4",
"@lmdb/lmdb-linux-x64": "3.4.4",
"@lmdb/lmdb-win32-arm64": "3.4.4",
"@lmdb/lmdb-win32-x64": "3.4.4"
}
},
"node_modules/log-symbols": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-7.0.1.tgz",
"integrity": "sha512-ja1E3yCr9i/0hmBVaM0bfwDjnGy8I/s6PP4DFp+yP+a+mrHO4Rm7DtmnqROTUkHIkqffC84YY7AeqX6oFk0WFg==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-unicode-supported": "^2.0.0",
"yoctocolors": "^2.1.1"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log-update": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz",
"integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-escapes": "^7.0.0",
"cli-cursor": "^5.0.0",
"slice-ansi": "^7.1.0",
"strip-ansi": "^7.1.0",
"wrap-ansi": "^9.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log-update/node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log-update/node_modules/wrap-ansi": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
"integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/lru-cache": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
"integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
"dev": true,
"license": "ISC",
"dependencies": {
"yallist": "^3.0.2"
}
},
"node_modules/magic-string": {
"version": "0.30.21",
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz",
"integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/sourcemap-codec": "^1.5.5"
}
},
"node_modules/make-fetch-happen": {
"version": "15.0.3",
"resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.3.tgz",
"integrity": "sha512-iyyEpDty1mwW3dGlYXAJqC/azFn5PPvgKVwXayOGBSmKLxhKZ9fg4qIan2ePpp1vJIwfFiO34LAPZgq9SZW9Aw==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/agent": "^4.0.0",
"cacache": "^20.0.1",
"http-cache-semantics": "^4.1.1",
"minipass": "^7.0.2",
"minipass-fetch": "^5.0.0",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
"negotiator": "^1.0.0",
"proc-log": "^6.0.0",
"promise-retry": "^2.0.1",
"ssri": "^13.0.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/media-typer": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz",
"integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/merge-descriptors": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz",
"integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/mime-db": {
"version": "1.54.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz",
"integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz",
"integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==",
"dev": true,
"license": "MIT",
"dependencies": {
"mime-db": "^1.54.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/mimic-function": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz",
"integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/minimatch": {
"version": "10.1.1",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz",
"integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==",
"dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
"@isaacs/brace-expansion": "^5.0.0"
},
"engines": {
"node": "20 || >=22"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/minipass": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
"integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=16 || 14 >=14.17"
}
},
"node_modules/minipass-collect": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-2.0.1.tgz",
"integrity": "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^7.0.3"
},
"engines": {
"node": ">=16 || 14 >=14.17"
}
},
"node_modules/minipass-fetch": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-5.0.0.tgz",
"integrity": "sha512-fiCdUALipqgPWrOVTz9fw0XhcazULXOSU6ie40DDbX1F49p1dBrSRBuswndTx1x3vEb/g0FT7vC4c4C2u/mh3A==",
"dev": true,
"license": "MIT",
"dependencies": {
"minipass": "^7.0.3",
"minipass-sized": "^1.0.3",
"minizlib": "^3.0.1"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
},
"optionalDependencies": {
"encoding": "^0.1.13"
}
},
"node_modules/minipass-flush": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz",
"integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^3.0.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/minipass-flush/node_modules/minipass": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"dev": true,
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/minipass-flush/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"dev": true,
"license": "ISC"
},
"node_modules/minipass-pipeline": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz",
"integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^3.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/minipass-pipeline/node_modules/minipass": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"dev": true,
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/minipass-pipeline/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"dev": true,
"license": "ISC"
},
"node_modules/minipass-sized": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz",
"integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^3.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/minipass-sized/node_modules/minipass": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"dev": true,
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/minipass-sized/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"dev": true,
"license": "ISC"
},
"node_modules/minizlib": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz",
"integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==",
"dev": true,
"license": "MIT",
"dependencies": {
"minipass": "^7.1.2"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/mrmime": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz",
"integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=10"
}
},
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"dev": true,
"license": "MIT"
},
"node_modules/msgpackr": {
"version": "1.11.8",
"resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.8.tgz",
"integrity": "sha512-bC4UGzHhVvgDNS7kn9tV8fAucIYUBuGojcaLiz7v+P63Lmtm0Xeji8B/8tYKddALXxJLpwIeBmUN3u64C4YkRA==",
"dev": true,
"license": "MIT",
"optional": true,
"optionalDependencies": {
"msgpackr-extract": "^3.0.2"
}
},
"node_modules/msgpackr-extract": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz",
"integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"dependencies": {
"node-gyp-build-optional-packages": "5.2.2"
},
"bin": {
"download-msgpackr-prebuilds": "bin/download-prebuilds.js"
},
"optionalDependencies": {
"@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3",
"@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3",
"@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3",
"@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3",
"@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3",
"@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3"
}
},
"node_modules/mute-stream": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-2.0.0.tgz",
"integrity": "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/nanoid": {
"version": "3.3.11",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
"integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"bin": {
"nanoid": "bin/nanoid.cjs"
},
"engines": {
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
}
},
"node_modules/negotiator": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
"integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/node-addon-api": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-6.1.0.tgz",
"integrity": "sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==",
"dev": true,
"license": "MIT",
"optional": true
},
"node_modules/node-gyp": {
"version": "12.1.0",
"resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-12.1.0.tgz",
"integrity": "sha512-W+RYA8jBnhSr2vrTtlPYPc1K+CSjGpVDRZxcqJcERZ8ND3A1ThWPHRwctTx3qC3oW99jt726jhdz3Y6ky87J4g==",
"dev": true,
"license": "MIT",
"dependencies": {
"env-paths": "^2.2.0",
"exponential-backoff": "^3.1.1",
"graceful-fs": "^4.2.6",
"make-fetch-happen": "^15.0.0",
"nopt": "^9.0.0",
"proc-log": "^6.0.0",
"semver": "^7.3.5",
"tar": "^7.5.2",
"tinyglobby": "^0.2.12",
"which": "^6.0.0"
},
"bin": {
"node-gyp": "bin/node-gyp.js"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/node-gyp-build-optional-packages": {
"version": "5.2.2",
"resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz",
"integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"detect-libc": "^2.0.1"
},
"bin": {
"node-gyp-build-optional-packages": "bin.js",
"node-gyp-build-optional-packages-optional": "optional.js",
"node-gyp-build-optional-packages-test": "build-test.js"
}
},
"node_modules/node-gyp/node_modules/isexe": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
"integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=16"
}
},
"node_modules/node-gyp/node_modules/which": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/which/-/which-6.0.0.tgz",
"integrity": "sha512-f+gEpIKMR9faW/JgAgPK1D7mekkFoqbmiwvNzuhsHetni20QSgzg9Vhn0g2JSJkkfehQnqdUAx7/e15qS1lPxg==",
"dev": true,
"license": "ISC",
"dependencies": {
"isexe": "^3.1.1"
},
"bin": {
"node-which": "bin/which.js"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/node-releases": {
"version": "2.0.27",
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz",
"integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==",
"dev": true,
"license": "MIT"
},
"node_modules/nopt": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-9.0.0.tgz",
"integrity": "sha512-Zhq3a+yFKrYwSBluL4H9XP3m3y5uvQkB/09CwDruCiRmR/UJYnn9W4R48ry0uGC70aeTPKLynBtscP9efFFcPw==",
"dev": true,
"license": "ISC",
"dependencies": {
"abbrev": "^4.0.0"
},
"bin": {
"nopt": "bin/nopt.js"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/npm-bundled": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-5.0.0.tgz",
"integrity": "sha512-JLSpbzh6UUXIEoqPsYBvVNVmyrjVZ1fzEFbqxKkTJQkWBO3xFzFT+KDnSKQWwOQNbuWRwt5LSD6HOTLGIWzfrw==",
"dev": true,
"license": "ISC",
"dependencies": {
"npm-normalize-package-bin": "^5.0.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/npm-install-checks": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-8.0.0.tgz",
"integrity": "sha512-ScAUdMpyzkbpxoNekQ3tNRdFI8SJ86wgKZSQZdUxT+bj0wVFpsEMWnkXP0twVe1gJyNF5apBWDJhhIbgrIViRA==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"semver": "^7.1.1"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/npm-normalize-package-bin": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-5.0.0.tgz",
"integrity": "sha512-CJi3OS4JLsNMmr2u07OJlhcrPxCeOeP/4xq67aWNai6TNWWbTrlNDgl8NcFKVlcBKp18GPj+EzbNIgrBfZhsag==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/npm-package-arg": {
"version": "13.0.2",
"resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.2.tgz",
"integrity": "sha512-IciCE3SY3uE84Ld8WZU23gAPPV9rIYod4F+rc+vJ7h7cwAJt9Vk6TVsK60ry7Uj3SRS3bqRRIGuTp9YVlk6WNA==",
"dev": true,
"license": "ISC",
"dependencies": {
"hosted-git-info": "^9.0.0",
"proc-log": "^6.0.0",
"semver": "^7.3.5",
"validate-npm-package-name": "^7.0.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/npm-packlist": {
"version": "10.0.3",
"resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-10.0.3.tgz",
"integrity": "sha512-zPukTwJMOu5X5uvm0fztwS5Zxyvmk38H/LfidkOMt3gbZVCyro2cD/ETzwzVPcWZA3JOyPznfUN/nkyFiyUbxg==",
"dev": true,
"license": "ISC",
"dependencies": {
"ignore-walk": "^8.0.0",
"proc-log": "^6.0.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/npm-pick-manifest": {
"version": "11.0.3",
"resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-11.0.3.tgz",
"integrity": "sha512-buzyCfeoGY/PxKqmBqn1IUJrZnUi1VVJTdSSRPGI60tJdUhUoSQFhs0zycJokDdOznQentgrpf8LayEHyyYlqQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"npm-install-checks": "^8.0.0",
"npm-normalize-package-bin": "^5.0.0",
"npm-package-arg": "^13.0.0",
"semver": "^7.3.5"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/npm-registry-fetch": {
"version": "19.1.1",
"resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-19.1.1.tgz",
"integrity": "sha512-TakBap6OM1w0H73VZVDf44iFXsOS3h+L4wVMXmbWOQroZgFhMch0juN6XSzBNlD965yIKvWg2dfu7NSiaYLxtw==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/redact": "^4.0.0",
"jsonparse": "^1.3.1",
"make-fetch-happen": "^15.0.0",
"minipass": "^7.0.2",
"minipass-fetch": "^5.0.0",
"minizlib": "^3.0.1",
"npm-package-arg": "^13.0.0",
"proc-log": "^6.0.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/nth-check": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
"integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"boolbase": "^1.0.0"
},
"funding": {
"url": "https://github.com/fb55/nth-check?sponsor=1"
}
},
"node_modules/object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/object-inspect": {
"version": "1.13.4",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz",
"integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/on-finished": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz",
"integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==",
"dev": true,
"license": "MIT",
"dependencies": {
"ee-first": "1.1.1"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"dev": true,
"license": "ISC",
"dependencies": {
"wrappy": "1"
}
},
"node_modules/onetime": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz",
"integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"mimic-function": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/ora": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/ora/-/ora-9.0.0.tgz",
"integrity": "sha512-m0pg2zscbYgWbqRR6ABga5c3sZdEon7bSgjnlXC64kxtxLOyjRcbbUkLj7HFyy/FTD+P2xdBWu8snGhYI0jc4A==",
"dev": true,
"license": "MIT",
"dependencies": {
"chalk": "^5.6.2",
"cli-cursor": "^5.0.0",
"cli-spinners": "^3.2.0",
"is-interactive": "^2.0.0",
"is-unicode-supported": "^2.1.0",
"log-symbols": "^7.0.1",
"stdin-discarder": "^0.2.2",
"string-width": "^8.1.0",
"strip-ansi": "^7.1.2"
},
"engines": {
"node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/ordered-binary": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/ordered-binary/-/ordered-binary-1.6.1.tgz",
"integrity": "sha512-QkCdPooczexPLiXIrbVOPYkR3VO3T6v2OyKRkR1Xbhpy7/LAVXwahnRCgRp78Oe/Ehf0C/HATAxfSr6eA1oX+w==",
"dev": true,
"license": "MIT",
"optional": true
},
"node_modules/p-map": {
"version": "7.0.4",
"resolved": "https://registry.npmjs.org/p-map/-/p-map-7.0.4.tgz",
"integrity": "sha512-tkAQEw8ysMzmkhgw8k+1U/iPhWNhykKnSk4Rd5zLoPJCuJaGRPo6YposrZgaxHKzDHdDWWZvE/Sk7hsL2X/CpQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/pacote": {
"version": "21.0.4",
"resolved": "https://registry.npmjs.org/pacote/-/pacote-21.0.4.tgz",
"integrity": "sha512-RplP/pDW0NNNDh3pnaoIWYPvNenS7UqMbXyvMqJczosiFWTeGGwJC2NQBLqKf4rGLFfwCOnntw1aEp9Jiqm1MA==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/git": "^7.0.0",
"@npmcli/installed-package-contents": "^4.0.0",
"@npmcli/package-json": "^7.0.0",
"@npmcli/promise-spawn": "^9.0.0",
"@npmcli/run-script": "^10.0.0",
"cacache": "^20.0.0",
"fs-minipass": "^3.0.0",
"minipass": "^7.0.2",
"npm-package-arg": "^13.0.0",
"npm-packlist": "^10.0.1",
"npm-pick-manifest": "^11.0.1",
"npm-registry-fetch": "^19.0.0",
"proc-log": "^6.0.0",
"promise-retry": "^2.0.1",
"sigstore": "^4.0.0",
"ssri": "^13.0.0",
"tar": "^7.4.3"
},
"bin": {
"pacote": "bin/index.js"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/parse5": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/parse5/-/parse5-8.0.0.tgz",
"integrity": "sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA==",
"dev": true,
"license": "MIT",
"dependencies": {
"entities": "^6.0.0"
},
"funding": {
"url": "https://github.com/inikulin/parse5?sponsor=1"
}
},
"node_modules/parse5-html-rewriting-stream": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/parse5-html-rewriting-stream/-/parse5-html-rewriting-stream-8.0.0.tgz",
"integrity": "sha512-wzh11mj8KKkno1pZEu+l2EVeWsuKDfR5KNWZOTsslfUX8lPDZx77m9T0kIoAVkFtD1nx6YF8oh4BnPHvxMtNMw==",
"dev": true,
"license": "MIT",
"dependencies": {
"entities": "^6.0.0",
"parse5": "^8.0.0",
"parse5-sax-parser": "^8.0.0"
},
"funding": {
"url": "https://github.com/inikulin/parse5?sponsor=1"
}
},
"node_modules/parse5-html-rewriting-stream/node_modules/entities": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz",
"integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==",
"dev": true,
"license": "BSD-2-Clause",
"engines": {
"node": ">=0.12"
},
"funding": {
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/parse5-sax-parser": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/parse5-sax-parser/-/parse5-sax-parser-8.0.0.tgz",
"integrity": "sha512-/dQ8UzHZwnrzs3EvDj6IkKrD/jIZyTlB+8XrHJvcjNgRdmWruNdN9i9RK/JtxakmlUdPwKubKPTCqvbTgzGhrw==",
"dev": true,
"license": "MIT",
"dependencies": {
"parse5": "^8.0.0"
},
"funding": {
"url": "https://github.com/inikulin/parse5?sponsor=1"
}
},
"node_modules/parse5/node_modules/entities": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz",
"integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==",
"dev": true,
"license": "BSD-2-Clause",
"engines": {
"node": ">=0.12"
},
"funding": {
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/parseurl": {
"version": "1.3.3",
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
"integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/path-key": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/path-parse": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
"dev": true,
"license": "MIT"
},
"node_modules/path-scurry": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.1.tgz",
"integrity": "sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA==",
"dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
"lru-cache": "^11.0.0",
"minipass": "^7.1.2"
},
"engines": {
"node": "20 || >=22"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/path-scurry/node_modules/lru-cache": {
"version": "11.2.4",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz",
"integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==",
"dev": true,
"license": "BlueOak-1.0.0",
"engines": {
"node": "20 || >=22"
}
},
"node_modules/path-to-regexp": {
"version": "8.3.0",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz",
"integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==",
"dev": true,
"license": "MIT",
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/picocolors": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
"dev": true,
"license": "ISC"
},
"node_modules/picomatch": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/piscina": {
"version": "5.1.4",
"resolved": "https://registry.npmjs.org/piscina/-/piscina-5.1.4.tgz",
"integrity": "sha512-7uU4ZnKeQq22t9AsmHGD2w4OYQGonwFnTypDypaWi7Qr2EvQIFVtG8J5D/3bE7W123Wdc9+v4CZDu5hJXVCtBg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=20.x"
},
"optionalDependencies": {
"@napi-rs/nice": "^1.0.4"
}
},
"node_modules/pkce-challenge": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.1.tgz",
"integrity": "sha512-wQ0b/W4Fr01qtpHlqSqspcj3EhBvimsdh0KlHhH8HRZnMsEa0ea2fTULOXOS9ccQr3om+GcGRk4e+isrZWV8qQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=16.20.0"
}
},
"node_modules/postcss": {
"version": "8.5.6",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
"integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/postcss"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"nanoid": "^3.3.11",
"picocolors": "^1.1.1",
"source-map-js": "^1.2.1"
},
"engines": {
"node": "^10 || ^12 || >=14"
}
},
"node_modules/postcss-media-query-parser": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/postcss-media-query-parser/-/postcss-media-query-parser-0.2.3.tgz",
"integrity": "sha512-3sOlxmbKcSHMjlUXQZKQ06jOswE7oVkXPxmZdoB1r5l0q6gTFTQSHxNxOrCccElbW7dxNytifNEo8qidX2Vsig==",
"dev": true,
"license": "MIT"
},
"node_modules/proc-log": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/proc-log/-/proc-log-6.1.0.tgz",
"integrity": "sha512-iG+GYldRf2BQ0UDUAd6JQ/RwzaQy6mXmsk/IzlYyal4A4SNFw54MeH4/tLkF4I5WoWG9SQwuqWzS99jaFQHBuQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/promise-retry": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz",
"integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==",
"dev": true,
"license": "MIT",
"dependencies": {
"err-code": "^2.0.2",
"retry": "^0.12.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/proxy-addr": {
"version": "2.0.7",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
"integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==",
"dev": true,
"license": "MIT",
"dependencies": {
"forwarded": "0.2.0",
"ipaddr.js": "1.9.1"
},
"engines": {
"node": ">= 0.10"
}
},
"node_modules/qs": {
"version": "6.14.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
"integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"side-channel": "^1.1.0"
},
"engines": {
"node": ">=0.6"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/range-parser": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz",
"integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/raw-body": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.2.tgz",
"integrity": "sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==",
"dev": true,
"license": "MIT",
"dependencies": {
"bytes": "~3.1.2",
"http-errors": "~2.0.1",
"iconv-lite": "~0.7.0",
"unpipe": "~1.0.0"
},
"engines": {
"node": ">= 0.10"
}
},
"node_modules/readdirp": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-5.0.0.tgz",
"integrity": "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 20.19.0"
},
"funding": {
"type": "individual",
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/reflect-metadata": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.2.tgz",
"integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==",
"dev": true,
"license": "Apache-2.0"
},
"node_modules/require-from-string": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
"integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/resolve": {
"version": "1.22.11",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz",
"integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-core-module": "^2.16.1",
"path-parse": "^1.0.7",
"supports-preserve-symlinks-flag": "^1.0.0"
},
"bin": {
"resolve": "bin/resolve"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/restore-cursor": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz",
"integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==",
"dev": true,
"license": "MIT",
"dependencies": {
"onetime": "^7.0.0",
"signal-exit": "^4.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/retry": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz",
"integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 4"
}
},
"node_modules/rfdc": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz",
"integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==",
"dev": true,
"license": "MIT"
},
"node_modules/rolldown": {
"version": "1.0.0-beta.58",
"resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-beta.58.tgz",
"integrity": "sha512-v1FCjMZCan7f+xGAHBi+mqiE4MlH7I+SXEHSQSJoMOGNNB2UYtvMiejsq9YuUOiZjNeUeV/a21nSFbrUR+4ZCQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@oxc-project/types": "=0.106.0",
"@rolldown/pluginutils": "1.0.0-beta.58"
},
"bin": {
"rolldown": "bin/cli.mjs"
},
"engines": {
"node": "^20.19.0 || >=22.12.0"
},
"optionalDependencies": {
"@rolldown/binding-android-arm64": "1.0.0-beta.58",
"@rolldown/binding-darwin-arm64": "1.0.0-beta.58",
"@rolldown/binding-darwin-x64": "1.0.0-beta.58",
"@rolldown/binding-freebsd-x64": "1.0.0-beta.58",
"@rolldown/binding-linux-arm-gnueabihf": "1.0.0-beta.58",
"@rolldown/binding-linux-arm64-gnu": "1.0.0-beta.58",
"@rolldown/binding-linux-arm64-musl": "1.0.0-beta.58",
"@rolldown/binding-linux-x64-gnu": "1.0.0-beta.58",
"@rolldown/binding-linux-x64-musl": "1.0.0-beta.58",
"@rolldown/binding-openharmony-arm64": "1.0.0-beta.58",
"@rolldown/binding-wasm32-wasi": "1.0.0-beta.58",
"@rolldown/binding-win32-arm64-msvc": "1.0.0-beta.58",
"@rolldown/binding-win32-x64-msvc": "1.0.0-beta.58"
}
},
"node_modules/rollup": {
"version": "4.55.2",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.55.2.tgz",
"integrity": "sha512-PggGy4dhwx5qaW+CKBilA/98Ql9keyfnb7lh4SR6shQ91QQQi1ORJ1v4UinkdP2i87OBs9AQFooQylcrrRfIcg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/estree": "1.0.8"
},
"bin": {
"rollup": "dist/bin/rollup"
},
"engines": {
"node": ">=18.0.0",
"npm": ">=8.0.0"
},
"optionalDependencies": {
"@rollup/rollup-android-arm-eabi": "4.55.2",
"@rollup/rollup-android-arm64": "4.55.2",
"@rollup/rollup-darwin-arm64": "4.55.2",
"@rollup/rollup-darwin-x64": "4.55.2",
"@rollup/rollup-freebsd-arm64": "4.55.2",
"@rollup/rollup-freebsd-x64": "4.55.2",
"@rollup/rollup-linux-arm-gnueabihf": "4.55.2",
"@rollup/rollup-linux-arm-musleabihf": "4.55.2",
"@rollup/rollup-linux-arm64-gnu": "4.55.2",
"@rollup/rollup-linux-arm64-musl": "4.55.2",
"@rollup/rollup-linux-loong64-gnu": "4.55.2",
"@rollup/rollup-linux-loong64-musl": "4.55.2",
"@rollup/rollup-linux-ppc64-gnu": "4.55.2",
"@rollup/rollup-linux-ppc64-musl": "4.55.2",
"@rollup/rollup-linux-riscv64-gnu": "4.55.2",
"@rollup/rollup-linux-riscv64-musl": "4.55.2",
"@rollup/rollup-linux-s390x-gnu": "4.55.2",
"@rollup/rollup-linux-x64-gnu": "4.55.2",
"@rollup/rollup-linux-x64-musl": "4.55.2",
"@rollup/rollup-openbsd-x64": "4.55.2",
"@rollup/rollup-openharmony-arm64": "4.55.2",
"@rollup/rollup-win32-arm64-msvc": "4.55.2",
"@rollup/rollup-win32-ia32-msvc": "4.55.2",
"@rollup/rollup-win32-x64-gnu": "4.55.2",
"@rollup/rollup-win32-x64-msvc": "4.55.2",
"fsevents": "~2.3.2"
}
},
"node_modules/router": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz",
"integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"debug": "^4.4.0",
"depd": "^2.0.0",
"is-promise": "^4.0.0",
"parseurl": "^1.3.3",
"path-to-regexp": "^8.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/rxjs": {
"version": "7.8.2",
"resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz",
"integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==",
"license": "Apache-2.0",
"peer": true,
"dependencies": {
"tslib": "^2.1.0"
}
},
"node_modules/safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
"dev": true,
"license": "MIT"
},
"node_modules/sass": {
"version": "1.97.1",
"resolved": "https://registry.npmjs.org/sass/-/sass-1.97.1.tgz",
"integrity": "sha512-uf6HoO8fy6ClsrShvMgaKUn14f2EHQLQRtpsZZLeU/Mv0Q1K5P0+x2uvH6Cub39TVVbWNSrraUhDAoFph6vh0A==",
"dev": true,
"license": "MIT",
"dependencies": {
"chokidar": "^4.0.0",
"immutable": "^5.0.2",
"source-map-js": ">=0.6.2 <2.0.0"
},
"bin": {
"sass": "sass.js"
},
"engines": {
"node": ">=14.0.0"
},
"optionalDependencies": {
"@parcel/watcher": "^2.4.1"
}
},
"node_modules/sass/node_modules/chokidar": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz",
"integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==",
"dev": true,
"license": "MIT",
"dependencies": {
"readdirp": "^4.0.1"
},
"engines": {
"node": ">= 14.16.0"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/sass/node_modules/readdirp": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz",
"integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 14.18.0"
},
"funding": {
"type": "individual",
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/semver": {
"version": "7.7.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz",
"integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/send": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/send/-/send-1.2.1.tgz",
"integrity": "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"debug": "^4.4.3",
"encodeurl": "^2.0.0",
"escape-html": "^1.0.3",
"etag": "^1.8.1",
"fresh": "^2.0.0",
"http-errors": "^2.0.1",
"mime-types": "^3.0.2",
"ms": "^2.1.3",
"on-finished": "^2.4.1",
"range-parser": "^1.2.1",
"statuses": "^2.0.2"
},
"engines": {
"node": ">= 18"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/serve-static": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.1.tgz",
"integrity": "sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==",
"dev": true,
"license": "MIT",
"dependencies": {
"encodeurl": "^2.0.0",
"escape-html": "^1.0.3",
"parseurl": "^1.3.3",
"send": "^1.2.0"
},
"engines": {
"node": ">= 18"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/setprototypeof": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==",
"dev": true,
"license": "ISC"
},
"node_modules/shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
"dev": true,
"license": "MIT",
"dependencies": {
"shebang-regex": "^3.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/shebang-regex": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/side-channel": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
"integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"object-inspect": "^1.13.3",
"side-channel-list": "^1.0.0",
"side-channel-map": "^1.0.1",
"side-channel-weakmap": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/side-channel-list": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
"integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"object-inspect": "^1.13.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/side-channel-map": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
"integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bound": "^1.0.2",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.5",
"object-inspect": "^1.13.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/side-channel-weakmap": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
"integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bound": "^1.0.2",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.5",
"object-inspect": "^1.13.3",
"side-channel-map": "^1.0.1"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/signal-exit": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
"integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/sigstore": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/sigstore/-/sigstore-4.1.0.tgz",
"integrity": "sha512-/fUgUhYghuLzVT/gaJoeVehLCgZiUxPCPMcyVNY0lIf/cTCz58K/WTI7PefDarXxp9nUKpEwg1yyz3eSBMTtgA==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@sigstore/bundle": "^4.0.0",
"@sigstore/core": "^3.1.0",
"@sigstore/protobuf-specs": "^0.5.0",
"@sigstore/sign": "^4.1.0",
"@sigstore/tuf": "^4.0.1",
"@sigstore/verify": "^3.1.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/slice-ansi": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.2.tgz",
"integrity": "sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"is-fullwidth-code-point": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/slice-ansi?sponsor=1"
}
},
"node_modules/smart-buffer": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz",
"integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 6.0.0",
"npm": ">= 3.0.0"
}
},
"node_modules/socks": {
"version": "2.8.7",
"resolved": "https://registry.npmjs.org/socks/-/socks-2.8.7.tgz",
"integrity": "sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==",
"dev": true,
"license": "MIT",
"dependencies": {
"ip-address": "^10.0.1",
"smart-buffer": "^4.2.0"
},
"engines": {
"node": ">= 10.0.0",
"npm": ">= 3.0.0"
}
},
"node_modules/socks-proxy-agent": {
"version": "8.0.5",
"resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz",
"integrity": "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==",
"dev": true,
"license": "MIT",
"dependencies": {
"agent-base": "^7.1.2",
"debug": "^4.3.4",
"socks": "^2.8.3"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/source-map": {
"version": "0.7.6",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.6.tgz",
"integrity": "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">= 12"
}
},
"node_modules/source-map-js": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
"integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/source-map-support": {
"version": "0.5.21",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
"integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
"dev": true,
"license": "MIT",
"dependencies": {
"buffer-from": "^1.0.0",
"source-map": "^0.6.0"
}
},
"node_modules/source-map-support/node_modules/source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/spdx-correct": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz",
"integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"spdx-expression-parse": "^3.0.0",
"spdx-license-ids": "^3.0.0"
}
},
"node_modules/spdx-exceptions": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz",
"integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==",
"dev": true,
"license": "CC-BY-3.0"
},
"node_modules/spdx-expression-parse": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz",
"integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"spdx-exceptions": "^2.1.0",
"spdx-license-ids": "^3.0.0"
}
},
"node_modules/spdx-license-ids": {
"version": "3.0.22",
"resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz",
"integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==",
"dev": true,
"license": "CC0-1.0"
},
"node_modules/ssri": {
"version": "13.0.0",
"resolved": "https://registry.npmjs.org/ssri/-/ssri-13.0.0.tgz",
"integrity": "sha512-yizwGBpbCn4YomB2lzhZqrHLJoqFGXihNbib3ozhqF/cIp5ue+xSmOQrjNasEE62hFxsCcg/V/z23t4n8jMEng==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^7.0.3"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/statuses": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
"integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/stdin-discarder": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.2.2.tgz",
"integrity": "sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/string-width": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz",
"integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==",
"dev": true,
"license": "MIT",
"dependencies": {
"get-east-asian-width": "^1.3.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/strip-ansi": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
"integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/supports-preserve-symlinks-flag": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
"integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/tar": {
"version": "7.5.4",
"resolved": "https://registry.npmjs.org/tar/-/tar-7.5.4.tgz",
"integrity": "sha512-AN04xbWGrSTDmVwlI4/GTlIIwMFk/XEv7uL8aa57zuvRy6s4hdBed+lVq2fAZ89XDa7Us3ANXcE3Tvqvja1kTA==",
"dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
"@isaacs/fs-minipass": "^4.0.0",
"chownr": "^3.0.0",
"minipass": "^7.1.2",
"minizlib": "^3.1.0",
"yallist": "^5.0.0"
},
"engines": {
"node": ">=18"
}
},
"node_modules/tar/node_modules/yallist": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
"integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
"dev": true,
"license": "BlueOak-1.0.0",
"engines": {
"node": ">=18"
}
},
"node_modules/tinyglobby": {
"version": "0.2.15",
"resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz",
"integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"fdir": "^6.5.0",
"picomatch": "^4.0.3"
},
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://github.com/sponsors/SuperchupuDev"
}
},
"node_modules/toidentifier": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
"integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.6"
}
},
"node_modules/tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
"license": "0BSD",
"peer": true
},
"node_modules/tuf-js": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-4.1.0.tgz",
"integrity": "sha512-50QV99kCKH5P/Vs4E2Gzp7BopNV+KzTXqWeaxrfu5IQJBOULRsTIS9seSsOVT8ZnGXzCyx55nYWAi4qJzpZKEQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@tufjs/models": "4.1.0",
"debug": "^4.4.3",
"make-fetch-happen": "^15.0.1"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/type-is": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz",
"integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==",
"dev": true,
"license": "MIT",
"dependencies": {
"content-type": "^1.0.5",
"media-typer": "^1.1.0",
"mime-types": "^3.0.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/typescript": {
"version": "5.9.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"dev": true,
"license": "Apache-2.0",
"peer": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
},
"node_modules/undici": {
"version": "7.18.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-7.18.0.tgz",
"integrity": "sha512-CfPufgPFHCYu0W4h1NiKW9+tNJ39o3kWm7Cm29ET1enSJx+AERfz7A2wAr26aY0SZbYzZlTBQtcHy15o60VZfQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=20.18.1"
}
},
"node_modules/unique-filename": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-5.0.0.tgz",
"integrity": "sha512-2RaJTAvAb4owyjllTfXzFClJ7WsGxlykkPvCr9pA//LD9goVq+m4PPAeBgNodGZ7nSrntT/auWpJ6Y5IFXcfjg==",
"dev": true,
"license": "ISC",
"dependencies": {
"unique-slug": "^6.0.0"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/unique-slug": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-6.0.0.tgz",
"integrity": "sha512-4Lup7Ezn8W3d52/xBhZBVdx323ckxa7DEvd9kPQHppTkLoJXw6ltrBCyj5pnrxj0qKDxYMJ56CoxNuFCscdTiw==",
"dev": true,
"license": "ISC",
"dependencies": {
"imurmurhash": "^0.1.4"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/unpipe": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
"integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/update-browserslist-db": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz",
"integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/browserslist"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"escalade": "^3.2.0",
"picocolors": "^1.1.1"
},
"bin": {
"update-browserslist-db": "cli.js"
},
"peerDependencies": {
"browserslist": ">= 4.21.0"
}
},
"node_modules/validate-npm-package-license": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz",
"integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"spdx-correct": "^3.0.0",
"spdx-expression-parse": "^3.0.0"
}
},
"node_modules/validate-npm-package-name": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-7.0.2.tgz",
"integrity": "sha512-hVDIBwsRruT73PbK7uP5ebUt+ezEtCmzZz3F59BSr2F6OVFnJ/6h8liuvdLrQ88Xmnk6/+xGGuq+pG9WwTuy3A==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/vary": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
"integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/vite": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/vite/-/vite-7.3.0.tgz",
"integrity": "sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"esbuild": "^0.27.0",
"fdir": "^6.5.0",
"picomatch": "^4.0.3",
"postcss": "^8.5.6",
"rollup": "^4.43.0",
"tinyglobby": "^0.2.15"
},
"bin": {
"vite": "bin/vite.js"
},
"engines": {
"node": "^20.19.0 || >=22.12.0"
},
"funding": {
"url": "https://github.com/vitejs/vite?sponsor=1"
},
"optionalDependencies": {
"fsevents": "~2.3.3"
},
"peerDependencies": {
"@types/node": "^20.19.0 || >=22.12.0",
"jiti": ">=1.21.0",
"less": "^4.0.0",
"lightningcss": "^1.21.0",
"sass": "^1.70.0",
"sass-embedded": "^1.70.0",
"stylus": ">=0.54.8",
"sugarss": "^5.0.0",
"terser": "^5.16.0",
"tsx": "^4.8.1",
"yaml": "^2.4.2"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
},
"jiti": {
"optional": true
},
"less": {
"optional": true
},
"lightningcss": {
"optional": true
},
"sass": {
"optional": true
},
"sass-embedded": {
"optional": true
},
"stylus": {
"optional": true
},
"sugarss": {
"optional": true
},
"terser": {
"optional": true
},
"tsx": {
"optional": true
},
"yaml": {
"optional": true
}
}
},
"node_modules/watchpack": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.5.0.tgz",
"integrity": "sha512-e6vZvY6xboSwLz2GD36c16+O/2Z6fKvIf4pOXptw2rY9MVwE/TXc6RGqxD3I3x0a28lwBY7DE+76uTPSsBrrCA==",
"dev": true,
"license": "MIT",
"dependencies": {
"glob-to-regexp": "^0.4.1",
"graceful-fs": "^4.1.2"
},
"engines": {
"node": ">=10.13.0"
}
},
"node_modules/weak-lru-cache": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/weak-lru-cache/-/weak-lru-cache-1.2.2.tgz",
"integrity": "sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==",
"dev": true,
"license": "MIT",
"optional": true
},
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
"dev": true,
"license": "ISC",
"dependencies": {
"isexe": "^2.0.0"
},
"bin": {
"node-which": "bin/node-which"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/wrap-ansi": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
"integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi/node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi/node_modules/ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dev": true,
"license": "MIT",
"dependencies": {
"color-convert": "^2.0.1"
},
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/wrap-ansi/node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
"dev": true,
"license": "MIT"
},
"node_modules/wrap-ansi/node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi/node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi/node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
"dev": true,
"license": "ISC"
},
"node_modules/y18n": {
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=10"
}
},
"node_modules/yallist": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
"integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
"dev": true,
"license": "ISC"
},
"node_modules/yargs": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-18.0.0.tgz",
"integrity": "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==",
"dev": true,
"license": "MIT",
"dependencies": {
"cliui": "^9.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"string-width": "^7.2.0",
"y18n": "^5.0.5",
"yargs-parser": "^22.0.0"
},
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=23"
}
},
"node_modules/yargs-parser": {
"version": "22.0.0",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-22.0.0.tgz",
"integrity": "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=23"
}
},
"node_modules/yargs/node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/yoctocolors": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/yoctocolors/-/yoctocolors-2.1.2.tgz",
"integrity": "sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/yoctocolors-cjs": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.3.tgz",
"integrity": "sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/zod": {
"version": "4.3.5",
"resolved": "https://registry.npmjs.org/zod/-/zod-4.3.5.tgz",
"integrity": "sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==",
"dev": true,
"license": "MIT",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}
},
"node_modules/zod-to-json-schema": {
"version": "3.25.1",
"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.25.1.tgz",
"integrity": "sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA==",
"dev": true,
"license": "ISC",
"peerDependencies": {
"zod": "^3.25 || ^4"
}
},
"node_modules/zone.js": {
"version": "0.16.0",
"resolved": "https://registry.npmjs.org/zone.js/-/zone.js-0.16.0.tgz",
"integrity": "sha512-LqLPpIQANebrlxY6jKcYKdgN5DTXyyHAKnnWWjE5pPfEQ4n7j5zn7mOEEpwNZVKGqx3kKKmvplEmoBrvpgROTA==",
"license": "MIT",
"peer": true
}
}
} | json | github | https://github.com/angular/angular | adev/src/content/tutorials/homepage/package-lock.json |
---
navigation_title: "SAMPLE"
mapped_pages:
- https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-commands.html#esql-sample
---
# {{esql}} `SAMPLE` command [esql-sample]
:::{include} ../_snippets/commands/layout/sample.md
::: | unknown | github | https://github.com/elastic/elasticsearch | docs/reference/query-languages/esql/commands/sample.md |
/*
* Copyright 2012-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.test.json;
import java.io.IOException;
import java.io.Reader;
import com.google.gson.Gson;
import org.springframework.beans.factory.ObjectFactory;
import org.springframework.core.ResolvableType;
import org.springframework.util.Assert;
/**
* AssertJ based JSON tester backed by Gson. Usually instantiated via
* {@link #initFields(Object, Gson)}, for example: <pre class="code">
* public class ExampleObjectJsonTests {
*
* private GsonTester<ExampleObject> json;
*
* @Before
* public void setup() {
* Gson gson = new GsonBuilder().create();
* GsonTester.initFields(this, gson);
* }
*
* @Test
* public void testWriteJson() throws IOException {
* ExampleObject object = //...
* assertThat(json.write(object)).isEqualToJson("expected.json");
* }
*
* }
* </pre>
*
* See {@link AbstractJsonMarshalTester} for more details.
*
* @param <T> the type under test
* @author Phillip Webb
* @since 1.4.0
*/
public class GsonTester<T> extends AbstractJsonMarshalTester<T> {
private final Gson gson;
/**
* Create a new uninitialized {@link GsonTester} instance.
* @param gson the Gson instance
*/
protected GsonTester(Gson gson) {
Assert.notNull(gson, "'gson' must not be null");
this.gson = gson;
}
/**
* Create a new {@link GsonTester} instance.
* @param resourceLoadClass the source class used to load resources
* @param type the type under test
* @param gson the Gson instance
* @see #initFields(Object, Gson)
*/
public GsonTester(Class<?> resourceLoadClass, ResolvableType type, Gson gson) {
super(resourceLoadClass, type);
Assert.notNull(gson, "'gson' must not be null");
this.gson = gson;
}
@Override
protected String writeObject(T value, ResolvableType type) throws IOException {
return this.gson.toJson(value, type.getType());
}
@Override
protected T readObject(Reader reader, ResolvableType type) throws IOException {
return this.gson.fromJson(reader, type.getType());
}
/**
* Utility method to initialize {@link GsonTester} fields. See {@link GsonTester
* class-level documentation} for example usage.
* @param testInstance the test instance
* @param gson the Gson instance
*/
public static void initFields(Object testInstance, Gson gson) {
new GsonFieldInitializer().initFields(testInstance, gson);
}
/**
* Utility method to initialize {@link GsonTester} fields. See {@link GsonTester
* class-level documentation} for example usage.
* @param testInstance the test instance
* @param gson an object factory to create the Gson instance
*/
public static void initFields(Object testInstance, ObjectFactory<Gson> gson) {
new GsonFieldInitializer().initFields(testInstance, gson);
}
/**
* {@link FieldInitializer} for Gson.
*/
private static class GsonFieldInitializer extends FieldInitializer<Gson> {
protected GsonFieldInitializer() {
super(GsonTester.class);
}
@Override
protected AbstractJsonMarshalTester<Object> createTester(Class<?> resourceLoadClass, ResolvableType type,
Gson marshaller) {
return new GsonTester<>(resourceLoadClass, type, marshaller);
}
}
} | java | github | https://github.com/spring-projects/spring-boot | core/spring-boot-test/src/main/java/org/springframework/boot/test/json/GsonTester.java |
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.onyx import onyx_traffic_class
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxTrafficClassModule(TestOnyxModule):
    """Unit tests for the onyx_traffic_class Ansible module."""

    module = onyx_traffic_class
    arp_suppression = True

    def setUp(self):
        super(TestOnyxTrafficClassModule, self).setUp()
        # Replace every device touch-point with a mock: the two "show"
        # helpers feed canned CLI output, load_config captures commands.
        self.mock_get_congestion_control_config = patch.object(
            onyx_traffic_class.OnyxTrafficClassModule,
            "_show_interface_congestion_control")
        self.get_congestion_control_config = self.mock_get_congestion_control_config.start()
        self.mock_get_dcb_config = patch.object(
            onyx_traffic_class.OnyxTrafficClassModule,
            "_show_interface_dcb_ets")
        self.get_dcb_config = self.mock_get_dcb_config.start()
        self.mock_load_config = patch(
            'ansible.module_utils.network.onyx.onyx.load_config')
        self.load_config = self.mock_load_config.start()

    def tearDown(self):
        super(TestOnyxTrafficClassModule, self).tearDown()
        for patcher in (self.mock_get_congestion_control_config,
                        self.mock_load_config,
                        self.mock_get_dcb_config):
            patcher.stop()

    def load_fixtures(self, commands=None, transport='cli'):
        # Point both "show" mocks at their fixture files and make config
        # loading a no-op.
        self.get_congestion_control_config.return_value = load_fixture(
            'onyx_show_interface_congestion_control.cfg')
        self.get_dcb_config.return_value = load_fixture(
            'onyx_show_dcb_ets_interface.cfg')
        self.load_config.return_value = None

    def test_configure_congestion_control_disabled_with_change(self):
        ecn = dict(control="ecn", threshold_mode="absolute",
                   min_threshold=500, max_threshold=1500)
        set_module_args(dict(interfaces=["Eth1/1"], tc=1,
                             congestion_control=ecn))
        self.execute_module(changed=True, commands=[
            "interface ethernet 1/1 traffic-class 1 congestion-control ecn minimum-absolute 500 maximum-absolute 1500"
        ])

    def test_configure_congestion_control_disabled_with_no_change(self):
        set_module_args(dict(state="disabled", interfaces=["Eth1/1"], tc=0))
        self.execute_module(changed=False)

    def test_configure_congestion_control_with_change(self):
        ecn = dict(control="ecn", threshold_mode="relative",
                   min_threshold=9, max_threshold=88)
        set_module_args(dict(interfaces=["Eth1/1"], tc=2,
                             congestion_control=ecn))
        self.execute_module(changed=True, commands=[
            "interface ethernet 1/1 traffic-class 2 congestion-control ecn minimum-relative 9 maximum-relative 88"
        ])

    def test_configure_congestion_control_absolute_with_change(self):
        ecn = dict(control="ecn", threshold_mode="absolute",
                   min_threshold=500, max_threshold=1500)
        set_module_args(dict(interfaces=["Eth1/1"], tc=3,
                             congestion_control=ecn))
        self.execute_module(changed=True, commands=[
            "interface ethernet 1/1 traffic-class 3 congestion-control ecn minimum-absolute 500 maximum-absolute 1500"
        ])

    def test_configure_congestion_control_with_no_change(self):
        ecn = dict(control="ecn", threshold_mode="absolute",
                   min_threshold=500, max_threshold=1550)
        set_module_args(dict(interfaces=["Eth1/1"], tc=3,
                             congestion_control=ecn))
        self.execute_module(changed=False)

    def test_configure_dcb_mode_with_no_change(self):
        set_module_args(dict(interfaces=["Eth1/1"], tc=3,
                             dcb=dict(mode="strict")))
        self.execute_module(changed=False)

    def test_configure_dcb_strict_mode_with_change(self):
        set_module_args(dict(interfaces=["Eth1/1"], tc=1,
                             dcb=dict(mode="strict")))
        self.execute_module(changed=True, commands=[
            "interface ethernet 1/1 traffic-class 1 dcb ets strict"
        ])

    def test_configure_dcb_wrr_mode_with_change(self):
        set_module_args(dict(interfaces=["Eth1/1"], tc=0,
                             dcb=dict(mode="wrr", weight=10)))
        self.execute_module(changed=True, commands=[
            "interface ethernet 1/1 traffic-class 0 dcb ets wrr 10"
        ])

    def test_configure_dcb_wrr_mode_with_no_change(self):
        set_module_args(dict(interfaces=["Eth1/1"], tc=0,
                             dcb=dict(mode="wrr", weight=12)))
        self.execute_module(changed=False)
#!venv/bin/python
# Creates a new post entry
def create(config):
    """Create a new post page under templates/pages/ from a built-in template.

    ``config`` must expose ``title``, ``lang`` and ``tags`` attributes, plus
    an optional ``template`` attribute (when None, the generic 'page'
    template is used).  Returns the relative file name that was written.
    """
    # Local imports keep CLI start-up cheap and avoid a circular import
    # with the Flask app module.
    from datetime import datetime  # fix: datetime was used but never imported
    from app import create_slug

    template = 'page' if config.template is None else config.template
    vars = {
        'title': config.title,
        'template': template,
        'tags': config.tags,
        'date': datetime.now().strftime('%Y-%m-%d')
    }
    new_post_template = \
"""{%% extends "%(template)s.html" %%}
{%% set page_title = "%(title)s" %%}
{%% set page_description = "%(title)s" %%}
{%% set post_tags = "%(tags)s" %%}
{%% set post_date = "%(date)s" %%}
{%% set disable_comments = False %%}
{%% block page_content %%}
{{ macros.post_base({'title': page_title, 'date': post_date, 'tags': post_tags}, lang, vars) }}
<p>Mussum ipsum cacilds, vidis litro abertis. Consetis adipiscings elitis.
Pra la , depois divoltis porris, paradis. Paisis, filhis, espiritis santis.
Me faiz elementum girarzis, nisi eros vermeio, in elementis me pra quem e
amistosis quis leo. Manduma pindureta quium dia nois paga. Sapien in monti
palavris qui num significa nadis i pareci latim. Interessantiss quisso pudia
ce receita de bolis, mais bolis eu num gostis.</p>
{%% endblock %%}""" % vars
    file_name = config.lang + '/' + create_slug(config.title) + '.html'
    # Let any I/O error propagate naturally -- the previous
    # ``except Exception, e: raise e`` only obscured the traceback
    # (and was Python-2-only syntax).
    with open('templates/pages/' + file_name, 'w') as new_file:
        new_file.write(new_post_template)
    print("\n>>> New post created: %s\n" % file_name)
    return file_name
# Updates the index file with a new node representing the new post
def update_index(node):
    """Append ``node`` to posts.json under its language key.

    node -- dict with at least a 'lang' key naming an existing list in
    the index. Returns the updated index dict.
    """
    import json
    # 'r+' replaces the non-portable 'rw+' mode string.
    with open('posts.json', 'r+') as json_file:
        index = json.load(json_file)
        # append the new node to the key that corresponds to the language
        index['posts'][node['lang']].append(node)
        # rewind, rewrite, and drop any stale bytes in case the new
        # JSON is shorter than the old contents
        json_file.seek(0)
        json.dump(index, json_file, indent = 4)
        json_file.truncate()
    return index
# Parse command line arguments
def parse_args():
    """Build the generator's CLI parser and parse sys.argv."""
    import argparse
    desc = \
"""This is a generator that creates new posts for the blog.
Example of usage:
new_post.py -l en -tt 'Creating a new post!' -tg 'python,flask,fancytag'
"""
    parser = argparse.ArgumentParser(description = desc)
    # (flags, help text, required, action) for every supported option.
    options = (
        (('-l', '--lang'), 'Post language', True, None),
        (('-tt', '--title'), 'Post title', True, None),
        (('-tg', '--tags'), 'Post tags (separated by commas)', True, None),
        (('-tp', '--template'), 'Template used', False, None),
        (('-no', '--noupdate'), "Don't update index", False, 'store_true'),
    )
    for flags, text, needed, action in options:
        extra = {'action': action} if action else {}
        parser.add_argument(*flags, help = text, required = needed, **extra)
    return parser.parse_args()
if __name__ == '__main__':
    # Entry point: create the post file, then (unless -no was given)
    # register it in the posts.json index.
    from datetime import datetime
    try:
        args = parse_args()
        file_name = create(args)
        # Index node describing the freshly created post.
        new_node = {
            'date' : datetime.now().strftime('%Y-%m-%d'),
            'title': args.title,
            'lang' : args.lang,
            'uri' : '/' + file_name,
            'tags' : args.tags
        }
        # NOTE(review): "if not args.noupdate" would be more idiomatic.
        if args.noupdate == False:
            update_index(new_node)
            print ">>> Posts index successfully updated with entry: \n{0}\n".format(new_node)
    except Exception, e:
        raise e | unknown | codeparrot/codeparrot-clean | ||
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import cmd
import pprint
import sys
from ansible.plugins.strategy.linear import StrategyModule as LinearStrategyModule
from ansible.compat.six.moves import reduce
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class NextAction(object):
    """Outcome chosen by the user when the interactive debugger exits."""
    # Possible results, in the order a user typically reaches for them.
    REDO, CONTINUE, EXIT = 1, 2, 3
    def __init__(self, result=EXIT):
        # Default to EXIT so an aborted session stops the run.
        self.result = result
class StrategyModule(LinearStrategyModule):
    """Linear strategy variant that drops into an interactive debugger
    whenever a task result comes back failed or unreachable, letting the
    operator inspect state and optionally re-run the failed task.
    """
    def __init__(self, tqm):
        # Keep our own handle on the task queue manager so a "redo" can
        # clear its failed-host bookkeeping.
        self.curr_tqm = tqm
        super(StrategyModule, self).__init__(tqm)
    def _queue_task(self, host, task, task_vars, play_context):
        # Remember the most recently queued task's full context so it can
        # be re-queued verbatim (and exposed to the debugger) on failure.
        self.curr_host = host
        self.curr_task = task
        self.curr_task_vars = task_vars
        self.curr_play_context = play_context
        super(StrategyModule, self)._queue_task(host, task, task_vars, play_context)
    def _process_pending_results(self, iterator, one_pass=False, max_passes=None):
        # Nothing has been queued yet -> nothing to debug.
        if not hasattr(self, "curr_host"):
            return super(StrategyModule, self)._process_pending_results(iterator, one_pass, max_passes)
        # Snapshot the host state so a "redo" can roll back to it.
        prev_host_state = iterator.get_host_state(self.curr_host)
        # NOTE(review): max_passes is not forwarded to the calls below --
        # confirm whether that is intentional.
        results = super(StrategyModule, self)._process_pending_results(iterator, one_pass)
        while self._need_debug(results):
            next_action = NextAction()
            dbg = Debugger(self, results, next_action)
            dbg.cmdloop()
            if next_action.result == NextAction.REDO:
                # rollback host state
                self.curr_tqm.clear_failed_hosts()
                iterator._host_states[self.curr_host.name] = prev_host_state
                # Undo the failure/unreachable stat increment for this host
                # so the redo starts from a clean slate.
                if reduce(lambda total, res : res.is_failed() or total, results, False):
                    self._tqm._stats.failures[self.curr_host.name] -= 1
                elif reduce(lambda total, res : res.is_unreachable() or total, results, False):
                    self._tqm._stats.dark[self.curr_host.name] -= 1
                # redo
                super(StrategyModule, self)._queue_task(self.curr_host, self.curr_task, self.curr_task_vars, self.curr_play_context)
                results = super(StrategyModule, self)._process_pending_results(iterator, one_pass)
            elif next_action.result == NextAction.CONTINUE:
                break
            elif next_action.result == NextAction.EXIT:
                exit(1)
        return results
    def _need_debug(self, results):
        # Debug when any result in the batch is failed or unreachable.
        return reduce(lambda total, res : res.is_failed() or res.is_unreachable() or total, results, False)
class Debugger(cmd.Cmd):
prompt = '(debug) ' # debugger
prompt_continuous = '> ' # multiple lines
def __init__(self, strategy_module, results, next_action):
# cmd.Cmd is old-style class
cmd.Cmd.__init__(self)
self.intro = "Debugger invoked"
self.scope = {}
self.scope['task'] = strategy_module.curr_task
self.scope['vars'] = strategy_module.curr_task_vars
self.scope['host'] = strategy_module.curr_host
self.scope['result'] = results[0]._result
self.scope['results'] = results # for debug of this debugger
self.next_action = next_action
def cmdloop(self):
try:
cmd.Cmd.cmdloop(self)
except KeyboardInterrupt:
pass
def do_EOF(self, args):
return self.do_quit(args)
def do_quit(self, args):
display.display('aborted')
self.next_action.result = NextAction.EXIT
return True
do_q = do_quit
def do_continue(self, args):
self.next_action.result = NextAction.CONTINUE
return True
do_c = do_continue
def do_redo(self, args):
self.next_action.result = NextAction.REDO
return True
do_r = do_redo
def evaluate(self, args):
try:
return eval(args, globals(), self.scope)
except:
t, v = sys.exc_info()[:2]
if isinstance(t, str):
exc_type_name = t
else:
exc_type_name = t.__name__
display.display('***%s:%s' % (exc_type_name, repr(v)))
raise
def do_p(self, args):
try:
result = self.evaluate(args)
display.display(pprint.pformat(result))
except:
pass
def execute(self, args):
try:
code = compile(args + '\n', '<stdin>', 'single')
exec(code, globals(), self.scope)
except:
t, v = sys.exc_info()[:2]
if type(t) == type(''):
exc_type_name = t
else:
exc_type_name = t.__name__
display.display('***%s:%s' % (exc_type_name, repr(v)))
raise
def default(self, line):
try:
self.execute(line)
display.display(pprint.pformat(result))
except:
pass | unknown | codeparrot/codeparrot-clean | ||
"""
End-to-end tests for the main LMS Dashboard (aka, Student Dashboard).
"""
from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.dashboard import DashboardPage
from common.test.acceptance.tests.helpers import UniqueCourseTest, generate_course_key
DEFAULT_SHORT_DATE_FORMAT = '{dt:%b} {dt.day}, {dt.year}'
TEST_DATE_FORMAT = '{dt:%b} {dt.day}, {dt.year} {dt.hour:02}:{dt.minute:02}'
class BaseLmsDashboardTestMultiple(UniqueCourseTest):
    """ Base test suite for the LMS Student Dashboard with Multiple Courses"""
    def setUp(self):
        """
        Initializes the components (page objects, courses, users) for this test suite
        """
        # Some parameters are provided by the parent setUp() routine, such as the following:
        # self.course_id, self.course_info, self.unique_id
        super().setUp()
        # Load page objects for use by the tests
        self.dashboard_page = DashboardPage(self.browser)
        # Configure some aspects of the test course and install the settings into the course
        # Three courses, one per enrollment track (audit / verified / credit).
        self.courses = {
            'A': {
                'org': 'test_org',
                'number': self.unique_id,
                'run': 'test_run_A',
                'display_name': 'Test Course A',
                'enrollment_mode': 'audit',
                'cert_name_long': 'Certificate of Audit Achievement'
            },
            'B': {
                'org': 'test_org',
                'number': self.unique_id,
                'run': 'test_run_B',
                'display_name': 'Test Course B',
                'enrollment_mode': 'verified',
                'cert_name_long': 'Certificate of Verified Achievement'
            },
            'C': {
                'org': 'test_org',
                'number': self.unique_id,
                'run': 'test_run_C',
                'display_name': 'Test Course C',
                'enrollment_mode': 'credit',
                'cert_name_long': 'Certificate of Credit Achievement'
            }
        }
        self.username = f"test_{self.unique_id[0:6]}"
        self.email = f"{self.username}@example.com"
        self.course_keys = {}
        self.course_fixtures = {}
        # Install each course fixture and enroll the test user in it.
        for key, value in self.courses.items():
            course_key = generate_course_key(
                value['org'],
                value['number'],
                value['run'],
            )
            course_fixture = CourseFixture(
                value['org'],
                value['number'],
                value['run'],
                value['display_name'],
            )
            course_fixture.add_advanced_settings({
                "social_sharing_url": {"value": "http://custom/course/url"},
                "cert_name_long": {"value": value['cert_name_long']}
            })
            # Minimal course content: two visible subsections plus one
            # staff-only subsection to exercise visibility handling.
            course_fixture.add_children(
                XBlockFixtureDesc('chapter', 'Test Section 1').add_children(
                    XBlockFixtureDesc('sequential', 'Test Subsection 1,1').add_children(
                        XBlockFixtureDesc('problem', 'Test Problem 1', data='<problem>problem 1 dummy body</problem>'),
                        XBlockFixtureDesc('html', 'html 1', data="<html>html 1 dummy body</html>"),
                        XBlockFixtureDesc('problem', 'Test Problem 2', data="<problem>problem 2 dummy body</problem>"),
                        XBlockFixtureDesc('html', 'html 2', data="<html>html 2 dummy body</html>"),
                    ),
                    XBlockFixtureDesc('sequential', 'Test Subsection 1,2').add_children(
                        XBlockFixtureDesc('problem', 'Test Problem 3', data='<problem>problem 3 dummy body</problem>'),
                    ),
                    XBlockFixtureDesc(
                        'sequential', 'Test HIDDEN Subsection', metadata={'visible_to_staff_only': True}
                    ).add_children(
                        XBlockFixtureDesc('problem', 'Test HIDDEN Problem', data='<problem>hidden problem</problem>'),
                    ),
                )
            ).install()
            self.course_keys[key] = course_key
            self.course_fixtures[key] = course_fixture
            # Create the test user, register them for the course, and authenticate
            AutoAuthPage(
                self.browser,
                username=self.username,
                email=self.email,
                course_id=course_key,
                enrollment_mode=value['enrollment_mode']
            ).visit()
        # Navigate the authenticated, enrolled user to the dashboard page and get testing!
        self.dashboard_page.visit()
class LmsDashboardA11yTest(BaseLmsDashboardTestMultiple):
    """Accessibility checks for the LMS student dashboard."""
    a11y = True
    def test_dashboard_course_listings_a11y(self):
        """Audit the course-listing area for accessibility violations."""
        # Known violations tracked by the tickets below are excluded
        # from the audit until they are fixed.
        ignored_rules = [
            'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
            'button-name',  # TODO: AC-935
            'landmark-no-duplicate-banner',  # TODO: AC-934
            'landmark-complementary-is-top-level',  # TODO: AC-939
            'region'  # TODO: AC-932
        ]
        self.dashboard_page.a11y_audit.config.set_rules({"ignore": ignored_rules})
        assert len(self.dashboard_page.get_courses()) == 3
        self.dashboard_page.a11y_audit.check_for_accessibility_errors()
# Owner(s): ["module: unknown"]
# ruff: noqa: F841
import functools
import unittest
import torch
import torch.nn.functional as F
import torch.utils.flop_counter
from torch._subclasses.fake_tensor import FakeTensorMode
from torch.testing._internal.common_cuda import (
PLATFORM_SUPPORTS_CUDNN_ATTENTION,
PLATFORM_SUPPORTS_FLASH_ATTENTION,
PLATFORM_SUPPORTS_FP8,
PLATFORM_SUPPORTS_MEM_EFF_ATTENTION,
)
from torch.testing._internal.common_device_type import e4m3_type
from torch.testing._internal.common_utils import (
run_tests,
TEST_WITH_TORCHDYNAMO,
TestCase,
)
from torch.testing._internal.triton_utils import requires_cuda_and_triton
try:
from torchvision import models as torchvision_models
HAS_TORCHVISION = True
except ImportError:
HAS_TORCHVISION = False
skipIfNoTorchVision = unittest.skipIf(not HAS_TORCHVISION, "no torchvision")
HAS_CUDA = torch.cuda.is_available()
def FlopCounterMode(*args, **kwargs):
return torch.utils.flop_counter.FlopCounterMode(*args, **kwargs, display=False)
def get_total_flops(mode):
    """Return the total flops recorded under the "Global" module as a string.

    Idiom fix: the original iterated ``.items()`` and discarded every key;
    ``.values()`` expresses the intent directly.
    """
    return str(sum(mode.flop_counts["Global"].values()))
def T(*shape, requires_grad=False):
    """Shorthand for a standard-normal tensor of the given shape."""
    t = torch.randn(*shape)
    t.requires_grad_(requires_grad)
    return t
@unittest.skipIf(
TEST_WITH_TORCHDYNAMO, "torchdynamo doesn't work with __torch_dispatch__ right now"
)
class TestFlopCounter(TestCase):
    def test_flop_counter_variety(self):
        """Smoke test: several matmul-family ops plus a Linear all accumulate."""
        mod = torch.nn.Linear(9, 10)
        with FlopCounterMode() as mode:
            torch.mm(T(4, 5), T(5, 6))
            torch.addmm(T(4, 6), T(4, 5), T(5, 6), beta=0.5, alpha=0.5)
            torch.matmul(T(5, 6), T(6, 7))
            torch.einsum("ab,bc->ac", T(6, 7), T(7, 8))
            mod(T(8, 9))
        self.assertExpectedInline(get_total_flops(mode), """3012""")
    def test_op(self):
        """Pin the per-op flop formulas (mm, bmm, addmm, baddbmm, conv2d, conv1d).

        Re-entering the same mode resets its counters between sections, as
        the repeated 720-totals demonstrate.
        """
        with FlopCounterMode() as mode:
            torch.mm(T(4, 5), T(5, 6))
        # 4 * 6 * 2 * 5 = 240
        self.assertExpectedInline(get_total_flops(mode), """240""")
        with mode:
            torch.bmm(T(3, 4, 5), T(3, 5, 6))
        # 3 * 4 * 6 * 2 * 5 = 720
        self.assertExpectedInline(get_total_flops(mode), """720""")
        with mode:
            torch.addmm(T(4, 6), T(4, 5), T(5, 6))
            torch.addmm(T(4, 1), T(4, 5), T(5, 6))
            torch.addmm(T(6), T(4, 5), T(5, 6))
        # 4 * 6 * 2 * 5 = 240
        self.assertExpectedInline(get_total_flops(mode), """720""")
        with mode:
            torch.baddbmm(T(3, 4, 6), T(3, 4, 5), T(3, 5, 6))
        # 3 * 4 * 6 * 2 * 5 = 720
        self.assertExpectedInline(get_total_flops(mode), """720""")
        with mode:
            torch.conv2d(T(2, 3, 6, 6), T(6, 3, 4, 4), padding=1)
        # out_image_size = 2 * 5 * 5
        # kernel_size = 4 * 4
        # c_out = 6
        # c_in = 3
        # out_image_size * kernel_size * c_out * 2 * c_in
        # NB: I don't think this properly accounts for padding?
        self.assertExpectedInline(get_total_flops(mode), """28800""")
        with mode:
            torch.conv1d(T(2, 3, 6), T(6, 3, 4), padding=1)
        # out_image_size = 2 * 5
        # kernel_size = 4
        # c_out = 6
        # c_in = 3
        # out_image_size * kernel_size * c_out * 2 * c_in
        # NB: I don't think this properly accounts for padding?
        self.assertExpectedInline(get_total_flops(mode), """1440""")
    def test_backward(self):
        """Backward-pass mm/bmm flops are counted when .backward() runs under the mode."""
        with FlopCounterMode() as mode:
            a = T(4, 5, requires_grad=True)
            a = torch.mm(a, T(5, 6))
            a = a.unsqueeze(0).expand(7, 4, 6)
            a = torch.bmm(a, T(7, 6, 7))
            a.sum().backward()
        self.assertExpectedInline(get_total_flops(mode), """5184""")
    def test_backward_reset(self):
        """Two identical forward+backward runs inside one mode total correctly."""
        with FlopCounterMode() as mode:
            a = T(4, 5, requires_grad=True)
            a.mm(a.t()).sum().backward()
            a.mm(a.t()).sum().backward()
        self.assertExpectedInline(get_total_flops(mode), """960""")
    def test_torchscript(self):
        """A torch.jit.script'ed function counts the same flops as eager mode."""
        def foo(x):
            return torch.mm(x, x)
        with FlopCounterMode() as mode:
            foo(T(5, 5))
        unscripted_flops = get_total_flops(mode)
        ts_foo = torch.jit.script(foo)
        with mode:
            ts_foo(T(5, 5))
        self.assertEqual(unscripted_flops, get_total_flops(mode))
    def test_autograd_op(self):
        """mms inside a custom autograd.Function's forward AND backward are counted."""
        class _CustomOp(torch.autograd.Function):
            @staticmethod
            def forward(ctx, input: torch.Tensor) -> torch.Tensor:
                return torch.mm(input, input)
            @staticmethod
            def backward(ctx, grad_output: torch.Tensor) -> torch.Tensor:
                # Two mms on purpose, so backward contributes twice forward's cost.
                return torch.mm(grad_output, grad_output) + torch.mm(
                    grad_output, grad_output
                )
        a = T(5, 5, requires_grad=True)
        with FlopCounterMode() as mode:
            a = _CustomOp.apply(a)
            a.sum().backward()
        self.assertExpectedInline(get_total_flops(mode), """750""")
    def test_conv_backwards_as_decomposition(self):
        """A hand-written conv backward expressed as forward convs must match
        autograd's gradients numerically (float64 for tight tolerance).
        """
        # [conv backwards decomposition as conv forwards]
        class onlyConvs(torch.autograd.Function):
            @staticmethod
            def forward(inp, weight, transposed):
                if not transposed:
                    return F.conv1d(inp, weight)
                else:
                    return F.conv_transpose1d(inp, weight)
            @staticmethod
            def setup_context(ctx, inputs, output):
                inp, weight, transposed = inputs
                ctx.save_for_backward(inp, weight)
                ctx.transposed = transposed
            @staticmethod
            def backward(ctx, grad_out):
                # Gradients computed purely with conv/conv_transpose calls.
                inp, weight = ctx.saved_tensors
                if not ctx.transposed:
                    grad_inp = F.conv_transpose1d(grad_out, weight)
                    grad_weight = F.conv1d(inp, grad_out)
                    return grad_inp, grad_weight, None
                else:
                    grad_inp = F.conv1d(grad_out, weight)
                    grad_weight = F.conv1d(
                        grad_out.transpose(1, 0), inp.transpose(1, 0)
                    )
                    return grad_inp, grad_weight.transpose(1, 0), None
        from torch.func import grad
        x = torch.randn(2, 3, 16, dtype=torch.float64)
        weight = torch.randn(3, 4, 4, dtype=torch.float64)
        def boring_conv(x, weight, transposed):
            if not transposed:
                return F.conv1d(x, weight).pow(2).sum()
            else:
                return F.conv_transpose1d(x, weight).pow(2).sum()
        def only_convs(x, weight, transposed):
            return onlyConvs.apply(x, weight, transposed).pow(2).sum()
        boring_grads = grad(boring_conv, argnums=(0, 1))(x, weight, True)
        fun_grads = grad(only_convs, argnums=(0, 1))(x, weight, True)
        self.assertEqual(boring_grads, fun_grads)
    def test_convs(self):
        """Convolution backward should cost exactly 2x the forward conv flops."""
        def assert_equivalence(f, expected_forward=None):
            # Run f under the counter, then compare the forward vs backward
            # aten.convolution flop totals.
            with FlopCounterMode() as mode:
                f()
            conv_forward_flops = mode.get_flop_counts()["Global"][
                torch.ops.aten.convolution
            ]
            conv_backward_flops = mode.get_flop_counts()["Global"][
                torch.ops.aten.convolution_backward
            ]
            self.assertEqual(conv_forward_flops * 2, conv_backward_flops)
            if expected_forward is not None:
                self.assertEqual(conv_forward_flops, expected_forward)
        x = torch.rand(1, 1, 2, 2, requires_grad=True)
        weight = torch.randn(1, 1, 2, 2, requires_grad=True)
        assert_equivalence(lambda: F.conv_transpose2d(x, weight).sum().backward(), 32)
        x = torch.rand(1, 1, 2, 2, requires_grad=True)
        weight = torch.randn(1, 1, 1, 1, requires_grad=True)
        assert_equivalence(lambda: F.conv2d(x, weight).sum().backward(), 8)
        # Sweep channel combinations for both conv and conv_transpose.
        # NOTE(review): ``groups`` is unpacked but never used below.
        for in_channels, out_channels, groups in [
            (1, 1, 1),
            (1, 3, 1),
            (3, 1, 1),
            (3, 7, 1),
            (2, 4, 2),
            (4, 2, 2),
        ]:
            x = torch.rand(1, in_channels, 4, 4, requires_grad=True)
            weight = torch.randn(out_channels, in_channels, 2, 2, requires_grad=True)
            assert_equivalence(lambda: F.conv2d(x, weight).sum().backward())
            transposed_weight = torch.randn(
                in_channels, out_channels, 2, 2, requires_grad=True
            )
            assert_equivalence(
                lambda: F.conv_transpose2d(x, transposed_weight).sum().backward()
            )
    @skipIfNoTorchVision
    def test_module(self):
        """Per-module attribution: flops land under names like "ResNet.layer1".

        Totals are pinned for torchvision's resnet18 at 1x3x224x224.
        """
        resnet18 = torchvision_models.resnet18()
        with FlopCounterMode(resnet18) as mode:
            a = T(1, 3, 224, 224, requires_grad=True)
            resnet18(a).sum().backward()
        self.assertExpectedInline(get_total_flops(mode), """10884440064""")
        layer1_conv_flops = mode.flop_counts["ResNet.layer1"][
            torch.ops.aten.convolution
        ]
        layer1_conv_back_flops = mode.flop_counts["ResNet.layer1"][
            torch.ops.aten.convolution_backward
        ]
        self.assertExpectedInline(str(layer1_conv_flops), """924844032""")
        self.assertExpectedInline(str(layer1_conv_back_flops), """1849688064""")
    def test_conv_transpose_loop(self):
        """Counts accumulate across 50 repeated forward/backward iterations."""
        x = torch.rand(1, 4, 30, 2)
        model = torch.nn.ConvTranspose2d(4, 8, (2, 2), stride=2)
        with FlopCounterMode() as mode:
            for _ in range(50):
                out = model(x)
                out.sum().backward()
        self.assertExpectedInline(str(mode.get_total_flops()), """1536000""")
    def test_custom(self):
        """custom_mapping overrides the flop formula for an op."""
        mode = FlopCounterMode(
            custom_mapping={torch.ops.aten.add: lambda *args, out_shape: 5}
        )
        with mode:
            a = T(4, 5)
            a + a
        self.assertExpectedInline(get_total_flops(mode), """5""")
        def count(*args, out_val):
            return out_val.numel()
        # Marking the formula with _get_raw presumably makes the counter pass
        # raw argument/output values instead of shape metadata -- TODO confirm.
        count._get_raw = True
        mode = FlopCounterMode(custom_mapping={torch.ops.aten.add: count})
        with mode:
            a = T(4, 5)
            a + a
        self.assertExpectedInline(get_total_flops(mode), """20""")
def test_noop(self):
with FlopCounterMode() as mode:
T(4, 5).cos()
    @unittest.skipIf(not HAS_CUDA, "CUDA not available")
    @unittest.skipIf(
        not PLATFORM_SUPPORTS_FLASH_ATTENTION
        or not PLATFORM_SUPPORTS_MEM_EFF_ATTENTION
        or not PLATFORM_SUPPORTS_CUDNN_ATTENTION,
        "Does not support all SDPA backends (pre-SM80 hardware on CUDA)",
    )
    def test_sdpa(self):
        """Forward flops must agree across math/flash/mem_efficient/cudnn SDPA
        backends; backward totals are pinned per backend (flash-style kernels
        recompute, so their backward costs more than math's).
        """
        batch_size = 4
        n_heads = 8
        seq_len_q = 128
        seq_len_k = 256
        head_dim = 64
        head_dim_v = 64
        dtype = torch.float16
        torch.manual_seed(0)
        def get_flops(
            batch_size,
            n_heads,
            seq_len_q,
            seq_len_k,
            head_dim,
            head_dim_v,
            dtype,
            backend,
            with_backward=False,
        ):
            # Run one SDPA call under the requested backend and return the
            # counted flops as an int.
            query = torch.randn(
                batch_size,
                n_heads,
                seq_len_q,
                head_dim,
                device="cuda",
                dtype=dtype,
                requires_grad=True,
            )
            key = torch.randn(
                batch_size,
                n_heads,
                seq_len_k,
                head_dim,
                device="cuda",
                dtype=dtype,
                requires_grad=True,
            )
            value = torch.randn(
                batch_size,
                n_heads,
                seq_len_k,
                head_dim_v,
                device="cuda",
                dtype=dtype,
                requires_grad=True,
            )
            # Translate the backend name into a context manager that enables
            # exactly that one backend.
            if backend == "math":
                backend = torch.backends.cuda.sdp_kernel(
                    enable_flash=False,
                    enable_math=True,
                    enable_mem_efficient=False,
                    enable_cudnn=False,
                )
            elif backend == "flash":
                backend = torch.backends.cuda.sdp_kernel(
                    enable_flash=True,
                    enable_math=False,
                    enable_mem_efficient=False,
                    enable_cudnn=False,
                )
            elif backend == "mem_efficient":
                backend = torch.backends.cuda.sdp_kernel(
                    enable_flash=False,
                    enable_math=False,
                    enable_mem_efficient=True,
                    enable_cudnn=False,
                )
            elif backend == "cudnn":
                backend = torch.backends.cuda.sdp_kernel(
                    enable_flash=False,
                    enable_math=False,
                    enable_mem_efficient=False,
                    enable_cudnn=True,
                )
            mode = FlopCounterMode()
            with backend, mode:
                out = F.scaled_dot_product_attention(
                    query, key, value, dropout_p=0, is_causal=True
                )
                if with_backward:
                    out.sum().backward()
            return int(get_total_flops(mode))
        # Sets seq_len_q == seq_len_k and dim_q == dim_v
        run_uniform_flops = functools.partial(
            get_flops,
            batch_size,
            n_heads,
            seq_len_q,
            seq_len_q,
            head_dim,
            head_dim,
            dtype,
        )
        flops = [
            run_uniform_flops(backend, with_backward=False)
            for backend in ["math", "flash", "mem_efficient", "cudnn"]
        ]
        flops_fw_math, flops_fw_flash, flops_fw_efficient, flops_fw_cudnn = flops
        self.assertEqual(flops_fw_math, flops_fw_flash)
        self.assertEqual(flops_fw_math, flops_fw_efficient)
        self.assertEqual(flops_fw_math, flops_fw_cudnn)
        self.assertExpectedInline(str(flops_fw_math), """134217728""")
        flops = [
            run_uniform_flops(backend, with_backward=True)
            for backend in ["math", "flash", "mem_efficient", "cudnn"]
        ]
        (
            flops_fw_bw_math,
            flops_fw_bw_flash,
            flops_fw_bw_efficient,
            flops_fw_bw_cudnn,
        ) = flops
        # math backward = 2x forward (total 3x); flash-style = 2.5x (total 3.5x).
        self.assertEqual(flops_fw_math * 3, flops_fw_bw_math)
        self.assertEqual(flops_fw_math * 7 // 2, flops_fw_bw_flash)
        self.assertEqual(flops_fw_bw_flash, flops_fw_bw_efficient)
        self.assertEqual(flops_fw_bw_flash, flops_fw_bw_cudnn)
        run_nonuniform_flops = functools.partial(
            get_flops,
            batch_size,
            n_heads,
            seq_len_q,
            seq_len_k,
            head_dim,
            head_dim_v,
            dtype,
        )
        # Flash does not support non-uniform attention, i.e. seq_len_q != seq_len_k or dim_q != dim_v"
        non_uniform_backends = ["math", "mem_efficient"]
        flops = [
            run_nonuniform_flops(backend, with_backward=False)
            for backend in non_uniform_backends
        ]
        flops_fw_math, flops_fw_efficient = flops
        self.assertEqual(flops_fw_math, flops_fw_efficient)
        self.assertExpectedInline(str(flops_fw_math), """268435456""")
        flops = [
            run_nonuniform_flops(backend, with_backward=True)
            for backend in non_uniform_backends
        ]
        flops_fw_bw_math, flops_fw_bw_efficient = flops
        self.assertExpectedInline(str(flops_fw_bw_math), """805306368""")
        self.assertExpectedInline(str(flops_fw_bw_efficient), """939524096""")
    @unittest.skipIf(not HAS_CUDA, "CUDA not available")
    @unittest.skipIf(
        not PLATFORM_SUPPORTS_FLASH_ATTENTION
        or not PLATFORM_SUPPORTS_MEM_EFF_ATTENTION,
        "Does not support all SDPA backends (pre-SM80 hardware on CUDA)",
    )
    def test_sdpa_nested_tensor(self):
        """Jagged (nested-tensor) SDPA should count the same flops as running
        each batch element densely and summing the per-element counts.
        """
        def get_flops(q, k, v, backend, with_backward=False):
            # Count flops of one SDPA call under the selected backend.
            mode = FlopCounterMode()
            if backend == "math":
                backend = torch.backends.cuda.sdp_kernel(
                    enable_flash=False,
                    enable_math=True,
                    enable_mem_efficient=False,
                    enable_cudnn=False,
                )
            elif backend == "flash":
                backend = torch.backends.cuda.sdp_kernel(
                    enable_flash=True,
                    enable_math=False,
                    enable_mem_efficient=False,
                    enable_cudnn=False,
                )
            elif backend == "mem_efficient":
                backend = torch.backends.cuda.sdp_kernel(
                    enable_flash=False,
                    enable_math=False,
                    enable_mem_efficient=True,
                    enable_cudnn=False,
                )
            with backend, mode:
                out = F.scaled_dot_product_attention(
                    q, k, v, dropout_p=0, is_causal=True
                )
                if with_backward:
                    # Nested outputs need .values() before a scalar reduction.
                    if out.is_nested:
                        out.values().sum().backward()
                    else:
                        out.sum().backward()
            return int(get_total_flops(mode))
        def get_nested_inputs(
            batch_size,
            n_heads,
            max_seq_len_q,
            max_seq_len_k,
            head_dim,
            head_dim_v,
            dtype,
        ):
            # Four batch elements with sequence lengths at 1/4, 2/4, 3/4
            # and 4/4 of the maximum.
            q_lengths = torch.tensor(
                [
                    max_seq_len_q // 4,
                    max_seq_len_q // 4 * 2,
                    max_seq_len_q // 4 * 3,
                    max_seq_len_q // 4 * 4,
                ]
            )
            k_lengths = torch.tensor(
                [
                    max_seq_len_k // 4,
                    max_seq_len_k // 4 * 2,
                    max_seq_len_k // 4 * 3,
                    max_seq_len_k // 4 * 4,
                ]
            )
            q_offsets, k_offsets = (
                torch.cat((torch.tensor([0]), torch.cumsum(lengths, dim=0))).cuda()
                for lengths in (q_lengths, k_lengths)
            )
            q_values = torch.randn(
                q_offsets[-1],
                head_dim * n_heads,
                dtype=dtype,
                requires_grad=True,
                device="cuda",
            )
            k_values = torch.randn(
                k_offsets[-1],
                head_dim * n_heads,
                dtype=dtype,
                requires_grad=True,
                device="cuda",
            )
            v_values = torch.randn(
                k_offsets[-1],
                head_dim_v * n_heads,
                dtype=dtype,
                requires_grad=True,
                device="cuda",
            )
            q = torch.nested.nested_tensor_from_jagged(q_values, q_offsets)
            k = torch.nested.nested_tensor_from_jagged(k_values, k_offsets)
            v = torch.nested.nested_tensor_from_jagged(v_values, k_offsets)
            q = q.view(batch_size, -1, n_heads, head_dim).transpose(1, 2)
            k = k.view(batch_size, -1, n_heads, head_dim).transpose(1, 2)
            v = v.view(batch_size, -1, n_heads, head_dim_v).transpose(1, 2)
            return q, k, v
        def get_dense_flops(q, k, v, backend, with_backward=False):
            # Unbind the jagged batch into dense per-element tensors and sum
            # the flops of running each one separately.
            def split_tensor(x):
                return (
                    y.unsqueeze(0).transpose(1, 2).detach().requires_grad_(True)
                    for y in x.transpose(1, 2).unbind(0)
                )
            q_tensors = split_tensor(q)
            k_tensors = split_tensor(k)
            v_tensors = split_tensor(v)
            flops = 0
            for q_i, k_i, v_i in zip(q_tensors, k_tensors, v_tensors):
                flops += get_flops(
                    q_i, k_i, v_i, backend=backend, with_backward=with_backward
                )
            return flops
        uniform_config = {
            "batch_size": 4,
            "n_heads": 8,
            "max_seq_len_q": 128,
            "max_seq_len_k": 128,
            "head_dim": 64,
            "head_dim_v": 64,
            "dtype": torch.float16,
        }
        # max_seq_len_q != max_seq_len_k doesn't work for flash attention with dense tensors.
        differing_config = {
            "batch_size": 4,
            "n_heads": 8,
            "max_seq_len_q": 128,
            "max_seq_len_k": 256,
            "head_dim": 64,
            "head_dim_v": 64,
            "dtype": torch.float16,
        }
        self.assertEqual(
            get_dense_flops(
                *get_nested_inputs(**uniform_config),
                backend="flash",
                with_backward=False,
            ),
            get_flops(
                *get_nested_inputs(**uniform_config),
                backend="flash",
                with_backward=False,
            ),
        )
        self.assertEqual(
            get_dense_flops(
                *get_nested_inputs(**uniform_config),
                backend="mem_efficient",
                with_backward=False,
            ),
            get_flops(
                *get_nested_inputs(**uniform_config),
                backend="mem_efficient",
                with_backward=False,
            ),
        )
        self.assertEqual(
            get_dense_flops(
                *get_nested_inputs(**differing_config),
                backend="mem_efficient",
                with_backward=False,
            ),
            get_flops(
                *get_nested_inputs(**differing_config),
                backend="mem_efficient",
                with_backward=False,
            ),
        )
        self.assertEqual(
            get_dense_flops(
                *get_nested_inputs(**uniform_config),
                backend="flash",
                with_backward=True,
            ),
            get_flops(
                *get_nested_inputs(**uniform_config),
                backend="flash",
                with_backward=True,
            ),
        )
        self.assertEqual(
            get_dense_flops(
                *get_nested_inputs(**uniform_config),
                backend="mem_efficient",
                with_backward=True,
            ),
            get_flops(
                *get_nested_inputs(**uniform_config),
                backend="mem_efficient",
                with_backward=True,
            ),
        )
        self.assertEqual(
            get_dense_flops(
                *get_nested_inputs(**differing_config),
                backend="mem_efficient",
                with_backward=True,
            ),
            get_flops(
                *get_nested_inputs(**differing_config),
                backend="mem_efficient",
                with_backward=True,
            ),
        )
    @unittest.skipIf(not HAS_CUDA, "CUDA not available")
    @unittest.skipIf(
        not PLATFORM_SUPPORTS_FLASH_ATTENTION,
        "Does not support all SDPA backends (pre-SM80 hardware on CUDA)",
    )
    def test_nested_attention_fake_tensors(self):
        """FakeTensor execution of varlen flash attention must count the same
        flops as the asserted real dense run below.
        """
        x = torch.randn(123, 4, 16, device="cuda", dtype=torch.bfloat16)
        offsets = torch.tensor([0, 30, 60, 90, 123], device="cuda")
        max_seqlen = 40
        with FakeTensorMode() as fake_mode:
            fake_x = fake_mode.from_tensor(x)
            fake_offsets = fake_mode.from_tensor(offsets)
            with FlopCounterMode() as fake_flop_counter_mode:
                torch.ops.aten._flash_attention_forward(
                    fake_x,
                    fake_x,
                    fake_x,
                    fake_offsets,
                    fake_offsets,
                    max_seqlen,
                    max_seqlen,
                    0.0,
                    False,
                    False,
                )
        dense_x = torch.randn(
            4, 40, 4, 16, dtype=torch.bfloat16, device="cuda"
        ).transpose(1, 2)
        with FlopCounterMode() as real_flop_counter_mode:
            torch.ops.aten._flash_attention_forward(
                dense_x,
                dense_x,
                dense_x,
                None,
                None,
                max_seqlen,
                max_seqlen,
                0.0,
                False,
                False,
            )
        self.assertEqual(
            int(get_total_flops(fake_flop_counter_mode)),
            int(get_total_flops(real_flop_counter_mode)),
        )
    def test_addmm_out(self):
        """mm with an explicit ``out=`` tensor is still counted (2 * 10^3 flops)."""
        def f(x):
            y = torch.zeros(10, 10)
            return torch.mm(x, x, out=y)
        with FlopCounterMode() as mode:
            f(torch.randn(10, 10))
        self.assertExpectedInline(get_total_flops(mode), """2000""")
    def test_hook_registration(self):
        """Global module forward hooks exist while the mode is active and are
        removed once the context exits.
        """
        model = torch.nn.Linear(100, 100)
        x = torch.randn(3, 100)
        with FlopCounterMode() as mode:
            self.assertEqual(len(torch.nn.modules.module._global_forward_pre_hooks), 1)
            self.assertEqual(len(torch.nn.modules.module._global_forward_hooks), 1)
            model(x).sum().backward()
        self.assertEqual(len(torch.nn.modules.module._global_forward_pre_hooks), 0)
        self.assertEqual(len(torch.nn.modules.module._global_forward_hooks), 0)
    def test_pytrees(self):
        """Per-module attribution works when module inputs/outputs are pytrees
        (dicts and tuples) rather than bare tensors.
        """
        class Foo(torch.nn.Module):
            def forward(self, x):
                x = x["a"].relu_()
                return {"a": torch.mm(x, x)}
        class Mod(torch.nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.a = Foo()
                self.b = Foo()
            def forward(self, x):
                return self.b(self.a(x))
        mod = Mod()
        with FlopCounterMode() as mode:
            mod({"a": torch.randn(10, 10, requires_grad=True).clone()})[
                "a"
            ].sum().backward()
        self.assertExpectedInline(
            (mode.flop_counts["Mod"][torch.ops.aten.mm]), """12000"""
        )
        class Mod2(torch.nn.Module):
            def forward(self, x):
                return (torch.mm(x, x),)
        mod = Mod2()
        with FlopCounterMode() as mode:
            mod(torch.randn(10, 10, requires_grad=True))[0].sum().backward()
        self.assertExpectedInline(
            (mode.flop_counts["Mod2"][torch.ops.aten.mm]), """6000"""
        )
def test_warning(self):
mod = torch.nn.Linear(2, 2)
with self.assertWarnsRegex(UserWarning, "not needed"):
FlopCounterMode(mod)
    def test_custom_op(self):
        """register_flop_formula rejects OpOverloads and is consulted for
        formulas registered against custom op packets.
        """
        from torch.utils.flop_counter import FlopCounterMode, register_flop_formula
        @torch.library.custom_op("mylib::foo", mutates_args=())
        def foo(x: torch.Tensor) -> torch.Tensor:
            return x.sin()
        called = 0
        # Registering against an overload (.default) rather than the packet
        # must raise.
        with self.assertRaisesRegex(
            ValueError, "expected each target to be OpOverloadPacket"
        ):
            register_flop_formula(torch.ops.mylib.foo.default)(lambda x: x)
        @register_flop_formula(torch.ops.mylib.foo)
        def formula(*args, **kwargs):
            nonlocal called
            called += 1
            return 9001
        x = torch.randn(3)
        # NOTE: the local import above shadows the file-level FlopCounterMode
        # wrapper, hence display=False is passed explicitly here.
        with FlopCounterMode(display=False) as mode:
            y = foo(x)
        self.assertEqual(called, 1)
        self.assertExpectedInline(get_total_flops(mode), """9001""")
    @requires_cuda_and_triton
    def test_flop_counter_custom_triton_manual_decomp(self):
        """A flop formula registered for a raw triton kernel is applied both
        when the kernel is launched directly and when a wrapping custom op is
        manually decomposed for _FlopCounterMode via register_torch_dispatch.
        """
        import triton
        import triton.language as tl
        from torch.utils.flop_counter import _FlopCounterMode, register_flop_formula
        @triton.jit
        def sin_kernel(x_ptr, out_ptr, n_elements, BLOCK_SIZE: tl.constexpr):
            pid = tl.program_id(axis=0)
            block_start = pid * BLOCK_SIZE
            offsets = block_start + tl.arange(0, BLOCK_SIZE)
            mask = offsets < n_elements
            x = tl.load(x_ptr + offsets, mask=mask)
            out = tl.sin(x)
            tl.store(out_ptr + offsets, out, mask=mask)
        x = torch.randn(3, device="cuda")
        out = torch.empty(3, device="cuda")
        @register_flop_formula(sin_kernel)
        def compute_sin_kernel_flops(*args, **kwargs) -> int:
            # dummy implementation
            return 2
        def sin_grid(meta):
            return (triton.cdiv(3, meta["BLOCK_SIZE"]),)
        with FlopCounterMode() as m:
            torch.library.wrap_triton(sin_kernel)[sin_grid](x, out, 3, 256)
        self.assertExpectedInline(get_total_flops(m), """2""")
        # Now, wrap in a triton op and do the decomp
        @torch._library.triton.triton_op("mylib::sin_op", mutates_args=())
        def op() -> None:
            torch.library.wrap_triton(sin_kernel)[sin_grid](x, out, 3, 256)
        def op_decompose(mode, *args, **kwargs):
            with mode:
                torch.library.wrap_triton(sin_kernel)[sin_grid](x, out, 3, 256)
        torch.library.register_torch_dispatch(
            "mylib::sin_op", _FlopCounterMode, op_decompose
        )
        # Should now output 2 flops; previously would be 0
        with FlopCounterMode() as m2:
            torch.ops.mylib.sin_op()
        self.assertExpectedInline(get_total_flops(m2), """2""")
@requires_cuda_and_triton
def test_flop_counter_custom_triton_op_two_kernels_manual_decomp(self):
import triton
import triton.language as tl
from torch.utils.flop_counter import _FlopCounterMode, register_flop_formula
@triton.jit
def sin_kernel(x_ptr, out_ptr, n_elements, BLOCK_SIZE: tl.constexpr):
pid = tl.program_id(axis=0)
block_start = pid * BLOCK_SIZE
offsets = block_start + tl.arange(0, BLOCK_SIZE)
mask = offsets < n_elements
x = tl.load(x_ptr + offsets, mask=mask)
out = tl.sin(x)
tl.store(out_ptr + offsets, out, mask=mask)
@triton.jit
def cos_kernel(x_ptr, out_ptr, n_elements, BLOCK_SIZE: tl.constexpr):
pid = tl.program_id(axis=0)
block_start = pid * BLOCK_SIZE
offsets = block_start + tl.arange(0, BLOCK_SIZE)
mask = offsets < n_elements
x = tl.load(x_ptr + offsets, mask=mask)
out = tl.cos(x)
tl.store(out_ptr + offsets, out, mask=mask)
x = torch.randn(3, device="cuda")
out = torch.empty(3, device="cuda")
@register_flop_formula(sin_kernel)
def compute_sin_kernel_flops(*args, **kwargs) -> int:
return 1
@register_flop_formula(cos_kernel)
def compute_cos_kernel_flops(*args, **kwargs) -> int:
return 1
def sin_grid(meta):
return (triton.cdiv(3, meta["BLOCK_SIZE"]),)
def cos_grid(meta):
return (triton.cdiv(3, meta["BLOCK_SIZE"]),)
with FlopCounterMode() as m:
torch.library.wrap_triton(sin_kernel)[sin_grid](x, out, 3, 256)
torch.library.wrap_triton(cos_kernel)[cos_grid](x, out, 3, 256)
self.assertExpectedInline(get_total_flops(m), """2""")
# Now, wrap in a triton op and do the decomp
@torch._library.triton.triton_op("mylib::trig_op", mutates_args=())
def trig_op() -> None:
torch.library.wrap_triton(sin_kernel)[sin_grid](x, out, 3, 256)
torch.library.wrap_triton(cos_kernel)[cos_grid](x, out, 3, 256)
def op_decompose(mode, *args, **kwargs):
with mode:
torch.library.wrap_triton(sin_kernel)[sin_grid](x, out, 3, 256)
torch.library.wrap_triton(cos_kernel)[cos_grid](x, out, 3, 256)
# Simulate the decomposition of the triton op into its kernels
# this takes place in aot_autograd, which is then seen for AC
torch.library.register_torch_dispatch(
"mylib::trig_op", _FlopCounterMode, op_decompose
)
# Should now output 2 flops; It is important that we compile
# this function to aot_eager in order to decompose the triton
# op into its kernels
with FlopCounterMode() as m2:
torch.ops.mylib.trig_op()
self.assertExpectedInline(get_total_flops(m2), """2""")
@requires_cuda_and_triton
@torch._functorch.config.patch("activation_memory_budget", 0.1)
@torch._functorch.config.patch("activation_memory_budget_solver", "dp")
@torch._functorch.config.patch("is_non_builtin_to_include", True)
def test_flop_counter_custom_triton_op_two_kernels_auto_ac(self):
import triton
import triton.language as tl
from torch.utils.flop_counter import register_flop_formula
@triton.jit
def sin_kernel(x_ptr, out_ptr, n_elements, BLOCK_SIZE: tl.constexpr):
pid = tl.program_id(axis=0)
block_start = pid * BLOCK_SIZE
offsets = block_start + tl.arange(0, BLOCK_SIZE)
mask = offsets < n_elements
x = tl.load(x_ptr + offsets, mask=mask)
out = tl.sin(x)
tl.store(out_ptr + offsets, out, mask=mask)
@triton.jit
def cos_kernel(x_ptr, out_ptr, n_elements, BLOCK_SIZE: tl.constexpr):
pid = tl.program_id(axis=0)
block_start = pid * BLOCK_SIZE
offsets = block_start + tl.arange(0, BLOCK_SIZE)
mask = offsets < n_elements
x = tl.load(x_ptr + offsets, mask=mask)
out = tl.cos(x)
tl.store(out_ptr + offsets, out, mask=mask)
n_elements = int(1e7)
x = torch.randn(n_elements, device="cuda", requires_grad=True)
cos_flops_recorded, sin_flops_recorded = 0, 0
@register_flop_formula(sin_kernel)
def compute_sin_kernel_flops(*args, **kwargs) -> int:
# dummy implementation
nonlocal sin_flops_recorded
sin_flops_recorded += 1
return 1
@register_flop_formula(cos_kernel)
def compute_cos_kernel_flops(*args, **kwargs) -> int:
# dummy implementation
nonlocal cos_flops_recorded
cos_flops_recorded += 1
return 1
def sin_grid(meta):
return (triton.cdiv(n_elements, meta["BLOCK_SIZE"]),)
def cos_grid(meta):
return (triton.cdiv(n_elements, meta["BLOCK_SIZE"]),)
@torch._library.triton.triton_op("mylib::trig_op", mutates_args=())
def trig_op(x_inp: torch.Tensor) -> torch.Tensor:
output = torch.empty_like(x_inp)
torch.library.wrap_triton(sin_kernel)[sin_grid](
x_inp, output, n_elements, 256
)
torch.library.wrap_triton(cos_kernel)[cos_grid](
x_inp, output, n_elements, 256
)
return output
# Register a backward
def trig_op_backward(ctx, grad_output):
(out,) = ctx.saved_tensors
return grad_output * out
def trig_op_setup_context(ctx, inputs, output):
ctx.save_for_backward(output)
trig_op.register_autograd(trig_op_backward, setup_context=trig_op_setup_context)
def fn(x_inp: torch.Tensor):
y1 = torch.ops.mylib.trig_op(x_inp)
y2 = torch.ops.mylib.trig_op(y1)
y3 = torch.ops.mylib.trig_op(y2)
return y3
torch.compile(fn, backend="aot_eager_decomp_partition", fullgraph=True)(x)
# Since we decompose, we will call the formula 3 times
self.assertEqual(
sin_flops_recorded,
3,
"Custom formula for sin_kernel not recorded during partitioning",
)
self.assertEqual(
cos_flops_recorded,
3,
"Custom formula for cos_kernel not recorded during partitioning",
)
@skipIfNoTorchVision
def test_inference_mode(self):
def get_flops(model):
with FlopCounterMode(model) as mode:
a = T(1, 3, 224, 224)
model(a).sum()
return mode
resnet18 = torchvision_models.resnet18()
mode_standard = get_flops(resnet18)
with torch.inference_mode():
mode_inference = get_flops(resnet18)
self.assertEqual(
get_total_flops(mode_standard), get_total_flops(mode_inference)
)
layer1_conv_flops_standard = mode_standard.flop_counts["ResNet.layer1"][
torch.ops.aten.convolution
]
layer1_conv_flops_inference = mode_inference.flop_counts["ResNet.layer1"][
torch.ops.aten.convolution
]
self.assertEqual(layer1_conv_flops_standard, layer1_conv_flops_inference)
@unittest.skipIf(not HAS_CUDA, "CUDA not available")
@unittest.skipIf(
not PLATFORM_SUPPORTS_FP8,
"FP8 is only supported on H100+, SM 8.9 and MI300+ devices",
)
def test_scaled_mm(self):
dtype = e4m3_type
with FlopCounterMode() as mode:
torch._scaled_mm(
torch.randn((3 * 16, 5 * 16), device="cuda").to(dtype),
torch.randn((7 * 16, 5 * 16), device="cuda").to(dtype).t(),
scale_a=torch.ones((), device="cuda"),
scale_b=torch.ones((), device="cuda"),
out_dtype=torch.bfloat16,
)
self.assertExpectedInline(get_total_flops(mode), """860160""")
if __name__ == "__main__":
run_tests() | python | github | https://github.com/pytorch/pytorch | test/test_flop_counter.py |
"""**Docstores** are classes to store and load Documents.
The **Docstore** is a simplified version of the Document Loader.
"""
from typing import TYPE_CHECKING, Any
from langchain_classic._api import create_importer
if TYPE_CHECKING:
from langchain_community.docstore.arbitrary_fn import DocstoreFn
from langchain_community.docstore.in_memory import InMemoryDocstore
from langchain_community.docstore.wikipedia import Wikipedia
# Create a way to dynamically look up deprecated imports.
# Used to consolidate logic for raising deprecation warnings and
# handling optional imports.
DEPRECATED_LOOKUP = {
"DocstoreFn": "langchain_community.docstore.arbitrary_fn",
"InMemoryDocstore": "langchain_community.docstore.in_memory",
"Wikipedia": "langchain_community.docstore.wikipedia",
}
_import_attribute = create_importer(__package__, deprecated_lookups=DEPRECATED_LOOKUP)
def __getattr__(name: str) -> Any:
"""Look up attributes dynamically."""
return _import_attribute(name)
__all__ = [
"DocstoreFn",
"InMemoryDocstore",
"Wikipedia",
] | python | github | https://github.com/langchain-ai/langchain | libs/langchain/langchain_classic/docstore/__init__.py |
# -*- coding: utf-8; -*-
# This file is part of Superdesk.
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
#
# Author : superdesk
# Creation: 2017-08-14 11:47
from superdesk.commands.data_updates import DataUpdate
from superdesk import get_resource_service
from eve.utils import config, app
from eve.io.mongo.mongo import create_index
from superdesk.audit.commands import PurgeAudit
class DataUpdate(DataUpdate):
resource = 'audit'
def forwards(self, mongodb_collection, mongodb_database):
for audit in mongodb_collection.find({'resource': {'$in': PurgeAudit.item_resources}}):
audit_id = get_resource_service(self.resource)._extract_doc_id(audit.get('extra'))
print(mongodb_collection.update({'_id': audit.get(config.ID_FIELD)},
{'$set': {
'audit_id': audit_id
}}))
try:
create_index(app=app, resource=self.resource, name='audit_id', list_of_keys=[('audit_id', 1)],
index_options={'background': True})
except Exception:
print('create index failed')
def backwards(self, mongodb_collection, mongodb_database):
print(mongodb_collection.update({},
{'$unset': {'audit_id': []}},
upsert=False, multi=True)) | unknown | codeparrot/codeparrot-clean | ||
#!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2010 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
from PyQt4 import QtCore, QtGui
class TabDialog(QtGui.QDialog):
def __init__(self, fileName, parent=None):
super(TabDialog, self).__init__(parent)
fileInfo = QtCore.QFileInfo(fileName)
tabWidget = QtGui.QTabWidget()
tabWidget.addTab(GeneralTab(fileInfo), "General")
tabWidget.addTab(PermissionsTab(fileInfo), "Permissions")
tabWidget.addTab(ApplicationsTab(fileInfo), "Applications")
buttonBox = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel)
buttonBox.accepted.connect(self.accept)
buttonBox.rejected.connect(self.reject)
mainLayout = QtGui.QVBoxLayout()
mainLayout.addWidget(tabWidget)
mainLayout.addWidget(buttonBox)
self.setLayout(mainLayout)
self.setWindowTitle("Tab Dialog")
class GeneralTab(QtGui.QWidget):
def __init__(self, fileInfo, parent=None):
super(GeneralTab, self).__init__(parent)
fileNameLabel = QtGui.QLabel("File Name:")
fileNameEdit = QtGui.QLineEdit(fileInfo.fileName())
pathLabel = QtGui.QLabel("Path:")
pathValueLabel = QtGui.QLabel(fileInfo.absoluteFilePath())
pathValueLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
sizeLabel = QtGui.QLabel("Size:")
size = fileInfo.size() // 1024
sizeValueLabel = QtGui.QLabel("%d K" % size)
sizeValueLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
lastReadLabel = QtGui.QLabel("Last Read:")
lastReadValueLabel = QtGui.QLabel(fileInfo.lastRead().toString())
lastReadValueLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
lastModLabel = QtGui.QLabel("Last Modified:")
lastModValueLabel = QtGui.QLabel(fileInfo.lastModified().toString())
lastModValueLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
mainLayout = QtGui.QVBoxLayout()
mainLayout.addWidget(fileNameLabel)
mainLayout.addWidget(fileNameEdit)
mainLayout.addWidget(pathLabel)
mainLayout.addWidget(pathValueLabel)
mainLayout.addWidget(sizeLabel)
mainLayout.addWidget(sizeValueLabel)
mainLayout.addWidget(lastReadLabel)
mainLayout.addWidget(lastReadValueLabel)
mainLayout.addWidget(lastModLabel)
mainLayout.addWidget(lastModValueLabel)
mainLayout.addStretch(1)
self.setLayout(mainLayout)
class PermissionsTab(QtGui.QWidget):
def __init__(self, fileInfo, parent=None):
super(PermissionsTab, self).__init__(parent)
permissionsGroup = QtGui.QGroupBox("Permissions")
readable = QtGui.QCheckBox("Readable")
if fileInfo.isReadable():
readable.setChecked(True)
writable = QtGui.QCheckBox("Writable")
if fileInfo.isWritable():
writable.setChecked(True)
executable = QtGui.QCheckBox("Executable")
if fileInfo.isExecutable():
executable.setChecked(True)
ownerGroup = QtGui.QGroupBox("Ownership")
ownerLabel = QtGui.QLabel("Owner")
ownerValueLabel = QtGui.QLabel(fileInfo.owner())
ownerValueLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
groupLabel = QtGui.QLabel("Group")
groupValueLabel = QtGui.QLabel(fileInfo.group())
groupValueLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
permissionsLayout = QtGui.QVBoxLayout()
permissionsLayout.addWidget(readable)
permissionsLayout.addWidget(writable)
permissionsLayout.addWidget(executable)
permissionsGroup.setLayout(permissionsLayout)
ownerLayout = QtGui.QVBoxLayout()
ownerLayout.addWidget(ownerLabel)
ownerLayout.addWidget(ownerValueLabel)
ownerLayout.addWidget(groupLabel)
ownerLayout.addWidget(groupValueLabel)
ownerGroup.setLayout(ownerLayout)
mainLayout = QtGui.QVBoxLayout()
mainLayout.addWidget(permissionsGroup)
mainLayout.addWidget(ownerGroup)
mainLayout.addStretch(1)
self.setLayout(mainLayout)
class ApplicationsTab(QtGui.QWidget):
def __init__(self, fileInfo, parent=None):
super(ApplicationsTab, self).__init__(parent)
topLabel = QtGui.QLabel("Open with:")
applicationsListBox = QtGui.QListWidget()
applications = []
for i in range(1, 31):
applications.append("Application %d" % i)
applicationsListBox.insertItems(0, applications)
alwaysCheckBox = QtGui.QCheckBox()
if fileInfo.suffix():
alwaysCheckBox = QtGui.QCheckBox("Always use this application to "
"open files with the extension '%s'" % fileInfo.suffix())
else:
alwaysCheckBox = QtGui.QCheckBox("Always use this application to "
"open this type of file")
layout = QtGui.QVBoxLayout()
layout.addWidget(topLabel)
layout.addWidget(applicationsListBox)
layout.addWidget(alwaysCheckBox)
self.setLayout(layout)
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
if len(sys.argv) >= 2:
fileName = sys.argv[1]
else:
fileName = "."
tabdialog = TabDialog(fileName)
sys.exit(tabdialog.exec_()) | unknown | codeparrot/codeparrot-clean | ||
# -*- coding: utf-8 -*-
import json
from flask import jsonify
from flask import render_template, request, url_for, redirect
import time, random
#------------------------------------------------------------------------------
def get_desktop_items_data():
"""
Returns items for Desktop in JSON array:
title
"""
items = [
{'title': 'OS/2 System', 'icon': '/appmedia/imgs/system_folder.png', 'left': '0px', 'top': '40px', 'action': '/system_folder/'},
{'title': 'Information', 'icon': '/appmedia/imgs/help.png', 'left': '0px', 'top': '120px', 'action': '/appmedia/help/desktop.html'},
{'title': 'Virtual PC', 'icon': '/appmedia/imgs/system/minimized.png', 'left': '0px', 'top': '200px', 'action': '/'},
{'title': 'WebExplorer', 'icon': '/appmedia/imgs/web/explore.gif', 'left': '0px', 'top': '280px', 'action': '/webexplorer/'},
{'title': 'WIN-OS/2 Window', 'icon': '/appmedia/imgs/cmd/win_wnd.png', 'left': '0px', 'top': '360px', 'action': '/cmd/?cmd=win_wnd', 'app': 'yes'},
{'title': 'Solitaire', 'icon': '/appmedia/imgs/files/sol.jpg', 'left': '0px', 'top': '440px', 'action': 'http://www.webolog.com/online_games/solitaire/loaderwm.swf', 'app': 'yes'},
]
#return jsonify(items=items)
return json.dumps(items)
#------------------------------------------------------------------------------
def get_lanchpad_data():
return render_template("lanchpad.html")
#------------------------------------------------------------------------------
def get_window_data():
"Returns rendered window with iframe inside"
title = request.args.get("title", "")
src = request.args.get("src", "")
width = request.args.get("width", "634")
height = request.args.get("height", "450")
win_id = int(time.time())
template = "pm/base_window.html"
if src.find("win_") != -1:
template = "pm/win_window.html"
#title = "Program Manager"
content = {
"title": title,
"src": src,
"win_id": win_id,
"wnd_left": random.randint(120, 300),
"wnd_top": random.randint(20, 100),
"width": width,
"height": height,
}
return render_template(template, **content)
#------------------------------------------------------------------------------
def get_dialog_data():
"Returns rendered dialog"
dlg = request.args.get("dlg", "")
title = request.args.get("title", "")
win_id = int(time.time())
template = "dialogs/%s.html" % dlg
content = {
"title": title,
"dlg": dlg,
"win_id": win_id,
"wnd_left": 400,
"wnd_top": 300,
"width": 290,
"height": 150,
}
return render_template(template, **content)
#------------------------------------------------------------------------------ | unknown | codeparrot/codeparrot-clean | ||
import { isReactRouterRepo } from "../config/is-react-router-repo";
export const ssrExternals = isReactRouterRepo()
? [
// This is only needed within this repo because these packages
// are linked to a directory outside of node_modules so Vite
// treats them as internal code by default.
"react-router",
"react-router-dom",
"@react-router/architect",
"@react-router/cloudflare",
"@react-router/dev",
"@react-router/express",
"@react-router/node",
"@react-router/serve",
]
: undefined; | typescript | github | https://github.com/remix-run/react-router | packages/react-router-dev/vite/ssr-externals.ts |
from datetime import date
from django.db import models
from django.test import TestCase
from django.contrib.auth.models import User
from turbion.bits.utils import merging
class MyProfile(models.Model):
user_ptr = models.ForeignKey(User, unique=True)
nickname = models.CharField(max_length=100)
www = models.URLField()
birth = models.DateField()
class Meta:
app_label="turbion"
class OtherProfile(models.Model):
user = models.ForeignKey(User, unique=True)
nickname = models.CharField(max_length=100)
website = models.URLField()
dob = models.DateField()
class Meta:
app_label="turbion"
class MyProfileLayer(merging.ModelLayer):
model = MyProfile
fields = ["nickname"]
aliases = {
"site": "www",
"day_of_birth": "birth"
}
key = 'user_ptr'
class OtherProfileLayer(merging.ModelLayer):
model = OtherProfile
fields = ["nickname"]
aliases = {
"site": "website",
"day_of_birth": "dob"
}
key = 'user'
create = True
merging.track([MyProfileLayer, OtherProfileLayer])
class Merge(TestCase):
def setUp(self):
self.user = User.objects.create_user(
"test",
"foobar@foo.bar"
)
self.my_profile = MyProfile.objects.create(
user_ptr=self.user,
nickname="test_foo",
www="http://foo.bar",
birth=date.today(),
)
def _test_objects(self, other):
my_profile = MyProfile.objects.get(pk=self.my_profile.pk)
self.assertEqual(other.nickname, my_profile.nickname)
self.assertEqual(other.website, my_profile.www)
self.assertEqual(other.dob, my_profile.birth)
def test_other_profile_existance(self):
self.assertEqual(
OtherProfile.objects.filter(user=self.user).count(),
1
)
other = OtherProfile.objects.get(user=self.user)
self._test_objects(other)
def test_other_change(self):
other = OtherProfile.objects.get(user=self.user)
other.website = "http://bar.foo"
other.save()
self._test_objects(other)
def test_my_change(self):
self.my_profile.website = "http://bar.foo"
self.my_profile.save()
other = OtherProfile.objects.get(user=self.user)
self._test_objects(other) | unknown | codeparrot/codeparrot-clean | ||
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'WMSSource.metadata'
db.add_column('lizard_wms_wmssource', 'metadata', self.gf('jsonfield.fields.JSONField')(null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'WMSSource.metadata'
db.delete_column('lizard_wms_wmssource', 'metadata')
models = {
'lizard_maptree.category': {
'Meta': {'ordering': "('name',)", 'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_maptree.Category']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '20', 'db_index': 'True'})
},
'lizard_wms.featureline': {
'Meta': {'object_name': 'FeatureLine'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_hover': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order_using': ('django.db.models.fields.IntegerField', [], {'default': '1000'}),
'render_as': ('django.db.models.fields.CharField', [], {'default': "u'T'", 'max_length': '1'}),
'use_as_id': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'wms_layer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_wms.WMSSource']"})
},
'lizard_wms.wmsconnection': {
'Meta': {'object_name': 'WMSConnection'},
'category': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lizard_maptree.Category']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'options': ('django.db.models.fields.TextField', [], {'default': 'u\'{"buffer": 0, "isBaseLayer": false, "opacity": 0.5}\''}),
'params': ('django.db.models.fields.TextField', [], {'default': 'u\'{"height": "256", "width": "256", "layers": "%s", "styles": "", "format": "image/png", "tiled": "true", "transparent": "true"}\''}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'version': ('django.db.models.fields.CharField', [], {'default': "u'1.3.0'", 'max_length': '20'}),
'xml': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'})
},
'lizard_wms.wmssource': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'WMSSource'},
'bbox': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lizard_maptree.Category']", 'null': 'True', 'blank': 'True'}),
'connection': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_wms.WMSConnection']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'legend_url': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'metadata': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'old_metadata': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'show_legend': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
}
}
complete_apps = ['lizard_wms'] | unknown | codeparrot/codeparrot-clean | ||
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from nova.conductor import manager as conductor_manager
from nova import db
from nova.tests.functional.v3 import test_servers
from nova import utils
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.legacy_v2.extensions')
class MigrateServerSamplesJsonTest(test_servers.ServersSampleBase):
extension_name = "os-migrate-server"
ctype = 'json'
extra_extensions_to_load = ["os-access-ips"]
_api_version = 'v2'
def _get_flags(self):
f = super(MigrateServerSamplesJsonTest, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.admin_actions.'
'Admin_actions')
return f
def setUp(self):
"""setUp Method for MigrateServer api samples extension
This method creates the server that will be used in each tests
"""
super(MigrateServerSamplesJsonTest, self).setUp()
self.uuid = self._post_server()
@mock.patch('nova.conductor.manager.ComputeTaskManager._cold_migrate')
def test_post_migrate(self, mock_cold_migrate):
# Get api samples to migrate server request.
response = self._do_post('servers/%s/action' % self.uuid,
'migrate-server', {})
self.assertEqual(202, response.status_code)
def test_post_live_migrate_server(self):
# Get api samples to server live migrate request.
def fake_live_migrate(_self, context, instance, scheduler_hint,
block_migration, disk_over_commit):
self.assertEqual(self.uuid, instance["uuid"])
host = scheduler_hint["host"]
self.assertEqual(self.compute.host, host)
self.stubs.Set(conductor_manager.ComputeTaskManager,
'_live_migrate',
fake_live_migrate)
def fake_get_compute(context, host):
service = dict(host=host,
binary='nova-compute',
topic='compute',
report_count=1,
updated_at='foo',
hypervisor_type='bar',
hypervisor_version=utils.convert_version_to_int(
'1.0'),
disabled=False)
return {'compute_node': [service]}
self.stubs.Set(db, "service_get_by_compute_host", fake_get_compute)
response = self._do_post('servers/%s/action' % self.uuid,
'live-migrate-server',
{'hostname': self.compute.host})
self.assertEqual(202, response.status_code) | unknown | codeparrot/codeparrot-clean | ||
"""Timing benchmark for AlexNet inference.
To run, use:
bazel run -c opt --config=cuda \
third_party/tensorflow/models/image/alexnet:alexnet_benchmark
Across 100 steps on batch size = 128.
Forward pass:
Run on Tesla K40c: 145 +/- 1.5 ms / batch
Run on Titan X: 70 +/- 0.1 ms / batch
Forward-backward pass:
Run on Tesla K40c: 480 +/- 48 ms / batch
Run on Titan X: 244 +/- 30 ms / batch
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
import math
from six.moves import xrange # pylint: disable=redefined-builtin
import time
import tensorflow.python.platform
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_integer('batch_size', 128,
"""Batch size.""")
tf.app.flags.DEFINE_integer('num_batches', 100,
"""Number of batches to run.""")
def print_activations(t):
print(t.op.name, ' ', t.get_shape().as_list())
def inference(images):
"""Build the AlexNet model.
Args:
images: Images Tensor
Returns:
pool5: the last Tensor in the convolutional component of AlexNet.
parameters: a list of Tensors corresponding to the weights and biases of the
AlexNet model.
"""
parameters = []
# conv1
with tf.name_scope('conv1') as scope:
kernel = tf.Variable(tf.truncated_normal([11, 11, 3, 64], dtype=tf.float32,
stddev=1e-1), name='weights')
conv = tf.nn.conv2d(images, kernel, [1, 4, 4, 1], padding='VALID')
biases = tf.Variable(tf.constant(0.0, shape=[64], dtype=tf.float32),
trainable=True, name='biases')
bias = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
conv1 = tf.nn.relu(bias, name=scope)
print_activations(conv1)
parameters += [kernel, biases]
# lrn1
# TODO(shlens, jiayq): Add a GPU version of local response normalization.
# pool1
pool1 = tf.nn.max_pool(conv1,
ksize=[1, 3, 3, 1],
strides=[1, 2, 2, 1],
padding='VALID',
name='pool1')
print_activations(pool1)
# conv2
with tf.name_scope('conv2') as scope:
kernel = tf.Variable(tf.truncated_normal([5, 5, 64, 192], dtype=tf.float32,
stddev=1e-1), name='weights')
conv = tf.nn.conv2d(pool1, kernel, [1, 1, 1, 1], padding='SAME')
biases = tf.Variable(tf.constant(0.0, shape=[192], dtype=tf.float32),
trainable=True, name='biases')
bias = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
conv2 = tf.nn.relu(bias, name=scope)
parameters += [kernel, biases]
print_activations(conv2)
# pool2
pool2 = tf.nn.max_pool(conv2,
ksize=[1, 3, 3, 1],
strides=[1, 2, 2, 1],
padding='VALID',
name='pool2')
print_activations(pool2)
# conv3
with tf.name_scope('conv3') as scope:
kernel = tf.Variable(tf.truncated_normal([3, 3, 192, 384],
dtype=tf.float32,
stddev=1e-1), name='weights')
conv = tf.nn.conv2d(pool2, kernel, [1, 1, 1, 1], padding='SAME')
biases = tf.Variable(tf.constant(0.0, shape=[384], dtype=tf.float32),
trainable=True, name='biases')
bias = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
conv3 = tf.nn.relu(bias, name=scope)
parameters += [kernel, biases]
print_activations(conv3)
# conv4
with tf.name_scope('conv4') as scope:
kernel = tf.Variable(tf.truncated_normal([3, 3, 384, 256],
dtype=tf.float32,
stddev=1e-1), name='weights')
conv = tf.nn.conv2d(conv3, kernel, [1, 1, 1, 1], padding='SAME')
biases = tf.Variable(tf.constant(0.0, shape=[256], dtype=tf.float32),
trainable=True, name='biases')
bias = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
conv4 = tf.nn.relu(bias, name=scope)
parameters += [kernel, biases]
print_activations(conv4)
# conv5
with tf.name_scope('conv5') as scope:
kernel = tf.Variable(tf.truncated_normal([3, 3, 256, 256],
dtype=tf.float32,
stddev=1e-1), name='weights')
conv = tf.nn.conv2d(conv4, kernel, [1, 1, 1, 1], padding='SAME')
biases = tf.Variable(tf.constant(0.0, shape=[256], dtype=tf.float32),
trainable=True, name='biases')
bias = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
conv5 = tf.nn.relu(bias, name=scope)
parameters += [kernel, biases]
print_activations(conv5)
# pool5
pool5 = tf.nn.max_pool(conv5,
ksize=[1, 3, 3, 1],
strides=[1, 2, 2, 1],
padding='VALID',
name='pool5')
print_activations(pool5)
return pool5, parameters
def time_tensorflow_run(session, target, info_string):
    """Run the computation to obtain the target tensor and print timing stats.

    Args:
        session: the TensorFlow session to run the computation under.
        target: the target Tensor that is passed to the session's run() function.
        info_string: a string summarizing this run, to be printed with the stats.

    Returns:
        None
    """
    num_steps_burn_in = 10
    total_duration = 0.0
    total_duration_squared = 0.0
    for i in xrange(FLAGS.num_batches + num_steps_burn_in):
        start_time = time.time()
        _ = session.run(target)
        duration = time.time() - start_time
        # Accumulate timings only after the burn-in steps.  Using >= (the
        # original used >) makes exactly FLAGS.num_batches durations
        # contribute to the statistics, matching the divisor below; the old
        # comparison silently dropped the first post-burn-in measurement.
        if i >= num_steps_burn_in:
            if not i % 10:
                print ('%s: step %d, duration = %.3f' %
                       (datetime.now(), i - num_steps_burn_in, duration))
            total_duration += duration
            total_duration_squared += duration * duration
    # Mean and standard deviation over the measured (post-burn-in) steps.
    mn = total_duration / FLAGS.num_batches
    vr = total_duration_squared / FLAGS.num_batches - mn * mn
    sd = math.sqrt(vr)
    print ('%s: %s across %d steps, %.3f +/- %.3f sec / batch' %
           (datetime.now(), info_string, FLAGS.num_batches, mn, sd))
def run_benchmark():
    """Run the benchmark on AlexNet: forward pass, then forward+backward."""
    with tf.Graph().as_default():
        # Generate some dummy images.
        image_size = 224
        # Note that our padding definition is slightly different the cuda-convnet.
        # In order to force the model to start with the same activations sizes,
        # we add 3 to the image_size and employ VALID padding above.
        images = tf.Variable(tf.random_normal([FLAGS.batch_size,
                                               image_size + 3,
                                               image_size + 3, 3],
                                              dtype=tf.float32,
                                              stddev=1e-1))
        # Build a Graph that computes the logits predictions from the
        # inference model.
        pool5, parameters = inference(images)
        # Build an initialization operation.
        init = tf.initialize_all_variables()
        # Start running operations on the Graph.  The BFC ("best-fit with
        # coalescing") allocator is selected explicitly for this workload.
        config = tf.ConfigProto()
        config.gpu_options.allocator_type = 'BFC'
        sess = tf.Session(config=config)
        sess.run(init)
        # Run the forward benchmark.
        time_tensorflow_run(sess, pool5, "Forward")
        # Add a simple objective so we can calculate the backward pass.
        objective = tf.nn.l2_loss(pool5)
        # Compute the gradient with respect to all the parameters.
        grad = tf.gradients(objective, parameters)
        # Run the backward benchmark.  Note this times evaluation of the
        # gradient ops, i.e. the combined forward + backward computation.
        time_tensorflow_run(sess, grad, "Forward-backward")
def main(_):
    # Entry point invoked by tf.app.run(); the parsed-flags argument is unused.
    run_benchmark()


if __name__ == '__main__':
    tf.app.run()
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
class URPlayIE(InfoExtractor):
    """Extractor for Swedish UR Play / UR Skola program pages."""

    # Matches both urplay.se/program/<id>-... and urskola.se/Produkter/<id>-...
    _VALID_URL = r'https?://(?:www\.)?ur(?:play|skola)\.se/(?:program|Produkter)/(?P<id>[0-9]+)'
    _TESTS = [{
        'url': 'http://urplay.se/program/190031-tripp-trapp-trad-sovkudde',
        'md5': 'ad5f0de86f16ca4c8062cd103959a9eb',
        'info_dict': {
            'id': '190031',
            'ext': 'mp4',
            'title': 'Tripp, Trapp, Träd : Sovkudde',
            'description': 'md5:b86bffdae04a7e9379d1d7e5947df1d1',
        },
    }, {
        'url': 'http://urskola.se/Produkter/155794-Smasagor-meankieli-Grodan-i-vida-varlden',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The page embeds player configuration as a JS object literal passed
        # to urPlayer.init(...); parse that literal as JSON.
        urplayer_data = self._parse_json(self._search_regex(
            r'urPlayer\.init\(({.+?})\);', webpage, 'urplayer data'), video_id)
        # Ask UR's load balancer which streaming host to use.
        host = self._download_json('http://streaming-loadbalancer.ur.se/loadbalancer.json', video_id)['redirect']
        formats = []
        # Collect SD then HD variants; the '_sub' keys appear to be streams
        # with burned-in subtitles used as fallbacks.
        # NOTE(review): quality/preference loop variables are currently unused.
        for quality_attr, quality, preference in (('', 'sd', 0), ('_hd', 'hd', 1)):
            file_http = urplayer_data.get('file_http' + quality_attr) or urplayer_data.get('file_http_sub' + quality_attr)
            if file_http:
                formats.extend(self._extract_wowza_formats(
                    'http://%s/%splaylist.m3u8' % (host, file_http), video_id, skip_protocols=['rtmp', 'rtsp']))
        self._sort_formats(formats)
        subtitles = {}
        for subtitle in urplayer_data.get('subtitles', []):
            subtitle_url = subtitle.get('file')
            kind = subtitle.get('kind')
            # Keep only caption tracks (or tracks that declare no kind).
            if not subtitle_url or (kind and kind != 'captions'):
                continue
            subtitles.setdefault(subtitle.get('label', 'Svenska'), []).append({
                'url': subtitle_url,
            })
        return {
            'id': video_id,
            'title': urplayer_data['title'],
            'description': self._og_search_description(webpage),
            'thumbnail': urplayer_data.get('image'),
            'series': urplayer_data.get('series_title'),
            'subtitles': subtitles,
            'formats': formats,
        }
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Possible states for xen resource pools.
A pool may be 'created', in which case the admin has triggered its
creation, but the underlying hypervisor pool has not actually being set up
yet. A pool may be 'changing', meaning that the underlying hypervisor
pool is being setup. A pool may be 'active', in which case the underlying
hypervisor pool is up and running. A pool may be 'dismissed' when it has
no hosts and it has been deleted. A pool may be in 'error' in all other
cases.
A 'created' pool becomes 'changing' during the first request of
adding a host. During a 'changing' status no other requests will be accepted;
this is to allow the hypervisor layer to instantiate the underlying pool
without any potential race condition that may incur in master/slave-based
configurations. The pool goes into the 'active' state when the underlying
pool has been correctly instantiated.
All other operations (e.g. add/remove hosts) that succeed will keep the
pool in the 'active' state. If a number of continuous requests fail,
an 'active' pool goes into an 'error' state. To recover from such a state,
admin intervention is required. Currently an error state is irreversible,
that is, in order to recover from it a pool must be deleted.
"""
# Pool lifecycle states (see the module docstring for allowed transitions).
CREATED = 'created'
CHANGING = 'changing'
ACTIVE = 'active'
ERROR = 'error'
DISMISSED = 'dismissed'

# Metadata keys
KEY = 'operational_state'      # aggregate-metadata key holding the state above
POOL_FLAG = 'hypervisor_pool'  # its presence marks an aggregate as a pool


def is_hv_pool(metadata):
    """Return True if the aggregate metadata marks a hypervisor pool."""
    # Test membership on the mapping directly instead of materializing
    # metadata.keys() just for a containment check.
    return POOL_FLAG in metadata
"""
Disk management utilities.
"""
# Authors: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Lars Buitinck <L.J.Buitinck@uva.nl>
# Copyright (c) 2010 Gael Varoquaux
# License: BSD Style, 3 clauses.
import errno
import os
import shutil
import sys
import time
def disk_used(path):
    """Return the disk usage of directory *path*, in kilobytes.

    Sums the per-entry usage of the directory's immediate children plus the
    directory itself ('.'); subdirectories are not recursed into.
    """
    size = 0
    # 'entry' rather than 'file' so the builtin of that name is not shadowed.
    for entry in os.listdir(path) + ['.']:
        stat = os.stat(os.path.join(path, entry))
        if hasattr(stat, 'st_blocks'):
            # st_blocks counts 512-byte blocks actually allocated on disk.
            size += stat.st_blocks * 512
        else:
            # on some platform st_blocks is not available (e.g., Windows)
            # approximate by rounding to next multiple of 512
            size += (stat.st_size // 512 + 1) * 512
    # We need to convert to int to avoid having longs on some systems (we
    # don't want longs to avoid problems we SQLite)
    return int(size / 1024.)
def memstr_to_kbytes(text):
    """Convert a memory size string to its value in kilobytes.

    *text* is a number followed by a unit suffix: 'K' (kilobytes),
    'M' (megabytes) or 'G' (gigabytes), e.g. '10G', '500M', '50K'.

    Raises
    ------
    ValueError
        If the suffix is unknown or the numeric part cannot be parsed.
    """
    kilo = 1024
    units = dict(K=1, M=kilo, G=kilo ** 2)
    try:
        size = int(units[text[-1]] * float(text[:-1]))
    except (KeyError, ValueError):
        # Fixed message typo: "size give" -> "size given".
        raise ValueError(
            "Invalid literal for size given: %s (type %s) should be "
            "alike '10G', '500M', '50K'." % (text, type(text))
        )
    return size
def mkdirp(d):
    """Ensure directory d exists (like mkdir -p on Unix).

    No guarantee that the directory is writable.
    """
    try:
        os.makedirs(d)
    # 'as' form is valid on Python 2.6+ and 3.x; the old comma form
    # ('except OSError, e') is a SyntaxError on Python 3.
    except OSError as e:
        # Only swallow "already exists"; propagate any other failure.
        if e.errno != errno.EEXIST:
            raise
# if a rmtree operation fails in rm_subdirs, wait for this much time (in secs),
# then retry once. if it still fails, raise the exception
RM_SUBDIRS_RETRY_TIME = 0.1


def rm_subdirs(path, onerror=None):
    """Remove all subdirectories in this path.

    The directory indicated by `path` is left in place, and its subdirectories
    are erased; regular files directly under `path` are kept as well.

    If onerror is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info(). If onerror is None,
    an exception is raised.
    """
    # NOTE this code is adapted from the one in shutil.rmtree, and is
    # just as fast
    names = []
    try:
        names = os.listdir(path)
    # 'as'/bare forms replace the Python-2-only comma syntax
    # ('except os.error, err'), keeping 2.6+/3.x compatibility.
    except os.error:
        if onerror is not None:
            onerror(os.listdir, path, sys.exc_info())
        else:
            raise
    for name in names:
        fullname = os.path.join(path, name)
        if os.path.isdir(fullname):
            if onerror is not None:
                shutil.rmtree(fullname, False, onerror)
            else:
                # allow the rmtree to fail once, wait and re-try.
                # if the error is raised again, fail
                err_count = 0
                while True:
                    try:
                        shutil.rmtree(fullname, False, None)
                        break
                    except os.error:
                        if err_count > 0:
                            raise
                        err_count += 1
                        time.sleep(RM_SUBDIRS_RETRY_TIME)
# META: timeout=long
import pytest
from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
def close(session):
    """Issue the WebDriver "Close Window" command for *session*."""
    endpoint = "session/{session_id}/window".format(**vars(session))
    return session.transport.send("DELETE", endpoint)
@pytest.fixture
def check_user_prompt_closed_without_exception(session, create_dialog, create_window):
    """Fixture: close succeeds and the open prompt is auto-handled
    (used for plain "accept"/"dismiss" capability behaviors)."""
    def check_user_prompt_closed_without_exception(dialog_type, retval):
        original_handle = session.window_handle
        # Run the close command in a fresh window so the session survives.
        new_handle = create_window()
        session.window_handle = new_handle
        create_dialog(dialog_type, text=dialog_type)
        response = close(session)
        assert_success(response)
        # Asserting that the dialog was handled requires valid top-level browsing
        # context, so we must switch to the original window.
        session.window_handle = original_handle
        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
        # The window must actually have been closed.
        assert new_handle not in session.handles
    return check_user_prompt_closed_without_exception
@pytest.fixture
def check_user_prompt_closed_with_exception(session, create_dialog, create_window):
    """Fixture: close fails with "unexpected alert open" while the prompt
    is still auto-handled (the "... and notify" behaviors and the default)."""
    def check_user_prompt_closed_with_exception(dialog_type, retval):
        new_handle = create_window()
        session.window_handle = new_handle
        create_dialog(dialog_type, text=dialog_type)
        response = close(session)
        assert_error(response, "unexpected alert open")
        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
        # The window must NOT have been closed.
        assert new_handle in session.handles
    return check_user_prompt_closed_with_exception
@pytest.fixture
def check_user_prompt_not_closed_but_exception(session, create_dialog, create_window):
    """Fixture: close fails and the prompt is left open ("ignore" behavior)."""
    def check_user_prompt_not_closed_but_exception(dialog_type):
        new_handle = create_window()
        session.window_handle = new_handle
        create_dialog(dialog_type, text=dialog_type)
        response = close(session)
        assert_error(response, "unexpected alert open")
        # The prompt is still present and must be dismissed manually...
        assert session.alert.text == dialog_type
        session.alert.dismiss()
        # ...and the window must not have been closed.
        assert new_handle in session.handles
    return check_user_prompt_not_closed_but_exception
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
def test_accept(check_user_prompt_closed_without_exception, dialog_type):
    # "accept": the prompt is auto-accepted and close succeeds.
    # retval not testable for confirm and prompt because window is gone
    check_user_prompt_closed_without_exception(dialog_type, None)
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", True),
    ("prompt", ""),
])
def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
    # "accept and notify": prompt auto-accepted, but the command errors
    # and the window stays open.
    check_user_prompt_closed_with_exception(dialog_type, retval)
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
def test_dismiss(check_user_prompt_closed_without_exception, dialog_type):
    # "dismiss": the prompt is auto-dismissed and close succeeds.
    # retval not testable for confirm and prompt because window is gone
    check_user_prompt_closed_without_exception(dialog_type, None)
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", False),
    ("prompt", None),
])
def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
    # "dismiss and notify": prompt auto-dismissed, command errors,
    # window stays open.
    check_user_prompt_closed_with_exception(dialog_type, retval)
@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
    # "ignore": command errors and the prompt is left open for the test
    # to handle.
    check_user_prompt_not_closed_but_exception(dialog_type)
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", False),
    ("prompt", None),
])
def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
    # No capability requested: the expected default is "dismiss and notify".
    check_user_prompt_closed_with_exception(dialog_type, retval)
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright, (c) 2018, Ansible Project
# Copyright, (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_guest_custom_attributes
short_description: Manage custom attributes from VMWare for the given virtual machine
description:
- This module can be used to add, remove and update custom attributes for the given virtual machine.
version_added: 2.7
author:
- Jimmy Conner (@cigamit)
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 6.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
name:
description:
- Name of the virtual machine to work with.
required: True
state:
description:
- The action to take.
- If set to C(present), then custom attribute is added or updated.
- If set to C(absent), then custom attribute is removed.
default: 'present'
choices: ['present', 'absent']
uuid:
description:
- UUID of the virtual machine to manage if known. This is VMware's unique identifier.
- This is required parameter, if C(name) is not supplied.
folder:
description:
- Absolute path to find an existing guest.
- This is required parameter, if C(name) is supplied and multiple virtual machines with same name are found.
datacenter:
description:
- Datacenter name where the virtual machine is located in.
required: True
attributes:
description:
- A list of name and value of custom attributes that needs to be manage.
- Value of custom attribute is not required and will be ignored, if C(state) is set to C(absent).
default: []
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Add virtual machine custom attributes
vmware_guest_custom_attributes:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
uuid: 421e4592-c069-924d-ce20-7e7533fab926
state: present
attributes:
- name: MyAttribute
value: MyValue
delegate_to: localhost
register: attributes
- name: Add multiple virtual machine custom attributes
vmware_guest_custom_attributes:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
uuid: 421e4592-c069-924d-ce20-7e7533fab926
state: present
attributes:
- name: MyAttribute
value: MyValue
- name: MyAttribute2
value: MyValue2
delegate_to: localhost
register: attributes
- name: Remove virtual machine Attribute
vmware_guest_custom_attributes:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
uuid: 421e4592-c069-924d-ce20-7e7533fab926
state: absent
attributes:
- name: MyAttribute
delegate_to: localhost
register: attributes
'''
RETURN = """
custom_attributes:
description: metadata about the virtual machine attributes
returned: always
type: dict
sample: {
"mycustom": "my_custom_value",
"mycustom_2": "my_custom_value_2",
"sample_1": "sample_1_value",
"sample_2": "sample_2_value",
"sample_3": "sample_3_value"
}
"""
try:
from pyVmomi import vim
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec
class VmAttributeManager(PyVmomi):
    """Manage vSphere custom attributes (custom fields) on a virtual machine."""

    def __init__(self, module):
        super(VmAttributeManager, self).__init__(module)
        # All custom field definitions known to vCenter.
        self.custom_field_mgr = self.content.customFieldsManager.field

    def set_custom_field(self, vm, user_fields):
        """Create/update the given custom attributes on *vm*.

        A field with no 'value' defaults to '' — this effectively clears the
        attribute, which is how the caller implements state=absent.
        Returns an Ansible result dict ('changed', 'failed',
        'custom_attributes').
        """
        result_fields = dict()
        change_list = list()
        changed = False
        for field in user_fields:
            # Existing global field definition with this name, or False.
            field_key = self.check_exists(field['name'])
            found = False
            field_value = field.get('value', '')
            # Pairs of (definition name, current value) for every custom value
            # set on this VM.  The comprehension's ``v`` is its own loop
            # variable, distinct from the ``v`` unpacked by the outer for.
            for k, v in [(x.name, v.value) for x in self.custom_field_mgr for v in vm.customValue if x.key == v.key]:
                if k == field['name']:
                    found = True
                    if v != field_value:
                        if not self.module.check_mode:
                            self.content.customFieldsManager.SetField(entity=vm, key=field_key.key, value=field_value)
                        result_fields[k] = field_value
                        change_list.append(True)
            if not found and field_value != "":
                # No value on this VM yet: create the field definition on
                # first use if it does not exist globally, then set it.
                if not field_key and not self.module.check_mode:
                    field_key = self.content.customFieldsManager.AddFieldDefinition(name=field['name'], moType=vim.VirtualMachine)
                change_list.append(True)
                if not self.module.check_mode:
                    self.content.customFieldsManager.SetField(entity=vm, key=field_key.key, value=field_value)
                result_fields[field['name']] = field_value
        if any(change_list):
            changed = True
        return {'changed': changed, 'failed': False, 'custom_attributes': result_fields}

    def check_exists(self, field):
        """Return the custom field definition named *field*, or False."""
        for x in self.custom_field_mgr:
            if x.name == field:
                return x
        return False
def main():
    """Ansible module entry point: locate the VM and apply attribute changes."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        datacenter=dict(type='str'),
        name=dict(required=True, type='str'),
        folder=dict(type='str'),
        uuid=dict(type='str'),
        state=dict(type='str', default='present',
                   choices=['absent', 'present']),
        attributes=dict(
            type='list',
            default=[],
            options=dict(
                name=dict(type='str', required=True),
                value=dict(type='str'),
            )
        ),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_one_of=[['name', 'uuid']],
    )
    if module.params.get('folder'):
        # FindByInventoryPath() does not require an absolute path
        # so we should leave the input folder path unmodified
        module.params['folder'] = module.params['folder'].rstrip('/')
    pyv = VmAttributeManager(module)
    results = {'changed': False, 'failed': False, 'instance': dict()}
    # Check if the virtual machine exists before continuing
    vm = pyv.get_vm()
    if vm:
        # virtual machine already exists
        # NOTE(review): both branches call the same method; 'absent' works
        # because attributes without a 'value' default to '' (clear) in
        # set_custom_field — confirm this matches the documented semantics.
        if module.params['state'] == "present":
            results = pyv.set_custom_field(vm, module.params['attributes'])
        elif module.params['state'] == "absent":
            results = pyv.set_custom_field(vm, module.params['attributes'])
        module.exit_json(**results)
    else:
        # virtual machine does not exists
        module.fail_json(msg="Unable to manage custom attributes for non-existing"
                             " virtual machine %s" % (module.params.get('name') or module.params.get('uuid')))


if __name__ == '__main__':
    main()
#**************************************************************************
#* Copyright(c) 1998-2014, ALICE Experiment at CERN, All rights reserved. *
#* *
#* Author: The ALICE Off-line Project. *
#* Contributors are mentioned in the code where appropriate. *
#* *
#* Permission to use, copy, modify and distribute this software and its *
#* documentation strictly for non-commercial purposes is hereby granted *
#* without fee, provided that the above copyright notice appears in all *
#* copies and that both the copyright notice and this permission notice *
#* appear in the supporting documentation. The authors make no claims *
#* about the suitability of this software for any purpose. It is *
#* provided "as is" without express or implied warranty. *
#**************************************************************************
from ROOT import TFile, TH1D
from ROOT import gDirectory, gROOT
# Maps trigger class name -> axis index in the hEventTriggers THnSparse.
triggerlookup = {"MinBias":0, "EMCJHigh":1, "EMCJLow":2, "EMCGHigh":3, "EMCGLow":4}
def GetTriggerScalers(filename):
    """Compute per-trigger downscaling factors relative to min. bias.

    Reads the PtEMCalTrigger task output from *filename*, counts events per
    trigger class and writes a histogram of MinBias/trigger ratios to
    TriggerScalers.root in the current directory.
    """
    inputfile = TFile.Open(filename)
    inputfile.cd("PtEMCalTriggerTask")
    gDirectory.ls()
    tasklist = gDirectory.Get("results")
    histlist = tasklist.FindObject("histosPtEMCalTriggerHistograms")
    # Switch to ROOT's global directory before retrieving the histogram —
    # presumably so it remains usable after inputfile.Close() (ROOT directory
    # ownership); confirm if the file handling here is changed.
    gROOT.cd()
    triggerhist = histlist.FindObject("hEventTriggers")
    inputfile.Close()
    # Get number of Min. Bias counts
    mbcounts = GetCounts(triggerhist, "MinBias")
    print "MinBias counts: %d" %(mbcounts)
    # Restrict axis 0 to bin 2 before counting the rare triggers.
    # NOTE(review): the meaning of axis 0 is not visible here — confirm
    # against the task that fills hEventTriggers.
    triggerhist.GetAxis(0).SetRange(2,2)
    triggercounts = {}
    for trigger in triggerlookup.keys():
        if trigger == "MinBias":
            continue
        triggercounts[trigger] = GetCounts(triggerhist, trigger)
        print "Number of events for trigger %s: %d" %(trigger, triggercounts[trigger])
    hScalers = TH1D("triggerScalers", "trigger scalers", len(triggercounts), -0.5, len(triggercounts) - 0.5)
    counter = 1
    for trigger in triggercounts.keys():
        # Scaler = MinBias events / triggered events for this class.
        scaler = float(mbcounts)/float(triggercounts[trigger])
        print "Scaler for trigger %s: %f" %(trigger, scaler)
        hScalers.GetXaxis().SetBinLabel(counter, trigger)
        hScalers.SetBinContent(counter, scaler)
        counter += 1
    outputfile = TFile("TriggerScalers.root", "RECREATE")
    outputfile.cd()
    hScalers.Write()
    outputfile.Close()
def GetCounts(triggerhist, triggerclass):
    """Project *triggerhist* onto the trigger's axis and return bin 2's content."""
    axis_index = triggerlookup[triggerclass]
    return triggerhist.Projection(axis_index).GetBinContent(2)
#!/usr/bin/env python
# coding=UTF-8
# Author: Dennis Lutter <lad1337@gmail.com>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Fountion, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
if __name__ == "__main__":
    import glob
    import unittest

    # Collect every sibling *_tests.py file, excluding this runner itself.
    test_file_strings = [ x for x in glob.glob('*_tests.py') if not x in __file__]
    # Strip the trailing '.py' to obtain importable module names.
    module_strings = [file_string[0:len(file_string) - 3] for file_string in test_file_strings]
    suites = [unittest.defaultTestLoader.loadTestsFromName(file_string) for file_string in module_strings]
    testSuite = unittest.TestSuite(suites)
    print "=================="
    print "STARTING - ALL TESTS"
    print "=================="
    print "this will include"
    for includedfiles in test_file_strings:
        print "- " + includedfiles
    text_runner = unittest.TextTestRunner().run(testSuite)
"""
Testing the `next_token_action` decorator on a toy class.
"""
import mws
import unittest
# pylint: disable=invalid-name
# Action name fed to the decorator and echoed back by the toy methods.
ACTION = "SomeAction"


class ToyClass(object):
    """
    Example class using a method designed to be run with a `next_token`,
    calling the corresponding `action_by_next_token` method
    """
    def __init__(self):
        # Records which method the decorator actually dispatched to.
        self.method_run = None

    def action_by_next_token(self, action, token):
        """
        Toy next-action method, simply returns the action and token together.
        The decorator should call THIS method automatically if a next_token kwarg
        is present in the target call.
        """
        self.method_run = 'action_by_next_token'
        # Modify the action similar to how live code does it,
        # for the sake of our sanity here.
        modified_action = "{}ByNextToken".format(action)
        return modified_action, token

    @mws.decorators.next_token_action(ACTION)
    def target_request_method(self, next_token=None):
        """
        Toy request method, used as the target for our test.
        """
        self.method_run = 'target_function'
        return ACTION, next_token
class NextTokenTestCase(unittest.TestCase):
    """
    Cases that cover the use of the next_token_action decorator.
    """
    def test_request_run_normal(self):
        """
        Call the target request method with no next_token, and we should
        see that method run normally.
        """
        instance = ToyClass()
        action, token = instance.target_request_method()
        self.assertEqual(action, ACTION)
        self.assertIs(token, None)
        # The decorator must NOT have redirected to action_by_next_token.
        self.assertEqual(instance.method_run, 'target_function')

    def test_request_run_with_next_token(self):
        """
        Call the target request method WITH a next_token kwarg; the decorator
        should redirect the call to action_by_next_token.
        """
        instance = ToyClass()
        next_token = "Olly Olly Oxen Free!"
        action, token = instance.target_request_method(next_token=next_token)
        what_action_should_be = "{}ByNextToken".format(ACTION)
        self.assertEqual(action, what_action_should_be)
        self.assertEqual(token, next_token)
        self.assertEqual(instance.method_run, 'action_by_next_token')
// Formatting library for C++ - implementation
//
// Copyright (c) 2012 - 2016, Victor Zverovich
// All rights reserved.
//
// For the license information refer to format.h.
#ifndef FMT_FORMAT_INL_H_
#define FMT_FORMAT_INL_H_
#ifndef FMT_MODULE
# include <algorithm>
# include <cerrno> // errno
# include <climits>
# include <cmath>
# include <exception>
#endif
#if defined(_WIN32) && !defined(FMT_USE_WRITE_CONSOLE)
# include <io.h> // _isatty
#endif
#include "format.h"
#if FMT_USE_LOCALE
# include <locale>
#endif
#ifndef FMT_FUNC
# define FMT_FUNC
#endif
FMT_BEGIN_NAMESPACE
namespace detail {
// Reports a failed FMT_ASSERT on stderr and terminates the program.
FMT_FUNC void assert_fail(const char* file, int line, const char* message) {
  // Use unchecked std::fprintf to avoid triggering another assertion when
  // writing to stderr fails.
  fprintf(stderr, "%s:%d: assertion failed: %s", file, line, message);
  abort();
}
// Writes "<message>: error <code>" into out, dropping the message part if
// the result would not fit into inline_buffer_size.
FMT_FUNC void format_error_code(detail::buffer<char>& out, int error_code,
                                string_view message) noexcept {
  // Report error code making sure that the output fits into
  // inline_buffer_size to avoid dynamic memory allocation and potential
  // bad_alloc.
  out.try_resize(0);
  static const char SEP[] = ": ";
  static const char ERROR_STR[] = "error ";
  // Subtract 2 to account for terminating null characters in SEP and ERROR_STR.
  size_t error_code_size = sizeof(SEP) + sizeof(ERROR_STR) - 2;
  auto abs_value = static_cast<uint32_or_64_or_128_t<int>>(error_code);
  if (detail::is_negative(error_code)) {
    abs_value = 0 - abs_value;
    ++error_code_size;  // room for the '-' sign
  }
  error_code_size += detail::to_unsigned(detail::count_digits(abs_value));
  auto it = appender(out);
  // Only prepend the message when the total still fits without allocation.
  if (message.size() <= inline_buffer_size - error_code_size)
    fmt::format_to(it, FMT_STRING("{}{}"), message, SEP);
  fmt::format_to(it, FMT_STRING("{}{}"), ERROR_STR, error_code);
  FMT_ASSERT(out.size() <= inline_buffer_size, "");
}
// Formats an error via func and writes it to stderr; write failures are
// deliberately ignored because reporting must not throw.
FMT_FUNC void do_report_error(format_func func, int error_code,
                              const char* message) noexcept {
  memory_buffer full_message;
  func(full_message, error_code, message);
  // Don't use fwrite_all because the latter may throw.
  if (std::fwrite(full_message.data(), full_message.size(), 1, stderr) > 0)
    std::fputc('\n', stderr);
}
// A wrapper around fwrite that throws on error: a short write raises a
// system_error carrying errno.
inline void fwrite_all(const void* ptr, size_t count, FILE* stream) {
  size_t written = std::fwrite(ptr, 1, count, stream);
  if (written < count)
    FMT_THROW(system_error(errno, FMT_STRING("cannot write to file")));
}
#if FMT_USE_LOCALE
using std::locale;
using std::numpunct;
using std::use_facet;

template <typename Locale>
locale_ref::locale_ref(const Locale& loc) : locale_(&loc) {
  static_assert(std::is_same<Locale, locale>::value, "");
}
#else
// Minimal stand-ins used when <locale> support is compiled out: a dummy
// locale type, a numpunct hard-coding "C"-like defaults (3-digit grouping,
// ',' thousands separator, '.' decimal point) and a use_facet returning it.
struct locale {};
template <typename Char> struct numpunct {
  auto grouping() const -> std::string { return "\03"; }
  auto thousands_sep() const -> Char { return ','; }
  auto decimal_point() const -> Char { return '.'; }
};
template <typename Facet> Facet use_facet(locale) { return {}; }
#endif  // FMT_USE_LOCALE
// Returns the stored locale, or a default-constructed one if none was set
// (or locale support is compiled out).
template <typename Locale> auto locale_ref::get() const -> Locale {
  static_assert(std::is_same<Locale, locale>::value, "");
#if FMT_USE_LOCALE
  if (locale_) return *static_cast<const locale*>(locale_);
#endif
  return locale();
}
// Returns the grouping string and thousands separator of loc's numpunct
// facet; the separator is Char() when grouping is empty (disabled).
template <typename Char>
FMT_FUNC auto thousands_sep_impl(locale_ref loc) -> thousands_sep_result<Char> {
  auto&& facet = use_facet<numpunct<Char>>(loc.get<locale>());
  auto grouping = facet.grouping();
  auto thousands_sep = grouping.empty() ? Char() : facet.thousands_sep();
  return {std::move(grouping), thousands_sep};
}

// Returns the decimal point character of loc's numpunct facet.
template <typename Char>
FMT_FUNC auto decimal_point_impl(locale_ref loc) -> Char {
  return use_facet<numpunct<Char>>(loc.get<locale>()).decimal_point();
}
#if FMT_USE_LOCALE
// Writes value with locale-specific formatting, using an installed
// format_facet if present or constructing one from the locale on the fly.
FMT_FUNC auto write_loc(appender out, loc_value value,
                        const format_specs& specs, locale_ref loc) -> bool {
  auto locale = loc.get<std::locale>();
  // We cannot use the num_put<char> facet because it may produce output in
  // a wrong encoding.
  using facet = format_facet<std::locale>;
  if (std::has_facet<facet>(locale))
    return use_facet<facet>(locale).put(out, value, specs);
  return facet(locale).put(out, value, specs);
}
#endif
} // namespace detail
// Reports a formatting error: throws format_error when exceptions are
// enabled, otherwise prints the message to stderr and aborts.
FMT_FUNC void report_error(const char* message) {
#if FMT_USE_EXCEPTIONS
  // Use FMT_THROW instead of throw to avoid bogus unreachable code warnings
  // from MSVC.
  FMT_THROW(format_error(message));
#else
  fputs(message, stderr);
  abort();
#endif
}
// Facet identifier required by the std::locale facet protocol.
template <typename Locale> typename Locale::id format_facet<Locale>::id;

// Caches the locale's numeric punctuation (grouping and thousands
// separator) at construction time.
template <typename Locale> format_facet<Locale>::format_facet(Locale& loc) {
  auto& np = detail::use_facet<detail::numpunct<char>>(loc);
  grouping_ = np.grouping();
  if (!grouping_.empty()) separator_ = std::string(1, np.thousands_sep());
}

#if FMT_USE_LOCALE
// Writes val using the cached separator/grouping/decimal point.
// NOTE(review): the bool result presumably indicates whether the value type
// was handled by loc_writer — confirm against loc_writer's visit overloads.
template <>
FMT_API FMT_FUNC auto format_facet<std::locale>::do_put(
    appender out, loc_value val, const format_specs& specs) const -> bool {
  return val.visit(
      detail::loc_writer<>{out, specs, separator_, grouping_, decimal_point_});
}
#endif
// Builds a std::system_error in the generic category with a formatted message.
FMT_FUNC auto vsystem_error(int error_code, string_view fmt, format_args args)
    -> std::system_error {
  auto ec = std::error_code(error_code, std::generic_category());
  return std::system_error(ec, vformat(fmt, args));
}
namespace detail {
// basic_fp equality: same significand and same exponent.
template <typename F>
inline auto operator==(basic_fp<F> x, basic_fp<F> y) -> bool {
  return x.f == y.f && x.e == y.e;
}
// Rotates n right by r bits. Compilers should be able to optimize this into
// the ror instruction.
FMT_CONSTEXPR inline auto rotr(uint32_t n, uint32_t r) noexcept -> uint32_t {
  r &= 31;
  // Masking the left-shift count avoids undefined behavior (shift by the
  // full bit width) when r == 0: (32 - 0) & 31 == 0, yielding n unchanged.
  return (n >> r) | (n << ((32 - r) & 31));
}
FMT_CONSTEXPR inline auto rotr(uint64_t n, uint32_t r) noexcept -> uint64_t {
  r &= 63;
  // Same masking as above to keep rotr(n, 0) well defined.
  return (n >> r) | (n << ((64 - r) & 63));
}
// Implementation of Dragonbox algorithm: https://github.com/jk-jeon/dragonbox.
namespace dragonbox {
// Computes upper 64 bits of multiplication of a 32-bit unsigned integer and a
// 64-bit unsigned integer.
inline auto umul96_upper64(uint32_t x, uint64_t y) noexcept -> uint64_t {
  // Widen x into the high half so the existing 64x64 helper can be reused.
  return umul128_upper64(static_cast<uint64_t>(x) << 32, y);
}

// Computes lower 128 bits of multiplication of a 64-bit unsigned integer and a
// 128-bit unsigned integer.
inline auto umul192_lower128(uint64_t x, uint128_fallback y) noexcept
    -> uint128_fallback {
  uint64_t high = x * y.high();  // low 64 bits of x * y.high()
  uint128_fallback high_low = umul128(x, y.low());
  // Overflow out of the 128-bit result is intentionally discarded (mod 2^128).
  return {high + high_low.high(), high_low.low()};
}

// Computes lower 64 bits of multiplication of a 32-bit unsigned integer and a
// 64-bit unsigned integer.
inline auto umul96_lower64(uint32_t x, uint64_t y) noexcept -> uint64_t {
  return x * y;
}
// Various fast log computations.
// Computes floor(e * log10(2) - log10(4/3)) via a fixed-point linear
// approximation (631305 / 2^21 ~ log10(2), 261663 / 2^21 ~ log10(4/3)),
// valid for exponents within the asserted range.
inline auto floor_log10_pow2_minus_log10_4_over_3(int e) noexcept -> int {
  FMT_ASSERT(e <= 2936 && e >= -2985, "too large exponent");
  return (e * 631305 - 261663) >> 21;
}
// Divisor/shift pairs used by the small pow10 division helpers below:
// index 0 -> divide by 10, index 1 -> divide by 100.
FMT_INLINE_VARIABLE constexpr struct div_small_pow10_infos_struct {
  uint32_t divisor;
  int shift_amount;
} div_small_pow10_infos[] = {{10, 16}, {100, 16}};
// Replaces n by floor(n / pow(10, N)) returning true if and only if n is
// divisible by pow(10, N).
// Precondition: n <= pow(10, N + 1).
template <int N>
auto check_divisibility_and_divide_by_pow10(uint32_t& n) noexcept -> bool {
  // The numbers below are chosen such that:
  //   1. floor(n/d) = floor(nm / 2^k) where d=10 or d=100,
  //   2. nm mod 2^k < m if and only if n is divisible by d,
  // where m is magic_number, k is shift_amount
  // and d is divisor.
  //
  // Item 1 is a common technique of replacing division by a constant with
  // multiplication, see e.g. "Division by Invariant Integers Using
  // Multiplication" by Granlund and Montgomery (1994). magic_number (m) is set
  // to ceil(2^k/d) for large enough k.
  // The idea for item 2 originates from Schubfach.
  constexpr auto info = div_small_pow10_infos[N - 1];
  FMT_ASSERT(n <= info.divisor * 10, "n is too large");
  constexpr uint32_t magic_number =
      (1u << info.shift_amount) / info.divisor + 1;
  n *= magic_number;
  const uint32_t comparison_mask = (1u << info.shift_amount) - 1;
  // Divisibility test (item 2): the low k bits are < m exactly when d
  // divides the original n.
  bool result = (n & comparison_mask) < magic_number;
  n >>= info.shift_amount;  // n is now floor(old_n / d) (item 1)
  return result;
}
// Computes floor(n / pow(10, N)) for small n and N.
// Precondition: n <= pow(10, N + 1).
template <int N> auto small_division_by_pow10(uint32_t n) noexcept -> uint32_t {
  // Same magic-number reduction as in check_divisibility_and_divide_by_pow10
  // above, but without the divisibility test.
  constexpr auto div_info = div_small_pow10_infos[N - 1];
  FMT_ASSERT(n <= div_info.divisor * 10, "n is too large");
  constexpr uint32_t magic =
      (1u << div_info.shift_amount) / div_info.divisor + 1;
  return (n * magic) >> div_info.shift_amount;
}
// Computes floor(n / 10^(kappa + 1)) (float)
inline auto divide_by_10_to_kappa_plus_1(uint32_t n) noexcept -> uint32_t {
  // Division by 100 via multiplication by 1374389535 = ceil(2^37/100)
  // followed by a 37-bit right shift.
  const uint64_t product = static_cast<uint64_t>(n) * 1374389535;
  return static_cast<uint32_t>(product >> 37);
}
// Computes floor(n / 10^(kappa + 1)) (double)
inline auto divide_by_10_to_kappa_plus_1(uint64_t n) noexcept -> uint64_t {
  // Division by 1000 as a 128-bit multiplication by
  // 2361183241434822607 = ceil(2^(64+7)/1000) followed by a 7-bit shift.
  const uint64_t upper = umul128_upper64(n, 2361183241434822607ull);
  return upper >> 7;
}
// Various subroutines using pow10 cache
// Primary template; only explicit specializations (float, double) exist.
template <typename T> struct cache_accessor;
// Accessor for the precomputed powers of 10 used by Dragonbox for float.
// A cache entry is a 64-bit significand of a power of 10; the static
// helpers below perform the multiplications and endpoint computations of
// the algorithm.
template <> struct cache_accessor<float> {
  using carrier_uint = float_info<float>::carrier_uint;
  using cache_entry_type = uint64_t;
  // Returns the cached 64-bit significand of 10^k.
  // Precondition: float_info<float>::min_k <= k <= float_info<float>::max_k.
  static auto get_cached_power(int k) noexcept -> uint64_t {
    FMT_ASSERT(k >= float_info<float>::min_k && k <= float_info<float>::max_k,
               "k is out of range");
    // One entry per exponent from min_k to max_k, in increasing order.
    static constexpr const uint64_t pow10_significands[] = {
        0x81ceb32c4b43fcf5, 0xa2425ff75e14fc32, 0xcad2f7f5359a3b3f,
        0xfd87b5f28300ca0e, 0x9e74d1b791e07e49, 0xc612062576589ddb,
        0xf79687aed3eec552, 0x9abe14cd44753b53, 0xc16d9a0095928a28,
        0xf1c90080baf72cb2, 0x971da05074da7bef, 0xbce5086492111aeb,
        0xec1e4a7db69561a6, 0x9392ee8e921d5d08, 0xb877aa3236a4b44a,
        0xe69594bec44de15c, 0x901d7cf73ab0acda, 0xb424dc35095cd810,
        0xe12e13424bb40e14, 0x8cbccc096f5088cc, 0xafebff0bcb24aaff,
        0xdbe6fecebdedd5bf, 0x89705f4136b4a598, 0xabcc77118461cefd,
        0xd6bf94d5e57a42bd, 0x8637bd05af6c69b6, 0xa7c5ac471b478424,
        0xd1b71758e219652c, 0x83126e978d4fdf3c, 0xa3d70a3d70a3d70b,
        0xcccccccccccccccd, 0x8000000000000000, 0xa000000000000000,
        0xc800000000000000, 0xfa00000000000000, 0x9c40000000000000,
        0xc350000000000000, 0xf424000000000000, 0x9896800000000000,
        0xbebc200000000000, 0xee6b280000000000, 0x9502f90000000000,
        0xba43b74000000000, 0xe8d4a51000000000, 0x9184e72a00000000,
        0xb5e620f480000000, 0xe35fa931a0000000, 0x8e1bc9bf04000000,
        0xb1a2bc2ec5000000, 0xde0b6b3a76400000, 0x8ac7230489e80000,
        0xad78ebc5ac620000, 0xd8d726b7177a8000, 0x878678326eac9000,
        0xa968163f0a57b400, 0xd3c21bcecceda100, 0x84595161401484a0,
        0xa56fa5b99019a5c8, 0xcecb8f27f4200f3a, 0x813f3978f8940985,
        0xa18f07d736b90be6, 0xc9f2c9cd04674edf, 0xfc6f7c4045812297,
        0x9dc5ada82b70b59e, 0xc5371912364ce306, 0xf684df56c3e01bc7,
        0x9a130b963a6c115d, 0xc097ce7bc90715b4, 0xf0bdc21abb48db21,
        0x96769950b50d88f5, 0xbc143fa4e250eb32, 0xeb194f8e1ae525fe,
        0x92efd1b8d0cf37bf, 0xb7abc627050305ae, 0xe596b7b0c643c71a,
        0x8f7e32ce7bea5c70, 0xb35dbf821ae4f38c, 0xe0352f62a19e306f};
    return pow10_significands[k - float_info<float>::min_k];
  }
  // Upper bits of a cache multiplication plus a flag telling whether the
  // discarded lower bits were all zero.
  struct compute_mul_result {
    carrier_uint result;
    bool is_integer;
  };
  // Parity bit of a cache multiplication plus the same
  // all-lower-bits-zero flag.
  struct compute_mul_parity_result {
    bool parity;
    bool is_integer;
  };
  // Multiplies u by the cache entry; result is the top 32 bits of the
  // upper-64 product and is_integer is set when the low 32 bits of that
  // upper-64 product are zero.
  static auto compute_mul(carrier_uint u,
                          const cache_entry_type& cache) noexcept
      -> compute_mul_result {
    auto r = umul96_upper64(u, cache);
    return {static_cast<carrier_uint>(r >> 32),
            static_cast<carrier_uint>(r) == 0};
  }
  // Extracts the top (beta + 1) bits of the cache entry.
  static auto compute_delta(const cache_entry_type& cache, int beta) noexcept
      -> uint32_t {
    return static_cast<uint32_t>(cache >> (64 - 1 - beta));
  }
  // Computes bit (64 - beta) of two_f * cache (the parity) and whether
  // the bits below it down to bit (32 - beta) are all zero (is_integer).
  static auto compute_mul_parity(carrier_uint two_f,
                                 const cache_entry_type& cache,
                                 int beta) noexcept
      -> compute_mul_parity_result {
    FMT_ASSERT(beta >= 1, "");
    FMT_ASSERT(beta < 64, "");
    auto r = umul96_lower64(two_f, cache);
    return {((r >> (64 - beta)) & 1) != 0,
            static_cast<uint32_t>(r >> (32 - beta)) == 0};
  }
  // Left endpoint of the shorter-interval case; see the Dragonbox paper
  // for the derivation of the offsets.
  static auto compute_left_endpoint_for_shorter_interval_case(
      const cache_entry_type& cache, int beta) noexcept -> carrier_uint {
    return static_cast<carrier_uint>(
        (cache - (cache >> (num_significand_bits<float>() + 2))) >>
        (64 - num_significand_bits<float>() - 1 - beta));
  }
  // Right endpoint of the shorter-interval case.
  static auto compute_right_endpoint_for_shorter_interval_case(
      const cache_entry_type& cache, int beta) noexcept -> carrier_uint {
    return static_cast<carrier_uint>(
        (cache + (cache >> (num_significand_bits<float>() + 1))) >>
        (64 - num_significand_bits<float>() - 1 - beta));
  }
  // Round-up value for the shorter-interval case: halves the extracted
  // cache bits, rounding to nearest (+1 before the division by 2).
  static auto compute_round_up_for_shorter_interval_case(
      const cache_entry_type& cache, int beta) noexcept -> carrier_uint {
    return (static_cast<carrier_uint>(
                cache >> (64 - num_significand_bits<float>() - 2 - beta)) +
            1) /
           2;
  }
};
template <> struct cache_accessor<double> {
using carrier_uint = float_info<double>::carrier_uint;
using cache_entry_type = uint128_fallback;
static auto get_cached_power(int k) noexcept -> uint128_fallback {
FMT_ASSERT(k >= float_info<double>::min_k && k <= float_info<double>::max_k,
"k is out of range");
static constexpr const uint128_fallback pow10_significands[] = {
#if FMT_USE_FULL_CACHE_DRAGONBOX
{0xff77b1fcbebcdc4f, 0x25e8e89c13bb0f7b},
{0x9faacf3df73609b1, 0x77b191618c54e9ad},
{0xc795830d75038c1d, 0xd59df5b9ef6a2418},
{0xf97ae3d0d2446f25, 0x4b0573286b44ad1e},
{0x9becce62836ac577, 0x4ee367f9430aec33},
{0xc2e801fb244576d5, 0x229c41f793cda740},
{0xf3a20279ed56d48a, 0x6b43527578c11110},
{0x9845418c345644d6, 0x830a13896b78aaaa},
{0xbe5691ef416bd60c, 0x23cc986bc656d554},
{0xedec366b11c6cb8f, 0x2cbfbe86b7ec8aa9},
{0x94b3a202eb1c3f39, 0x7bf7d71432f3d6aa},
{0xb9e08a83a5e34f07, 0xdaf5ccd93fb0cc54},
{0xe858ad248f5c22c9, 0xd1b3400f8f9cff69},
{0x91376c36d99995be, 0x23100809b9c21fa2},
{0xb58547448ffffb2d, 0xabd40a0c2832a78b},
{0xe2e69915b3fff9f9, 0x16c90c8f323f516d},
{0x8dd01fad907ffc3b, 0xae3da7d97f6792e4},
{0xb1442798f49ffb4a, 0x99cd11cfdf41779d},
{0xdd95317f31c7fa1d, 0x40405643d711d584},
{0x8a7d3eef7f1cfc52, 0x482835ea666b2573},
{0xad1c8eab5ee43b66, 0xda3243650005eed0},
{0xd863b256369d4a40, 0x90bed43e40076a83},
{0x873e4f75e2224e68, 0x5a7744a6e804a292},
{0xa90de3535aaae202, 0x711515d0a205cb37},
{0xd3515c2831559a83, 0x0d5a5b44ca873e04},
{0x8412d9991ed58091, 0xe858790afe9486c3},
{0xa5178fff668ae0b6, 0x626e974dbe39a873},
{0xce5d73ff402d98e3, 0xfb0a3d212dc81290},
{0x80fa687f881c7f8e, 0x7ce66634bc9d0b9a},
{0xa139029f6a239f72, 0x1c1fffc1ebc44e81},
{0xc987434744ac874e, 0xa327ffb266b56221},
{0xfbe9141915d7a922, 0x4bf1ff9f0062baa9},
{0x9d71ac8fada6c9b5, 0x6f773fc3603db4aa},
{0xc4ce17b399107c22, 0xcb550fb4384d21d4},
{0xf6019da07f549b2b, 0x7e2a53a146606a49},
{0x99c102844f94e0fb, 0x2eda7444cbfc426e},
{0xc0314325637a1939, 0xfa911155fefb5309},
{0xf03d93eebc589f88, 0x793555ab7eba27cb},
{0x96267c7535b763b5, 0x4bc1558b2f3458df},
{0xbbb01b9283253ca2, 0x9eb1aaedfb016f17},
{0xea9c227723ee8bcb, 0x465e15a979c1cadd},
{0x92a1958a7675175f, 0x0bfacd89ec191eca},
{0xb749faed14125d36, 0xcef980ec671f667c},
{0xe51c79a85916f484, 0x82b7e12780e7401b},
{0x8f31cc0937ae58d2, 0xd1b2ecb8b0908811},
{0xb2fe3f0b8599ef07, 0x861fa7e6dcb4aa16},
{0xdfbdcece67006ac9, 0x67a791e093e1d49b},
{0x8bd6a141006042bd, 0xe0c8bb2c5c6d24e1},
{0xaecc49914078536d, 0x58fae9f773886e19},
{0xda7f5bf590966848, 0xaf39a475506a899f},
{0x888f99797a5e012d, 0x6d8406c952429604},
{0xaab37fd7d8f58178, 0xc8e5087ba6d33b84},
{0xd5605fcdcf32e1d6, 0xfb1e4a9a90880a65},
{0x855c3be0a17fcd26, 0x5cf2eea09a550680},
{0xa6b34ad8c9dfc06f, 0xf42faa48c0ea481f},
{0xd0601d8efc57b08b, 0xf13b94daf124da27},
{0x823c12795db6ce57, 0x76c53d08d6b70859},
{0xa2cb1717b52481ed, 0x54768c4b0c64ca6f},
{0xcb7ddcdda26da268, 0xa9942f5dcf7dfd0a},
{0xfe5d54150b090b02, 0xd3f93b35435d7c4d},
{0x9efa548d26e5a6e1, 0xc47bc5014a1a6db0},
{0xc6b8e9b0709f109a, 0x359ab6419ca1091c},
{0xf867241c8cc6d4c0, 0xc30163d203c94b63},
{0x9b407691d7fc44f8, 0x79e0de63425dcf1e},
{0xc21094364dfb5636, 0x985915fc12f542e5},
{0xf294b943e17a2bc4, 0x3e6f5b7b17b2939e},
{0x979cf3ca6cec5b5a, 0xa705992ceecf9c43},
{0xbd8430bd08277231, 0x50c6ff782a838354},
{0xece53cec4a314ebd, 0xa4f8bf5635246429},
{0x940f4613ae5ed136, 0x871b7795e136be9a},
{0xb913179899f68584, 0x28e2557b59846e40},
{0xe757dd7ec07426e5, 0x331aeada2fe589d0},
{0x9096ea6f3848984f, 0x3ff0d2c85def7622},
{0xb4bca50b065abe63, 0x0fed077a756b53aa},
{0xe1ebce4dc7f16dfb, 0xd3e8495912c62895},
{0x8d3360f09cf6e4bd, 0x64712dd7abbbd95d},
{0xb080392cc4349dec, 0xbd8d794d96aacfb4},
{0xdca04777f541c567, 0xecf0d7a0fc5583a1},
{0x89e42caaf9491b60, 0xf41686c49db57245},
{0xac5d37d5b79b6239, 0x311c2875c522ced6},
{0xd77485cb25823ac7, 0x7d633293366b828c},
{0x86a8d39ef77164bc, 0xae5dff9c02033198},
{0xa8530886b54dbdeb, 0xd9f57f830283fdfd},
{0xd267caa862a12d66, 0xd072df63c324fd7c},
{0x8380dea93da4bc60, 0x4247cb9e59f71e6e},
{0xa46116538d0deb78, 0x52d9be85f074e609},
{0xcd795be870516656, 0x67902e276c921f8c},
{0x806bd9714632dff6, 0x00ba1cd8a3db53b7},
{0xa086cfcd97bf97f3, 0x80e8a40eccd228a5},
{0xc8a883c0fdaf7df0, 0x6122cd128006b2ce},
{0xfad2a4b13d1b5d6c, 0x796b805720085f82},
{0x9cc3a6eec6311a63, 0xcbe3303674053bb1},
{0xc3f490aa77bd60fc, 0xbedbfc4411068a9d},
{0xf4f1b4d515acb93b, 0xee92fb5515482d45},
{0x991711052d8bf3c5, 0x751bdd152d4d1c4b},
{0xbf5cd54678eef0b6, 0xd262d45a78a0635e},
{0xef340a98172aace4, 0x86fb897116c87c35},
{0x9580869f0e7aac0e, 0xd45d35e6ae3d4da1},
{0xbae0a846d2195712, 0x8974836059cca10a},
{0xe998d258869facd7, 0x2bd1a438703fc94c},
{0x91ff83775423cc06, 0x7b6306a34627ddd0},
{0xb67f6455292cbf08, 0x1a3bc84c17b1d543},
{0xe41f3d6a7377eeca, 0x20caba5f1d9e4a94},
{0x8e938662882af53e, 0x547eb47b7282ee9d},
{0xb23867fb2a35b28d, 0xe99e619a4f23aa44},
{0xdec681f9f4c31f31, 0x6405fa00e2ec94d5},
{0x8b3c113c38f9f37e, 0xde83bc408dd3dd05},
{0xae0b158b4738705e, 0x9624ab50b148d446},
{0xd98ddaee19068c76, 0x3badd624dd9b0958},
{0x87f8a8d4cfa417c9, 0xe54ca5d70a80e5d7},
{0xa9f6d30a038d1dbc, 0x5e9fcf4ccd211f4d},
{0xd47487cc8470652b, 0x7647c32000696720},
{0x84c8d4dfd2c63f3b, 0x29ecd9f40041e074},
{0xa5fb0a17c777cf09, 0xf468107100525891},
{0xcf79cc9db955c2cc, 0x7182148d4066eeb5},
{0x81ac1fe293d599bf, 0xc6f14cd848405531},
{0xa21727db38cb002f, 0xb8ada00e5a506a7d},
{0xca9cf1d206fdc03b, 0xa6d90811f0e4851d},
{0xfd442e4688bd304a, 0x908f4a166d1da664},
{0x9e4a9cec15763e2e, 0x9a598e4e043287ff},
{0xc5dd44271ad3cdba, 0x40eff1e1853f29fe},
{0xf7549530e188c128, 0xd12bee59e68ef47d},
{0x9a94dd3e8cf578b9, 0x82bb74f8301958cf},
{0xc13a148e3032d6e7, 0xe36a52363c1faf02},
{0xf18899b1bc3f8ca1, 0xdc44e6c3cb279ac2},
{0x96f5600f15a7b7e5, 0x29ab103a5ef8c0ba},
{0xbcb2b812db11a5de, 0x7415d448f6b6f0e8},
{0xebdf661791d60f56, 0x111b495b3464ad22},
{0x936b9fcebb25c995, 0xcab10dd900beec35},
{0xb84687c269ef3bfb, 0x3d5d514f40eea743},
{0xe65829b3046b0afa, 0x0cb4a5a3112a5113},
{0x8ff71a0fe2c2e6dc, 0x47f0e785eaba72ac},
{0xb3f4e093db73a093, 0x59ed216765690f57},
{0xe0f218b8d25088b8, 0x306869c13ec3532d},
{0x8c974f7383725573, 0x1e414218c73a13fc},
{0xafbd2350644eeacf, 0xe5d1929ef90898fb},
{0xdbac6c247d62a583, 0xdf45f746b74abf3a},
{0x894bc396ce5da772, 0x6b8bba8c328eb784},
{0xab9eb47c81f5114f, 0x066ea92f3f326565},
{0xd686619ba27255a2, 0xc80a537b0efefebe},
{0x8613fd0145877585, 0xbd06742ce95f5f37},
{0xa798fc4196e952e7, 0x2c48113823b73705},
{0xd17f3b51fca3a7a0, 0xf75a15862ca504c6},
{0x82ef85133de648c4, 0x9a984d73dbe722fc},
{0xa3ab66580d5fdaf5, 0xc13e60d0d2e0ebbb},
{0xcc963fee10b7d1b3, 0x318df905079926a9},
{0xffbbcfe994e5c61f, 0xfdf17746497f7053},
{0x9fd561f1fd0f9bd3, 0xfeb6ea8bedefa634},
{0xc7caba6e7c5382c8, 0xfe64a52ee96b8fc1},
{0xf9bd690a1b68637b, 0x3dfdce7aa3c673b1},
{0x9c1661a651213e2d, 0x06bea10ca65c084f},
{0xc31bfa0fe5698db8, 0x486e494fcff30a63},
{0xf3e2f893dec3f126, 0x5a89dba3c3efccfb},
{0x986ddb5c6b3a76b7, 0xf89629465a75e01d},
{0xbe89523386091465, 0xf6bbb397f1135824},
{0xee2ba6c0678b597f, 0x746aa07ded582e2d},
{0x94db483840b717ef, 0xa8c2a44eb4571cdd},
{0xba121a4650e4ddeb, 0x92f34d62616ce414},
{0xe896a0d7e51e1566, 0x77b020baf9c81d18},
{0x915e2486ef32cd60, 0x0ace1474dc1d122f},
{0xb5b5ada8aaff80b8, 0x0d819992132456bb},
{0xe3231912d5bf60e6, 0x10e1fff697ed6c6a},
{0x8df5efabc5979c8f, 0xca8d3ffa1ef463c2},
{0xb1736b96b6fd83b3, 0xbd308ff8a6b17cb3},
{0xddd0467c64bce4a0, 0xac7cb3f6d05ddbdf},
{0x8aa22c0dbef60ee4, 0x6bcdf07a423aa96c},
{0xad4ab7112eb3929d, 0x86c16c98d2c953c7},
{0xd89d64d57a607744, 0xe871c7bf077ba8b8},
{0x87625f056c7c4a8b, 0x11471cd764ad4973},
{0xa93af6c6c79b5d2d, 0xd598e40d3dd89bd0},
{0xd389b47879823479, 0x4aff1d108d4ec2c4},
{0x843610cb4bf160cb, 0xcedf722a585139bb},
{0xa54394fe1eedb8fe, 0xc2974eb4ee658829},
{0xce947a3da6a9273e, 0x733d226229feea33},
{0x811ccc668829b887, 0x0806357d5a3f5260},
{0xa163ff802a3426a8, 0xca07c2dcb0cf26f8},
{0xc9bcff6034c13052, 0xfc89b393dd02f0b6},
{0xfc2c3f3841f17c67, 0xbbac2078d443ace3},
{0x9d9ba7832936edc0, 0xd54b944b84aa4c0e},
{0xc5029163f384a931, 0x0a9e795e65d4df12},
{0xf64335bcf065d37d, 0x4d4617b5ff4a16d6},
{0x99ea0196163fa42e, 0x504bced1bf8e4e46},
{0xc06481fb9bcf8d39, 0xe45ec2862f71e1d7},
{0xf07da27a82c37088, 0x5d767327bb4e5a4d},
{0x964e858c91ba2655, 0x3a6a07f8d510f870},
{0xbbe226efb628afea, 0x890489f70a55368c},
{0xeadab0aba3b2dbe5, 0x2b45ac74ccea842f},
{0x92c8ae6b464fc96f, 0x3b0b8bc90012929e},
{0xb77ada0617e3bbcb, 0x09ce6ebb40173745},
{0xe55990879ddcaabd, 0xcc420a6a101d0516},
{0x8f57fa54c2a9eab6, 0x9fa946824a12232e},
{0xb32df8e9f3546564, 0x47939822dc96abfa},
{0xdff9772470297ebd, 0x59787e2b93bc56f8},
{0x8bfbea76c619ef36, 0x57eb4edb3c55b65b},
{0xaefae51477a06b03, 0xede622920b6b23f2},
{0xdab99e59958885c4, 0xe95fab368e45ecee},
{0x88b402f7fd75539b, 0x11dbcb0218ebb415},
{0xaae103b5fcd2a881, 0xd652bdc29f26a11a},
{0xd59944a37c0752a2, 0x4be76d3346f04960},
{0x857fcae62d8493a5, 0x6f70a4400c562ddc},
{0xa6dfbd9fb8e5b88e, 0xcb4ccd500f6bb953},
{0xd097ad07a71f26b2, 0x7e2000a41346a7a8},
{0x825ecc24c873782f, 0x8ed400668c0c28c9},
{0xa2f67f2dfa90563b, 0x728900802f0f32fb},
{0xcbb41ef979346bca, 0x4f2b40a03ad2ffba},
{0xfea126b7d78186bc, 0xe2f610c84987bfa9},
{0x9f24b832e6b0f436, 0x0dd9ca7d2df4d7ca},
{0xc6ede63fa05d3143, 0x91503d1c79720dbc},
{0xf8a95fcf88747d94, 0x75a44c6397ce912b},
{0x9b69dbe1b548ce7c, 0xc986afbe3ee11abb},
{0xc24452da229b021b, 0xfbe85badce996169},
{0xf2d56790ab41c2a2, 0xfae27299423fb9c4},
{0x97c560ba6b0919a5, 0xdccd879fc967d41b},
{0xbdb6b8e905cb600f, 0x5400e987bbc1c921},
{0xed246723473e3813, 0x290123e9aab23b69},
{0x9436c0760c86e30b, 0xf9a0b6720aaf6522},
{0xb94470938fa89bce, 0xf808e40e8d5b3e6a},
{0xe7958cb87392c2c2, 0xb60b1d1230b20e05},
{0x90bd77f3483bb9b9, 0xb1c6f22b5e6f48c3},
{0xb4ecd5f01a4aa828, 0x1e38aeb6360b1af4},
{0xe2280b6c20dd5232, 0x25c6da63c38de1b1},
{0x8d590723948a535f, 0x579c487e5a38ad0f},
{0xb0af48ec79ace837, 0x2d835a9df0c6d852},
{0xdcdb1b2798182244, 0xf8e431456cf88e66},
{0x8a08f0f8bf0f156b, 0x1b8e9ecb641b5900},
{0xac8b2d36eed2dac5, 0xe272467e3d222f40},
{0xd7adf884aa879177, 0x5b0ed81dcc6abb10},
{0x86ccbb52ea94baea, 0x98e947129fc2b4ea},
{0xa87fea27a539e9a5, 0x3f2398d747b36225},
{0xd29fe4b18e88640e, 0x8eec7f0d19a03aae},
{0x83a3eeeef9153e89, 0x1953cf68300424ad},
{0xa48ceaaab75a8e2b, 0x5fa8c3423c052dd8},
{0xcdb02555653131b6, 0x3792f412cb06794e},
{0x808e17555f3ebf11, 0xe2bbd88bbee40bd1},
{0xa0b19d2ab70e6ed6, 0x5b6aceaeae9d0ec5},
{0xc8de047564d20a8b, 0xf245825a5a445276},
{0xfb158592be068d2e, 0xeed6e2f0f0d56713},
{0x9ced737bb6c4183d, 0x55464dd69685606c},
{0xc428d05aa4751e4c, 0xaa97e14c3c26b887},
{0xf53304714d9265df, 0xd53dd99f4b3066a9},
{0x993fe2c6d07b7fab, 0xe546a8038efe402a},
{0xbf8fdb78849a5f96, 0xde98520472bdd034},
{0xef73d256a5c0f77c, 0x963e66858f6d4441},
{0x95a8637627989aad, 0xdde7001379a44aa9},
{0xbb127c53b17ec159, 0x5560c018580d5d53},
{0xe9d71b689dde71af, 0xaab8f01e6e10b4a7},
{0x9226712162ab070d, 0xcab3961304ca70e9},
{0xb6b00d69bb55c8d1, 0x3d607b97c5fd0d23},
{0xe45c10c42a2b3b05, 0x8cb89a7db77c506b},
{0x8eb98a7a9a5b04e3, 0x77f3608e92adb243},
{0xb267ed1940f1c61c, 0x55f038b237591ed4},
{0xdf01e85f912e37a3, 0x6b6c46dec52f6689},
{0x8b61313bbabce2c6, 0x2323ac4b3b3da016},
{0xae397d8aa96c1b77, 0xabec975e0a0d081b},
{0xd9c7dced53c72255, 0x96e7bd358c904a22},
{0x881cea14545c7575, 0x7e50d64177da2e55},
{0xaa242499697392d2, 0xdde50bd1d5d0b9ea},
{0xd4ad2dbfc3d07787, 0x955e4ec64b44e865},
{0x84ec3c97da624ab4, 0xbd5af13bef0b113f},
{0xa6274bbdd0fadd61, 0xecb1ad8aeacdd58f},
{0xcfb11ead453994ba, 0x67de18eda5814af3},
{0x81ceb32c4b43fcf4, 0x80eacf948770ced8},
{0xa2425ff75e14fc31, 0xa1258379a94d028e},
{0xcad2f7f5359a3b3e, 0x096ee45813a04331},
{0xfd87b5f28300ca0d, 0x8bca9d6e188853fd},
{0x9e74d1b791e07e48, 0x775ea264cf55347e},
{0xc612062576589dda, 0x95364afe032a819e},
{0xf79687aed3eec551, 0x3a83ddbd83f52205},
{0x9abe14cd44753b52, 0xc4926a9672793543},
{0xc16d9a0095928a27, 0x75b7053c0f178294},
{0xf1c90080baf72cb1, 0x5324c68b12dd6339},
{0x971da05074da7bee, 0xd3f6fc16ebca5e04},
{0xbce5086492111aea, 0x88f4bb1ca6bcf585},
{0xec1e4a7db69561a5, 0x2b31e9e3d06c32e6},
{0x9392ee8e921d5d07, 0x3aff322e62439fd0},
{0xb877aa3236a4b449, 0x09befeb9fad487c3},
{0xe69594bec44de15b, 0x4c2ebe687989a9b4},
{0x901d7cf73ab0acd9, 0x0f9d37014bf60a11},
{0xb424dc35095cd80f, 0x538484c19ef38c95},
{0xe12e13424bb40e13, 0x2865a5f206b06fba},
{0x8cbccc096f5088cb, 0xf93f87b7442e45d4},
{0xafebff0bcb24aafe, 0xf78f69a51539d749},
{0xdbe6fecebdedd5be, 0xb573440e5a884d1c},
{0x89705f4136b4a597, 0x31680a88f8953031},
{0xabcc77118461cefc, 0xfdc20d2b36ba7c3e},
{0xd6bf94d5e57a42bc, 0x3d32907604691b4d},
{0x8637bd05af6c69b5, 0xa63f9a49c2c1b110},
{0xa7c5ac471b478423, 0x0fcf80dc33721d54},
{0xd1b71758e219652b, 0xd3c36113404ea4a9},
{0x83126e978d4fdf3b, 0x645a1cac083126ea},
{0xa3d70a3d70a3d70a, 0x3d70a3d70a3d70a4},
{0xcccccccccccccccc, 0xcccccccccccccccd},
{0x8000000000000000, 0x0000000000000000},
{0xa000000000000000, 0x0000000000000000},
{0xc800000000000000, 0x0000000000000000},
{0xfa00000000000000, 0x0000000000000000},
{0x9c40000000000000, 0x0000000000000000},
{0xc350000000000000, 0x0000000000000000},
{0xf424000000000000, 0x0000000000000000},
{0x9896800000000000, 0x0000000000000000},
{0xbebc200000000000, 0x0000000000000000},
{0xee6b280000000000, 0x0000000000000000},
{0x9502f90000000000, 0x0000000000000000},
{0xba43b74000000000, 0x0000000000000000},
{0xe8d4a51000000000, 0x0000000000000000},
{0x9184e72a00000000, 0x0000000000000000},
{0xb5e620f480000000, 0x0000000000000000},
{0xe35fa931a0000000, 0x0000000000000000},
{0x8e1bc9bf04000000, 0x0000000000000000},
{0xb1a2bc2ec5000000, 0x0000000000000000},
{0xde0b6b3a76400000, 0x0000000000000000},
{0x8ac7230489e80000, 0x0000000000000000},
{0xad78ebc5ac620000, 0x0000000000000000},
{0xd8d726b7177a8000, 0x0000000000000000},
{0x878678326eac9000, 0x0000000000000000},
{0xa968163f0a57b400, 0x0000000000000000},
{0xd3c21bcecceda100, 0x0000000000000000},
{0x84595161401484a0, 0x0000000000000000},
{0xa56fa5b99019a5c8, 0x0000000000000000},
{0xcecb8f27f4200f3a, 0x0000000000000000},
{0x813f3978f8940984, 0x4000000000000000},
{0xa18f07d736b90be5, 0x5000000000000000},
{0xc9f2c9cd04674ede, 0xa400000000000000},
{0xfc6f7c4045812296, 0x4d00000000000000},
{0x9dc5ada82b70b59d, 0xf020000000000000},
{0xc5371912364ce305, 0x6c28000000000000},
{0xf684df56c3e01bc6, 0xc732000000000000},
{0x9a130b963a6c115c, 0x3c7f400000000000},
{0xc097ce7bc90715b3, 0x4b9f100000000000},
{0xf0bdc21abb48db20, 0x1e86d40000000000},
{0x96769950b50d88f4, 0x1314448000000000},
{0xbc143fa4e250eb31, 0x17d955a000000000},
{0xeb194f8e1ae525fd, 0x5dcfab0800000000},
{0x92efd1b8d0cf37be, 0x5aa1cae500000000},
{0xb7abc627050305ad, 0xf14a3d9e40000000},
{0xe596b7b0c643c719, 0x6d9ccd05d0000000},
{0x8f7e32ce7bea5c6f, 0xe4820023a2000000},
{0xb35dbf821ae4f38b, 0xdda2802c8a800000},
{0xe0352f62a19e306e, 0xd50b2037ad200000},
{0x8c213d9da502de45, 0x4526f422cc340000},
{0xaf298d050e4395d6, 0x9670b12b7f410000},
{0xdaf3f04651d47b4c, 0x3c0cdd765f114000},
{0x88d8762bf324cd0f, 0xa5880a69fb6ac800},
{0xab0e93b6efee0053, 0x8eea0d047a457a00},
{0xd5d238a4abe98068, 0x72a4904598d6d880},
{0x85a36366eb71f041, 0x47a6da2b7f864750},
{0xa70c3c40a64e6c51, 0x999090b65f67d924},
{0xd0cf4b50cfe20765, 0xfff4b4e3f741cf6d},
{0x82818f1281ed449f, 0xbff8f10e7a8921a5},
{0xa321f2d7226895c7, 0xaff72d52192b6a0e},
{0xcbea6f8ceb02bb39, 0x9bf4f8a69f764491},
{0xfee50b7025c36a08, 0x02f236d04753d5b5},
{0x9f4f2726179a2245, 0x01d762422c946591},
{0xc722f0ef9d80aad6, 0x424d3ad2b7b97ef6},
{0xf8ebad2b84e0d58b, 0xd2e0898765a7deb3},
{0x9b934c3b330c8577, 0x63cc55f49f88eb30},
{0xc2781f49ffcfa6d5, 0x3cbf6b71c76b25fc},
{0xf316271c7fc3908a, 0x8bef464e3945ef7b},
{0x97edd871cfda3a56, 0x97758bf0e3cbb5ad},
{0xbde94e8e43d0c8ec, 0x3d52eeed1cbea318},
{0xed63a231d4c4fb27, 0x4ca7aaa863ee4bde},
{0x945e455f24fb1cf8, 0x8fe8caa93e74ef6b},
{0xb975d6b6ee39e436, 0xb3e2fd538e122b45},
{0xe7d34c64a9c85d44, 0x60dbbca87196b617},
{0x90e40fbeea1d3a4a, 0xbc8955e946fe31ce},
{0xb51d13aea4a488dd, 0x6babab6398bdbe42},
{0xe264589a4dcdab14, 0xc696963c7eed2dd2},
{0x8d7eb76070a08aec, 0xfc1e1de5cf543ca3},
{0xb0de65388cc8ada8, 0x3b25a55f43294bcc},
{0xdd15fe86affad912, 0x49ef0eb713f39ebf},
{0x8a2dbf142dfcc7ab, 0x6e3569326c784338},
{0xacb92ed9397bf996, 0x49c2c37f07965405},
{0xd7e77a8f87daf7fb, 0xdc33745ec97be907},
{0x86f0ac99b4e8dafd, 0x69a028bb3ded71a4},
{0xa8acd7c0222311bc, 0xc40832ea0d68ce0d},
{0xd2d80db02aabd62b, 0xf50a3fa490c30191},
{0x83c7088e1aab65db, 0x792667c6da79e0fb},
{0xa4b8cab1a1563f52, 0x577001b891185939},
{0xcde6fd5e09abcf26, 0xed4c0226b55e6f87},
{0x80b05e5ac60b6178, 0x544f8158315b05b5},
{0xa0dc75f1778e39d6, 0x696361ae3db1c722},
{0xc913936dd571c84c, 0x03bc3a19cd1e38ea},
{0xfb5878494ace3a5f, 0x04ab48a04065c724},
{0x9d174b2dcec0e47b, 0x62eb0d64283f9c77},
{0xc45d1df942711d9a, 0x3ba5d0bd324f8395},
{0xf5746577930d6500, 0xca8f44ec7ee3647a},
{0x9968bf6abbe85f20, 0x7e998b13cf4e1ecc},
{0xbfc2ef456ae276e8, 0x9e3fedd8c321a67f},
{0xefb3ab16c59b14a2, 0xc5cfe94ef3ea101f},
{0x95d04aee3b80ece5, 0xbba1f1d158724a13},
{0xbb445da9ca61281f, 0x2a8a6e45ae8edc98},
{0xea1575143cf97226, 0xf52d09d71a3293be},
{0x924d692ca61be758, 0x593c2626705f9c57},
{0xb6e0c377cfa2e12e, 0x6f8b2fb00c77836d},
{0xe498f455c38b997a, 0x0b6dfb9c0f956448},
{0x8edf98b59a373fec, 0x4724bd4189bd5ead},
{0xb2977ee300c50fe7, 0x58edec91ec2cb658},
{0xdf3d5e9bc0f653e1, 0x2f2967b66737e3ee},
{0x8b865b215899f46c, 0xbd79e0d20082ee75},
{0xae67f1e9aec07187, 0xecd8590680a3aa12},
{0xda01ee641a708de9, 0xe80e6f4820cc9496},
{0x884134fe908658b2, 0x3109058d147fdcde},
{0xaa51823e34a7eede, 0xbd4b46f0599fd416},
{0xd4e5e2cdc1d1ea96, 0x6c9e18ac7007c91b},
{0x850fadc09923329e, 0x03e2cf6bc604ddb1},
{0xa6539930bf6bff45, 0x84db8346b786151d},
{0xcfe87f7cef46ff16, 0xe612641865679a64},
{0x81f14fae158c5f6e, 0x4fcb7e8f3f60c07f},
{0xa26da3999aef7749, 0xe3be5e330f38f09e},
{0xcb090c8001ab551c, 0x5cadf5bfd3072cc6},
{0xfdcb4fa002162a63, 0x73d9732fc7c8f7f7},
{0x9e9f11c4014dda7e, 0x2867e7fddcdd9afb},
{0xc646d63501a1511d, 0xb281e1fd541501b9},
{0xf7d88bc24209a565, 0x1f225a7ca91a4227},
{0x9ae757596946075f, 0x3375788de9b06959},
{0xc1a12d2fc3978937, 0x0052d6b1641c83af},
{0xf209787bb47d6b84, 0xc0678c5dbd23a49b},
{0x9745eb4d50ce6332, 0xf840b7ba963646e1},
{0xbd176620a501fbff, 0xb650e5a93bc3d899},
{0xec5d3fa8ce427aff, 0xa3e51f138ab4cebf},
{0x93ba47c980e98cdf, 0xc66f336c36b10138},
{0xb8a8d9bbe123f017, 0xb80b0047445d4185},
{0xe6d3102ad96cec1d, 0xa60dc059157491e6},
{0x9043ea1ac7e41392, 0x87c89837ad68db30},
{0xb454e4a179dd1877, 0x29babe4598c311fc},
{0xe16a1dc9d8545e94, 0xf4296dd6fef3d67b},
{0x8ce2529e2734bb1d, 0x1899e4a65f58660d},
{0xb01ae745b101e9e4, 0x5ec05dcff72e7f90},
{0xdc21a1171d42645d, 0x76707543f4fa1f74},
{0x899504ae72497eba, 0x6a06494a791c53a9},
{0xabfa45da0edbde69, 0x0487db9d17636893},
{0xd6f8d7509292d603, 0x45a9d2845d3c42b7},
{0x865b86925b9bc5c2, 0x0b8a2392ba45a9b3},
{0xa7f26836f282b732, 0x8e6cac7768d7141f},
{0xd1ef0244af2364ff, 0x3207d795430cd927},
{0x8335616aed761f1f, 0x7f44e6bd49e807b9},
{0xa402b9c5a8d3a6e7, 0x5f16206c9c6209a7},
{0xcd036837130890a1, 0x36dba887c37a8c10},
{0x802221226be55a64, 0xc2494954da2c978a},
{0xa02aa96b06deb0fd, 0xf2db9baa10b7bd6d},
{0xc83553c5c8965d3d, 0x6f92829494e5acc8},
{0xfa42a8b73abbf48c, 0xcb772339ba1f17fa},
{0x9c69a97284b578d7, 0xff2a760414536efc},
{0xc38413cf25e2d70d, 0xfef5138519684abb},
{0xf46518c2ef5b8cd1, 0x7eb258665fc25d6a},
{0x98bf2f79d5993802, 0xef2f773ffbd97a62},
{0xbeeefb584aff8603, 0xaafb550ffacfd8fb},
{0xeeaaba2e5dbf6784, 0x95ba2a53f983cf39},
{0x952ab45cfa97a0b2, 0xdd945a747bf26184},
{0xba756174393d88df, 0x94f971119aeef9e5},
{0xe912b9d1478ceb17, 0x7a37cd5601aab85e},
{0x91abb422ccb812ee, 0xac62e055c10ab33b},
{0xb616a12b7fe617aa, 0x577b986b314d600a},
{0xe39c49765fdf9d94, 0xed5a7e85fda0b80c},
{0x8e41ade9fbebc27d, 0x14588f13be847308},
{0xb1d219647ae6b31c, 0x596eb2d8ae258fc9},
{0xde469fbd99a05fe3, 0x6fca5f8ed9aef3bc},
{0x8aec23d680043bee, 0x25de7bb9480d5855},
{0xada72ccc20054ae9, 0xaf561aa79a10ae6b},
{0xd910f7ff28069da4, 0x1b2ba1518094da05},
{0x87aa9aff79042286, 0x90fb44d2f05d0843},
{0xa99541bf57452b28, 0x353a1607ac744a54},
{0xd3fa922f2d1675f2, 0x42889b8997915ce9},
{0x847c9b5d7c2e09b7, 0x69956135febada12},
{0xa59bc234db398c25, 0x43fab9837e699096},
{0xcf02b2c21207ef2e, 0x94f967e45e03f4bc},
{0x8161afb94b44f57d, 0x1d1be0eebac278f6},
{0xa1ba1ba79e1632dc, 0x6462d92a69731733},
{0xca28a291859bbf93, 0x7d7b8f7503cfdcff},
{0xfcb2cb35e702af78, 0x5cda735244c3d43f},
{0x9defbf01b061adab, 0x3a0888136afa64a8},
{0xc56baec21c7a1916, 0x088aaa1845b8fdd1},
{0xf6c69a72a3989f5b, 0x8aad549e57273d46},
{0x9a3c2087a63f6399, 0x36ac54e2f678864c},
{0xc0cb28a98fcf3c7f, 0x84576a1bb416a7de},
{0xf0fdf2d3f3c30b9f, 0x656d44a2a11c51d6},
{0x969eb7c47859e743, 0x9f644ae5a4b1b326},
{0xbc4665b596706114, 0x873d5d9f0dde1fef},
{0xeb57ff22fc0c7959, 0xa90cb506d155a7eb},
{0x9316ff75dd87cbd8, 0x09a7f12442d588f3},
{0xb7dcbf5354e9bece, 0x0c11ed6d538aeb30},
{0xe5d3ef282a242e81, 0x8f1668c8a86da5fb},
{0x8fa475791a569d10, 0xf96e017d694487bd},
{0xb38d92d760ec4455, 0x37c981dcc395a9ad},
{0xe070f78d3927556a, 0x85bbe253f47b1418},
{0x8c469ab843b89562, 0x93956d7478ccec8f},
{0xaf58416654a6babb, 0x387ac8d1970027b3},
{0xdb2e51bfe9d0696a, 0x06997b05fcc0319f},
{0x88fcf317f22241e2, 0x441fece3bdf81f04},
{0xab3c2fddeeaad25a, 0xd527e81cad7626c4},
{0xd60b3bd56a5586f1, 0x8a71e223d8d3b075},
{0x85c7056562757456, 0xf6872d5667844e4a},
{0xa738c6bebb12d16c, 0xb428f8ac016561dc},
{0xd106f86e69d785c7, 0xe13336d701beba53},
{0x82a45b450226b39c, 0xecc0024661173474},
{0xa34d721642b06084, 0x27f002d7f95d0191},
{0xcc20ce9bd35c78a5, 0x31ec038df7b441f5},
{0xff290242c83396ce, 0x7e67047175a15272},
{0x9f79a169bd203e41, 0x0f0062c6e984d387},
{0xc75809c42c684dd1, 0x52c07b78a3e60869},
{0xf92e0c3537826145, 0xa7709a56ccdf8a83},
{0x9bbcc7a142b17ccb, 0x88a66076400bb692},
{0xc2abf989935ddbfe, 0x6acff893d00ea436},
{0xf356f7ebf83552fe, 0x0583f6b8c4124d44},
{0x98165af37b2153de, 0xc3727a337a8b704b},
{0xbe1bf1b059e9a8d6, 0x744f18c0592e4c5d},
{0xeda2ee1c7064130c, 0x1162def06f79df74},
{0x9485d4d1c63e8be7, 0x8addcb5645ac2ba9},
{0xb9a74a0637ce2ee1, 0x6d953e2bd7173693},
{0xe8111c87c5c1ba99, 0xc8fa8db6ccdd0438},
{0x910ab1d4db9914a0, 0x1d9c9892400a22a3},
{0xb54d5e4a127f59c8, 0x2503beb6d00cab4c},
{0xe2a0b5dc971f303a, 0x2e44ae64840fd61e},
{0x8da471a9de737e24, 0x5ceaecfed289e5d3},
{0xb10d8e1456105dad, 0x7425a83e872c5f48},
{0xdd50f1996b947518, 0xd12f124e28f7771a},
{0x8a5296ffe33cc92f, 0x82bd6b70d99aaa70},
{0xace73cbfdc0bfb7b, 0x636cc64d1001550c},
{0xd8210befd30efa5a, 0x3c47f7e05401aa4f},
{0x8714a775e3e95c78, 0x65acfaec34810a72},
{0xa8d9d1535ce3b396, 0x7f1839a741a14d0e},
{0xd31045a8341ca07c, 0x1ede48111209a051},
{0x83ea2b892091e44d, 0x934aed0aab460433},
{0xa4e4b66b68b65d60, 0xf81da84d56178540},
{0xce1de40642e3f4b9, 0x36251260ab9d668f},
{0x80d2ae83e9ce78f3, 0xc1d72b7c6b42601a},
{0xa1075a24e4421730, 0xb24cf65b8612f820},
{0xc94930ae1d529cfc, 0xdee033f26797b628},
{0xfb9b7cd9a4a7443c, 0x169840ef017da3b2},
{0x9d412e0806e88aa5, 0x8e1f289560ee864f},
{0xc491798a08a2ad4e, 0xf1a6f2bab92a27e3},
{0xf5b5d7ec8acb58a2, 0xae10af696774b1dc},
{0x9991a6f3d6bf1765, 0xacca6da1e0a8ef2a},
{0xbff610b0cc6edd3f, 0x17fd090a58d32af4},
{0xeff394dcff8a948e, 0xddfc4b4cef07f5b1},
{0x95f83d0a1fb69cd9, 0x4abdaf101564f98f},
{0xbb764c4ca7a4440f, 0x9d6d1ad41abe37f2},
{0xea53df5fd18d5513, 0x84c86189216dc5ee},
{0x92746b9be2f8552c, 0x32fd3cf5b4e49bb5},
{0xb7118682dbb66a77, 0x3fbc8c33221dc2a2},
{0xe4d5e82392a40515, 0x0fabaf3feaa5334b},
{0x8f05b1163ba6832d, 0x29cb4d87f2a7400f},
{0xb2c71d5bca9023f8, 0x743e20e9ef511013},
{0xdf78e4b2bd342cf6, 0x914da9246b255417},
{0x8bab8eefb6409c1a, 0x1ad089b6c2f7548f},
{0xae9672aba3d0c320, 0xa184ac2473b529b2},
{0xda3c0f568cc4f3e8, 0xc9e5d72d90a2741f},
{0x8865899617fb1871, 0x7e2fa67c7a658893},
{0xaa7eebfb9df9de8d, 0xddbb901b98feeab8},
{0xd51ea6fa85785631, 0x552a74227f3ea566},
{0x8533285c936b35de, 0xd53a88958f872760},
{0xa67ff273b8460356, 0x8a892abaf368f138},
{0xd01fef10a657842c, 0x2d2b7569b0432d86},
{0x8213f56a67f6b29b, 0x9c3b29620e29fc74},
{0xa298f2c501f45f42, 0x8349f3ba91b47b90},
{0xcb3f2f7642717713, 0x241c70a936219a74},
{0xfe0efb53d30dd4d7, 0xed238cd383aa0111},
{0x9ec95d1463e8a506, 0xf4363804324a40ab},
{0xc67bb4597ce2ce48, 0xb143c6053edcd0d6},
{0xf81aa16fdc1b81da, 0xdd94b7868e94050b},
{0x9b10a4e5e9913128, 0xca7cf2b4191c8327},
{0xc1d4ce1f63f57d72, 0xfd1c2f611f63a3f1},
{0xf24a01a73cf2dccf, 0xbc633b39673c8ced},
{0x976e41088617ca01, 0xd5be0503e085d814},
{0xbd49d14aa79dbc82, 0x4b2d8644d8a74e19},
{0xec9c459d51852ba2, 0xddf8e7d60ed1219f},
{0x93e1ab8252f33b45, 0xcabb90e5c942b504},
{0xb8da1662e7b00a17, 0x3d6a751f3b936244},
{0xe7109bfba19c0c9d, 0x0cc512670a783ad5},
{0x906a617d450187e2, 0x27fb2b80668b24c6},
{0xb484f9dc9641e9da, 0xb1f9f660802dedf7},
{0xe1a63853bbd26451, 0x5e7873f8a0396974},
{0x8d07e33455637eb2, 0xdb0b487b6423e1e9},
{0xb049dc016abc5e5f, 0x91ce1a9a3d2cda63},
{0xdc5c5301c56b75f7, 0x7641a140cc7810fc},
{0x89b9b3e11b6329ba, 0xa9e904c87fcb0a9e},
{0xac2820d9623bf429, 0x546345fa9fbdcd45},
{0xd732290fbacaf133, 0xa97c177947ad4096},
{0x867f59a9d4bed6c0, 0x49ed8eabcccc485e},
{0xa81f301449ee8c70, 0x5c68f256bfff5a75},
{0xd226fc195c6a2f8c, 0x73832eec6fff3112},
{0x83585d8fd9c25db7, 0xc831fd53c5ff7eac},
{0xa42e74f3d032f525, 0xba3e7ca8b77f5e56},
{0xcd3a1230c43fb26f, 0x28ce1bd2e55f35ec},
{0x80444b5e7aa7cf85, 0x7980d163cf5b81b4},
{0xa0555e361951c366, 0xd7e105bcc3326220},
{0xc86ab5c39fa63440, 0x8dd9472bf3fefaa8},
{0xfa856334878fc150, 0xb14f98f6f0feb952},
{0x9c935e00d4b9d8d2, 0x6ed1bf9a569f33d4},
{0xc3b8358109e84f07, 0x0a862f80ec4700c9},
{0xf4a642e14c6262c8, 0xcd27bb612758c0fb},
{0x98e7e9cccfbd7dbd, 0x8038d51cb897789d},
{0xbf21e44003acdd2c, 0xe0470a63e6bd56c4},
{0xeeea5d5004981478, 0x1858ccfce06cac75},
{0x95527a5202df0ccb, 0x0f37801e0c43ebc9},
{0xbaa718e68396cffd, 0xd30560258f54e6bb},
{0xe950df20247c83fd, 0x47c6b82ef32a206a},
{0x91d28b7416cdd27e, 0x4cdc331d57fa5442},
{0xb6472e511c81471d, 0xe0133fe4adf8e953},
{0xe3d8f9e563a198e5, 0x58180fddd97723a7},
{0x8e679c2f5e44ff8f, 0x570f09eaa7ea7649},
{0xb201833b35d63f73, 0x2cd2cc6551e513db},
{0xde81e40a034bcf4f, 0xf8077f7ea65e58d2},
{0x8b112e86420f6191, 0xfb04afaf27faf783},
{0xadd57a27d29339f6, 0x79c5db9af1f9b564},
{0xd94ad8b1c7380874, 0x18375281ae7822bd},
{0x87cec76f1c830548, 0x8f2293910d0b15b6},
{0xa9c2794ae3a3c69a, 0xb2eb3875504ddb23},
{0xd433179d9c8cb841, 0x5fa60692a46151ec},
{0x849feec281d7f328, 0xdbc7c41ba6bcd334},
{0xa5c7ea73224deff3, 0x12b9b522906c0801},
{0xcf39e50feae16bef, 0xd768226b34870a01},
{0x81842f29f2cce375, 0xe6a1158300d46641},
{0xa1e53af46f801c53, 0x60495ae3c1097fd1},
{0xca5e89b18b602368, 0x385bb19cb14bdfc5},
{0xfcf62c1dee382c42, 0x46729e03dd9ed7b6},
{0x9e19db92b4e31ba9, 0x6c07a2c26a8346d2},
{0xc5a05277621be293, 0xc7098b7305241886},
{0xf70867153aa2db38, 0xb8cbee4fc66d1ea8},
{0x9a65406d44a5c903, 0x737f74f1dc043329},
{0xc0fe908895cf3b44, 0x505f522e53053ff3},
{0xf13e34aabb430a15, 0x647726b9e7c68ff0},
{0x96c6e0eab509e64d, 0x5eca783430dc19f6},
{0xbc789925624c5fe0, 0xb67d16413d132073},
{0xeb96bf6ebadf77d8, 0xe41c5bd18c57e890},
{0x933e37a534cbaae7, 0x8e91b962f7b6f15a},
{0xb80dc58e81fe95a1, 0x723627bbb5a4adb1},
{0xe61136f2227e3b09, 0xcec3b1aaa30dd91d},
{0x8fcac257558ee4e6, 0x213a4f0aa5e8a7b2},
{0xb3bd72ed2af29e1f, 0xa988e2cd4f62d19e},
{0xe0accfa875af45a7, 0x93eb1b80a33b8606},
{0x8c6c01c9498d8b88, 0xbc72f130660533c4},
{0xaf87023b9bf0ee6a, 0xeb8fad7c7f8680b5},
{0xdb68c2ca82ed2a05, 0xa67398db9f6820e2},
#else
{0xff77b1fcbebcdc4f, 0x25e8e89c13bb0f7b},
{0xce5d73ff402d98e3, 0xfb0a3d212dc81290},
{0xa6b34ad8c9dfc06f, 0xf42faa48c0ea481f},
{0x86a8d39ef77164bc, 0xae5dff9c02033198},
{0xd98ddaee19068c76, 0x3badd624dd9b0958},
{0xafbd2350644eeacf, 0xe5d1929ef90898fb},
{0x8df5efabc5979c8f, 0xca8d3ffa1ef463c2},
{0xe55990879ddcaabd, 0xcc420a6a101d0516},
{0xb94470938fa89bce, 0xf808e40e8d5b3e6a},
{0x95a8637627989aad, 0xdde7001379a44aa9},
{0xf1c90080baf72cb1, 0x5324c68b12dd6339},
{0xc350000000000000, 0x0000000000000000},
{0x9dc5ada82b70b59d, 0xf020000000000000},
{0xfee50b7025c36a08, 0x02f236d04753d5b5},
{0xcde6fd5e09abcf26, 0xed4c0226b55e6f87},
{0xa6539930bf6bff45, 0x84db8346b786151d},
{0x865b86925b9bc5c2, 0x0b8a2392ba45a9b3},
{0xd910f7ff28069da4, 0x1b2ba1518094da05},
{0xaf58416654a6babb, 0x387ac8d1970027b3},
{0x8da471a9de737e24, 0x5ceaecfed289e5d3},
{0xe4d5e82392a40515, 0x0fabaf3feaa5334b},
{0xb8da1662e7b00a17, 0x3d6a751f3b936244},
{0x95527a5202df0ccb, 0x0f37801e0c43ebc9},
{0xf13e34aabb430a15, 0x647726b9e7c68ff0}
#endif
};
#if FMT_USE_FULL_CACHE_DRAGONBOX
return pow10_significands[k - float_info<double>::min_k];
#else
static constexpr const uint64_t powers_of_5_64[] = {
0x0000000000000001, 0x0000000000000005, 0x0000000000000019,
0x000000000000007d, 0x0000000000000271, 0x0000000000000c35,
0x0000000000003d09, 0x000000000001312d, 0x000000000005f5e1,
0x00000000001dcd65, 0x00000000009502f9, 0x0000000002e90edd,
0x000000000e8d4a51, 0x0000000048c27395, 0x000000016bcc41e9,
0x000000071afd498d, 0x0000002386f26fc1, 0x000000b1a2bc2ec5,
0x000003782dace9d9, 0x00001158e460913d, 0x000056bc75e2d631,
0x0001b1ae4d6e2ef5, 0x000878678326eac9, 0x002a5a058fc295ed,
0x00d3c21bcecceda1, 0x0422ca8b0a00a425, 0x14adf4b7320334b9};
static const int compression_ratio = 27;
// Compute base index.
int cache_index = (k - float_info<double>::min_k) / compression_ratio;
int kb = cache_index * compression_ratio + float_info<double>::min_k;
int offset = k - kb;
// Get base cache.
uint128_fallback base_cache = pow10_significands[cache_index];
if (offset == 0) return base_cache;
// Compute the required amount of bit-shift.
int alpha = floor_log2_pow10(kb + offset) - floor_log2_pow10(kb) - offset;
FMT_ASSERT(alpha > 0 && alpha < 64, "shifting error detected");
// Try to recover the real cache.
uint64_t pow5 = powers_of_5_64[offset];
uint128_fallback recovered_cache = umul128(base_cache.high(), pow5);
uint128_fallback middle_low = umul128(base_cache.low(), pow5);
recovered_cache += middle_low.high();
uint64_t high_to_middle = recovered_cache.high() << (64 - alpha);
uint64_t middle_to_low = recovered_cache.low() << (64 - alpha);
recovered_cache =
uint128_fallback{(recovered_cache.low() >> alpha) | high_to_middle,
((middle_low.low() >> alpha) | middle_to_low)};
FMT_ASSERT(recovered_cache.low() + 1 != 0, "");
return {recovered_cache.high(), recovered_cache.low() + 1};
#endif
}
// Result of a multiplication by a cached power, truncated to the carrier
// type, plus a flag telling whether the discarded low bits were all zero
// (i.e. the product is exact at this precision).
struct compute_mul_result {
  carrier_uint result;
  bool is_integer;
};
// Parity (lowest retained bit) of a multiplication result, together with an
// integer-ness flag for the discarded fractional part.
struct compute_mul_parity_result {
  bool parity;
  bool is_integer;
};
// Multiplies u by the 128-bit cached power and keeps the upper 128 bits of
// the 192-bit product.
static auto compute_mul(carrier_uint u,
                        const cache_entry_type& cache) noexcept
    -> compute_mul_result {
  auto upper = umul192_upper128(u, cache);
  // The product is an integer exactly when the retained low word is zero.
  return compute_mul_result{upper.high(), upper.low() == 0};
}
// Computes the interval width delta, scaled by 2^beta, from the high word of
// the cached power.
static auto compute_delta(const cache_entry_type& cache, int beta) noexcept
    -> uint32_t {
  const int shift = 64 - 1 - beta;
  return static_cast<uint32_t>(cache.high() >> shift);
}
// Computes the parity bit of two_f * cache at the position selected by beta,
// and whether all the bits below that position (in the retained 128-bit
// window) are zero. beta must lie in [1, 64) so the shifts are well defined.
static auto compute_mul_parity(carrier_uint two_f,
                               const cache_entry_type& cache,
                               int beta) noexcept
    -> compute_mul_parity_result {
  FMT_ASSERT(beta >= 1, "");
  FMT_ASSERT(beta < 64, "");
  auto r = umul192_lower128(two_f, cache);
  // Parity: bit (64 - beta) of the high word. Integer check: the 64 bits
  // immediately below the parity bit must all be zero.
  return {((r.high() >> (64 - beta)) & 1) != 0,
          ((r.high() << beta) | (r.low() >> (64 - beta))) == 0};
}
// Computes the scaled left endpoint of the shorter (asymmetric) rounding
// interval from the high word of the cached power.
static auto compute_left_endpoint_for_shorter_interval_case(
    const cache_entry_type& cache, int beta) noexcept -> carrier_uint {
  auto hi = cache.high();
  auto numerator = hi - (hi >> (num_significand_bits<double>() + 2));
  return numerator >> (64 - num_significand_bits<double>() - 1 - beta);
}
// Computes the scaled right endpoint of the shorter (asymmetric) rounding
// interval from the high word of the cached power.
static auto compute_right_endpoint_for_shorter_interval_case(
    const cache_entry_type& cache, int beta) noexcept -> carrier_uint {
  auto hi = cache.high();
  auto numerator = hi + (hi >> (num_significand_bits<double>() + 1));
  return numerator >> (64 - num_significand_bits<double>() - 1 - beta);
}
// Computes the round-up of the midpoint for the shorter interval case:
// shift the cached high word down, add one, and halve.
static auto compute_round_up_for_shorter_interval_case(
    const cache_entry_type& cache, int beta) noexcept -> carrier_uint {
  auto shifted =
      cache.high() >> (64 - num_significand_bits<double>() - 2 - beta);
  return (shifted + 1) / 2;
}
};
// Public entry point: returns the cached (possibly recovered) 128-bit
// significand of 10^k by forwarding to the double-precision cache accessor.
FMT_FUNC auto get_cached_power(int k) noexcept -> uint128_fallback {
  return cache_accessor<double>::get_cached_power(k);
}
// Various integer checks
// Returns whether the left endpoint of the shorter rounding interval is an
// integer, which happens only for binary exponents 2 and 3.
template <typename T>
auto is_left_endpoint_integer_shorter_interval(int exponent) noexcept -> bool {
  return exponent == 2 || exponent == 3;
}
// Remove trailing zeros from n and return the number of zeros removed (float)
FMT_INLINE int remove_trailing_zeros(uint32_t& n, int s = 0) noexcept {
  FMT_ASSERT(n != 0, "");
  // Modular inverse of 5 (mod 2^32): (mod_inv_5 * 5) mod 2^32 = 1.
  constexpr uint32_t mod_inv_5 = 0xcccccccd;
  constexpr uint32_t mod_inv_25 = 0xc28f5c29; // = mod_inv_5 * mod_inv_5
  // Strip factors of 100 first: n is divisible by 100 exactly when
  // rotr(n * mod_inv_25, 2) is at most 2^32 / 100 (Granlund-Montgomery style
  // divisibility check), and that rotated value is then n / 100.
  while (true) {
    auto q = rotr(n * mod_inv_25, 2);
    if (q > max_value<uint32_t>() / 100) break;
    n = q;
    s += 2;
  }
  // Strip at most one remaining factor of 10 the same way.
  auto q = rotr(n * mod_inv_5, 1);
  if (q <= max_value<uint32_t>() / 10) {
    n = q;
    // s is even at this point, so |= 1 is equivalent to += 1.
    s |= 1;
  }
  return s;
}
// Removes trailing zeros and returns the number of zeros removed (double)
FMT_INLINE int remove_trailing_zeros(uint64_t& n) noexcept {
  FMT_ASSERT(n != 0, "");
  // This magic number is ceil(2^90 / 10^8).
  constexpr uint64_t magic_number = 12379400392853802749ull;
  auto nm = umul128(n, magic_number);
  // Is n divisible by 10^8?
  if ((nm.high() & ((1ull << (90 - 64)) - 1)) == 0 && nm.low() < magic_number) {
    // If yes, work with the quotient...
    auto n32 = static_cast<uint32_t>(nm.high() >> (90 - 64));
    // ... and use the 32 bit variant of the function, seeding the zero count
    // with the 8 zeros already removed.
    int s = remove_trailing_zeros(n32, 8);
    n = n32;
    return s;
  }
  // If n is not divisible by 10^8, work with n itself.
  // Modular inverse of 5 (mod 2^64), used for the same Granlund-Montgomery
  // style divisibility checks as in the 32-bit variant above.
  constexpr uint64_t mod_inv_5 = 0xcccccccccccccccd;
  constexpr uint64_t mod_inv_25 = 0x8f5c28f5c28f5c29; // mod_inv_5 * mod_inv_5
  int s = 0;
  // Strip factors of 100 while n remains divisible.
  while (true) {
    auto q = rotr(n * mod_inv_25, 2);
    if (q > max_value<uint64_t>() / 100) break;
    n = q;
    s += 2;
  }
  // Strip at most one remaining factor of 10.
  auto q = rotr(n * mod_inv_5, 1);
  if (q <= max_value<uint64_t>() / 10) {
    n = q;
    // s is even at this point, so |= 1 is equivalent to += 1.
    s |= 1;
  }
  return s;
}
// The main algorithm for shorter interval case
// Handles inputs whose significand field is zero (see to_decimal): the
// rounding interval around such a value is asymmetric and shorter than usual.
template <typename T>
FMT_INLINE decimal_fp<T> shorter_interval_case(int exponent) noexcept {
  decimal_fp<T> ret_value;
  // Compute k and beta
  const int minus_k = floor_log10_pow2_minus_log10_4_over_3(exponent);
  const int beta = exponent + floor_log2_pow10(-minus_k);
  // Compute xi and zi (the scaled left and right interval endpoints)
  using cache_entry_type = typename cache_accessor<T>::cache_entry_type;
  const cache_entry_type cache = cache_accessor<T>::get_cached_power(-minus_k);
  auto xi = cache_accessor<T>::compute_left_endpoint_for_shorter_interval_case(
      cache, beta);
  auto zi = cache_accessor<T>::compute_right_endpoint_for_shorter_interval_case(
      cache, beta);
  // If the left endpoint is not an integer, increase it
  if (!is_left_endpoint_integer_shorter_interval<T>(exponent)) ++xi;
  // Try bigger divisor
  ret_value.significand = zi / 10;
  // If succeed, remove trailing zeros if necessary and return
  if (ret_value.significand * 10 >= xi) {
    ret_value.exponent = minus_k + 1;
    ret_value.exponent += remove_trailing_zeros(ret_value.significand);
    return ret_value;
  }
  // Otherwise, compute the round-up of y
  ret_value.significand =
      cache_accessor<T>::compute_round_up_for_shorter_interval_case(cache,
                                                                    beta);
  ret_value.exponent = minus_k;
  // When tie occurs, choose one of them according to the rule
  // (prefer the even significand).
  if (exponent >= float_info<T>::shorter_interval_tie_lower_threshold &&
      exponent <= float_info<T>::shorter_interval_tie_upper_threshold) {
    ret_value.significand = ret_value.significand % 2 == 0
                                ? ret_value.significand
                                : ret_value.significand - 1;
  } else if (ret_value.significand < xi) {
    // The rounded-up value fell left of the interval; bump it back inside.
    ++ret_value.significand;
  }
  return ret_value;
}
// Converts the IEEE-754 value x to its shortest round-trippable decimal
// representation (significand, decimal exponent) via the Dragonbox algorithm.
// x is assumed finite and nonzero-handling is done below (zero returns {0,0}).
template <typename T> auto to_decimal(T x) noexcept -> decimal_fp<T> {
  // Step 1: integer promotion & Schubfach multiplier calculation.
  using carrier_uint = typename float_info<T>::carrier_uint;
  using cache_entry_type = typename cache_accessor<T>::cache_entry_type;
  auto br = bit_cast<carrier_uint>(x);
  // Extract significand bits and exponent bits.
  const carrier_uint significand_mask =
      (static_cast<carrier_uint>(1) << num_significand_bits<T>()) - 1;
  carrier_uint significand = (br & significand_mask);
  int exponent =
      static_cast<int>((br & exponent_mask<T>()) >> num_significand_bits<T>());
  if (exponent != 0) { // Check if normal.
    exponent -= exponent_bias<T>() + num_significand_bits<T>();
    // Shorter interval case; proceed like Schubfach.
    // In fact, when exponent == 1 and significand == 0, the interval is
    // regular. However, it can be shown that the end-results are anyway same.
    if (significand == 0) return shorter_interval_case<T>(exponent);
    // Restore the implicit leading significand bit of a normal number.
    significand |= (static_cast<carrier_uint>(1) << num_significand_bits<T>());
  } else {
    // Subnormal case; the interval is always regular.
    if (significand == 0) return {0, 0};
    exponent =
        std::numeric_limits<T>::min_exponent - num_significand_bits<T>() - 1;
  }
  // Round-to-even: endpoints are included exactly when the significand is
  // even.
  const bool include_left_endpoint = (significand % 2 == 0);
  const bool include_right_endpoint = include_left_endpoint;
  // Compute k and beta.
  const int minus_k = floor_log10_pow2(exponent) - float_info<T>::kappa;
  const cache_entry_type cache = cache_accessor<T>::get_cached_power(-minus_k);
  const int beta = exponent + floor_log2_pow10(-minus_k);
  // Compute zi and deltai.
  // 10^kappa <= deltai < 10^(kappa + 1)
  const uint32_t deltai = cache_accessor<T>::compute_delta(cache, beta);
  const carrier_uint two_fc = significand << 1;
  // For the case of binary32, the result of integer check is not correct for
  // 29711844 * 2^-82
  // = 6.1442653300000000008655037797566933477355632930994033813476... * 10^-18
  // and 29711844 * 2^-81
  // = 1.2288530660000000001731007559513386695471126586198806762695... * 10^-17,
  // and they are the unique counterexamples. However, since 29711844 is even,
  // this does not cause any problem for the endpoints calculations; it can only
  // cause a problem when we need to perform integer check for the center.
  // Fortunately, with these inputs, that branch is never executed, so we are
  // fine.
  const typename cache_accessor<T>::compute_mul_result z_mul =
      cache_accessor<T>::compute_mul((two_fc | 1) << beta, cache);
  // Step 2: Try larger divisor; remove trailing zeros if necessary.
  // Using an upper bound on zi, we might be able to optimize the division
  // better than the compiler; we are computing zi / big_divisor here.
  decimal_fp<T> ret_value;
  ret_value.significand = divide_by_10_to_kappa_plus_1(z_mul.result);
  // r is the remainder of zi modulo big_divisor (= 10^(kappa+1)).
  uint32_t r = static_cast<uint32_t>(z_mul.result - float_info<T>::big_divisor *
                                                        ret_value.significand);
  if (r < deltai) {
    // Exclude the right endpoint if necessary.
    if (r == 0 && (z_mul.is_integer & !include_right_endpoint)) {
      --ret_value.significand;
      r = float_info<T>::big_divisor;
      goto small_divisor_case_label;
    }
  } else if (r > deltai) {
    goto small_divisor_case_label;
  } else {
    // r == deltai; compare fractional parts.
    const typename cache_accessor<T>::compute_mul_parity_result x_mul =
        cache_accessor<T>::compute_mul_parity(two_fc - 1, cache, beta);
    if (!(x_mul.parity | (x_mul.is_integer & include_left_endpoint)))
      goto small_divisor_case_label;
  }
  ret_value.exponent = minus_k + float_info<T>::kappa + 1;
  // We may need to remove trailing zeros.
  ret_value.exponent += remove_trailing_zeros(ret_value.significand);
  return ret_value;
  // Step 3: Find the significand with the smaller divisor.
small_divisor_case_label:
  ret_value.significand *= 10;
  ret_value.exponent = minus_k + float_info<T>::kappa;
  uint32_t dist = r - (deltai / 2) + (float_info<T>::small_divisor / 2);
  const bool approx_y_parity =
      ((dist ^ (float_info<T>::small_divisor / 2)) & 1) != 0;
  // Is dist divisible by 10^kappa?
  const bool divisible_by_small_divisor =
      check_divisibility_and_divide_by_pow10<float_info<T>::kappa>(dist);
  // Add dist / 10^kappa to the significand.
  ret_value.significand += dist;
  if (!divisible_by_small_divisor) return ret_value;
  // Check z^(f) >= epsilon^(f).
  // We have either yi == zi - epsiloni or yi == (zi - epsiloni) - 1,
  // where yi == zi - epsiloni if and only if z^(f) >= epsilon^(f).
  // Since there are only 2 possibilities, we only need to care about the
  // parity. Also, zi and r should have the same parity since the divisor
  // is an even number.
  const auto y_mul = cache_accessor<T>::compute_mul_parity(two_fc, cache, beta);
  // If z^(f) >= epsilon^(f), we might have a tie when z^(f) == epsilon^(f),
  // or equivalently, when y is an integer.
  if (y_mul.parity != approx_y_parity)
    --ret_value.significand;
  else if (y_mul.is_integer & (ret_value.significand % 2 != 0))
    // Break the tie toward the even significand.
    --ret_value.significand;
  return ret_value;
}
} // namespace dragonbox
} // namespace detail
// Debug formatter for bigint: hexadecimal bigits, most significant first,
// followed by a binary exponent suffix ("p<bits>") when nonzero.
template <> struct formatter<detail::bigint> {
  FMT_CONSTEXPR auto parse(format_parse_context& ctx)
      -> format_parse_context::iterator {
    // No format spec is supported for bigint.
    return ctx.begin();
  }

  auto format(const detail::bigint& n, format_context& ctx) const
      -> format_context::iterator {
    auto out = ctx.out();
    bool leading = true;
    for (auto i = n.bigits_.size(); i != 0; --i) {
      auto bigit = n.bigits_[i - 1u];
      if (leading) {
        // The most significant bigit is printed without zero padding.
        out = fmt::format_to(out, FMT_STRING("{:x}"), bigit);
        leading = false;
      } else {
        out = fmt::format_to(out, FMT_STRING("{:08x}"), bigit);
      }
    }
    if (n.exp_ > 0)
      out = fmt::format_to(out, FMT_STRING("p{}"),
                           n.exp_ * detail::bigint::bigit_bits);
    return out;
  }
};
// Transcodes the UTF-8 string s to UTF-16 into buffer_, appending a
// terminating NUL. Throws std::runtime_error on invalid UTF-8.
FMT_FUNC detail::utf8_to_utf16::utf8_to_utf16(string_view s) {
  for_each_codepoint(s, [this](uint32_t cp, string_view) {
    if (cp == invalid_code_point) FMT_THROW(std::runtime_error("invalid utf8"));
    if (cp > 0xFFFF) {
      // Outside the BMP: encode as a surrogate pair.
      cp -= 0x10000;
      buffer_.push_back(static_cast<wchar_t>(0xD800 + (cp >> 10)));
      buffer_.push_back(static_cast<wchar_t>(0xDC00 + (cp & 0x3FF)));
    } else {
      buffer_.push_back(static_cast<wchar_t>(cp));
    }
    return true;
  });
  buffer_.push_back(0);
}
// Writes a descriptive message for error_code to out. Falls back to the
// plain format_error_code output if building the std::system_error message
// itself throws (e.g. on allocation failure), so this never throws.
FMT_FUNC void format_system_error(detail::buffer<char>& out, int error_code,
                                  const char* message) noexcept {
  FMT_TRY {
    auto condition = std::error_code(error_code, std::generic_category());
    detail::write(appender(out), std::system_error(condition, message).what());
    return;
  }
  FMT_CATCH(...) {}
  format_error_code(out, error_code, message);
}
// Reports a system error via do_report_error, formatting the message with
// format_system_error; never throws.
FMT_FUNC void report_system_error(int error_code,
                                  const char* message) noexcept {
  do_report_error(format_system_error, error_code, message);
}
// Formats args per fmt and returns the result as a std::string.
FMT_FUNC auto vformat(string_view fmt, format_args args) -> std::string {
  // Don't optimize the "{}" case to keep the binary size small and because it
  // can be better optimized in fmt::format anyway.
  auto buf = memory_buffer();
  detail::vformat_to(buf, fmt, args);
  return to_string(buf);
}
namespace detail {
// Formats args according to fmt, appending the output to buf.
FMT_FUNC void vformat_to(buffer<char>& buf, string_view fmt, format_args args,
                         locale_ref loc) {
  auto out = appender(buf);
  // Fast path: a format string that is exactly "{}" needs no parsing — just
  // format the first argument with default settings.
  if (fmt.size() == 2 && equal2(fmt.data(), "{}"))
    return args.get(0).visit(default_arg_formatter<char>{out});
  parse_format_string(
      fmt, format_handler<char>{parse_context<char>(fmt), {out, args, loc}});
}
// A minimal non-owning view over a contiguous array (a pre-C++20 stand-in
// for std::span).
template <typename T> struct span {
  T* data;
  size_t size;
};
// Locks f via the MSVC CRT API. The trailing return type SFINAEs this
// overload away on platforms without _lock_file.
template <typename F> auto flockfile(F* f) -> decltype(_lock_file(f)) {
  _lock_file(f);
}
// Unlocks f via the MSVC CRT API; SFINAE'd like flockfile above.
template <typename F> auto funlockfile(F* f) -> decltype(_unlock_file(f)) {
  _unlock_file(f);
}
#ifndef getc_unlocked
// Fallback for platforms where getc_unlocked is not provided as a macro:
// forwards to the MSVC _fgetc_nolock when it exists (SFINAE via the trailing
// return type).
template <typename F> auto getc_unlocked(F* f) -> decltype(_fgetc_nolock(f)) {
  return _fgetc_nolock(f);
}
#endif
// Trait detecting whether flockfile(FILE*) is callable on this platform;
// the specialization is selected when the flockfile expression is valid.
template <typename F = FILE, typename Enable = void>
struct has_flockfile : std::false_type {};
template <typename F>
struct has_flockfile<F, void_t<decltype(flockfile(&std::declval<F&>()))>>
    : std::true_type {};
// A FILE wrapper. F is FILE defined as a template parameter to make system API
// detection work.
template <typename F> class file_base {
 public:
  F* file_;  // The wrapped stream; not closed by this class.

 public:
  file_base(F* file) : file_(file) {}
  operator F*() const { return file_; }

  // Reads a code unit from the stream.
  // Returns EOF at end of input; throws on a read error (ferror set).
  auto get() -> int {
    int result = getc_unlocked(file_);
    if (result == EOF && ferror(file_) != 0)
      FMT_THROW(system_error(errno, FMT_STRING("getc failed")));
    return result;
  }

  // Puts the code unit back into the stream buffer.
  void unget(char c) {
    if (ungetc(c, file_) == EOF)
      FMT_THROW(system_error(errno, FMT_STRING("ungetc failed")));
  }

  void flush() { fflush(this->file_); }
};
// A FILE wrapper for glibc. Accesses glibc's internal _IO_FILE fields to
// read and write the stream's own buffer directly.
template <typename F> class glibc_file : public file_base<F> {
 private:
  enum {
    line_buffered = 0x200, // _IO_LINE_BUF
    unbuffered = 2         // _IO_UNBUFFERED
  };

 public:
  using file_base<F>::file_base;

  auto is_buffered() const -> bool {
    return (this->file_->_flags & unbuffered) == 0;
  }

  void init_buffer() {
    if (this->file_->_IO_write_ptr < this->file_->_IO_write_end) return;
    // Force buffer initialization by placing and removing a char in a buffer.
    putc_unlocked(0, this->file_);
    --this->file_->_IO_write_ptr;
  }

  // Returns the file's read buffer.
  auto get_read_buffer() const -> span<const char> {
    auto ptr = this->file_->_IO_read_ptr;
    return {ptr, to_unsigned(this->file_->_IO_read_end - ptr)};
  }

  // Returns the file's write buffer (the unused space after the write
  // pointer).
  auto get_write_buffer() const -> span<char> {
    auto ptr = this->file_->_IO_write_ptr;
    return {ptr, to_unsigned(this->file_->_IO_buf_end - ptr)};
  }

  void advance_write_buffer(size_t size) { this->file_->_IO_write_ptr += size; }

  // A line-buffered stream needs flushing when the pending (unwritten) data
  // contains a newline.
  bool needs_flush() const {
    if ((this->file_->_flags & line_buffered) == 0) return false;
    char* end = this->file_->_IO_write_end;
    return memchr(end, '\n', to_unsigned(this->file_->_IO_write_ptr - end));
  }

  void flush() { fflush_unlocked(this->file_); }
};
// A FILE wrapper for Apple's libc. Accesses the BSD FILE internals (_p, _w,
// _r, _bf) to read and write the stream's own buffer directly.
template <typename F> class apple_file : public file_base<F> {
 private:
  // NOTE: in Apple's stdio.h __SLBF (line buffered) is 0x0001 and __SNBF
  // (unbuffered) is 0x0002; the original comments here had the two swapped.
  enum {
    line_buffered = 1, // __SLBF
    unbuffered = 2     // __SNBF
  };

 public:
  using file_base<F>::file_base;

  auto is_buffered() const -> bool {
    return (this->file_->_flags & unbuffered) == 0;
  }

  void init_buffer() {
    if (this->file_->_p) return;
    // Force buffer initialization by placing and removing a char in a buffer.
    putc_unlocked(0, this->file_);
    --this->file_->_p;
    ++this->file_->_w;
  }

  auto get_read_buffer() const -> span<const char> {
    return {reinterpret_cast<char*>(this->file_->_p),
            to_unsigned(this->file_->_r)};
  }

  auto get_write_buffer() const -> span<char> {
    return {reinterpret_cast<char*>(this->file_->_p),
            to_unsigned(this->file_->_bf._base + this->file_->_bf._size -
                        this->file_->_p)};
  }

  void advance_write_buffer(size_t size) {
    this->file_->_p += size;
    this->file_->_w -= size;
  }

  // A line-buffered stream needs flushing when the pending data contains a
  // newline. _w counts free space, so -_w is the number of pending bytes.
  bool needs_flush() const {
    if ((this->file_->_flags & line_buffered) == 0) return false;
    return memchr(this->file_->_p + this->file_->_w, '\n',
                  to_unsigned(-this->file_->_w));
  }
};
// A fallback FILE wrapper for platforms whose FILE internals are unknown.
// It exposes no write buffer and emulates a one-character read buffer via
// the ungetc push-back slot.
template <typename F> class fallback_file : public file_base<F> {
 private:
  char pushback_;            // The last character pushed back via unget.
  bool has_pushback_ = false;

 public:
  using file_base<F>::file_base;

  auto is_buffered() const -> bool { return false; }
  auto needs_flush() const -> bool { return false; }
  void init_buffer() {}

  // The read buffer holds at most the single pushed-back character.
  auto get_read_buffer() const -> span<const char> {
    if (has_pushback_) return {&pushback_, 1u};
    return {&pushback_, 0u};
  }

  // No direct write buffer is available.
  auto get_write_buffer() const -> span<char> { return {nullptr, 0}; }
  void advance_write_buffer(size_t) {}

  auto get() -> int {
    has_pushback_ = false;  // Reading consumes any pushed-back character.
    return file_base<F>::get();
  }

  void unget(char c) {
    file_base<F>::unget(c);
    pushback_ = c;
    has_pushback_ = true;
  }
};
#ifndef FMT_USE_FALLBACK_FILE
# define FMT_USE_FALLBACK_FILE 0
#endif

// Selects the most specific FILE wrapper available. Overload resolution
// prefers the int-tagged overloads, which SFINAE away unless FILE has the
// corresponding internal field (_p on Apple, _IO_read_ptr on glibc).
template <typename F,
          FMT_ENABLE_IF(sizeof(F::_p) != 0 && !FMT_USE_FALLBACK_FILE)>
auto get_file(F* f, int) -> apple_file<F> {
  return f;
}
template <typename F,
          FMT_ENABLE_IF(sizeof(F::_IO_read_ptr) != 0 && !FMT_USE_FALLBACK_FILE)>
inline auto get_file(F* f, int) -> glibc_file<F> {
  return f;
}
// Taken only when neither platform-specific overload is viable.
inline auto get_file(FILE* f, ...) -> fallback_file<FILE> { return f; }

// The wrapper type chosen for this platform.
using file_ref = decltype(get_file(static_cast<FILE*>(nullptr), 0));
// A buffer that writes directly into a FILE's own buffer. This primary
// template is a non-functional placeholder; vprint only uses it when
// has_flockfile is true, in which case the specialization below is selected.
template <typename F = FILE, typename Enable = void>
class file_print_buffer : public buffer<char> {
 public:
  explicit file_print_buffer(F*) : buffer(nullptr, size_t()) {}
};
// Specialization used when flockfile is available: formats directly into the
// FILE's own write buffer, holding the file lock for the object's lifetime.
template <typename F>
class file_print_buffer<F, enable_if_t<has_flockfile<F>::value>>
    : public buffer<char> {
 private:
  file_ref file_;

  // Grow callback: commits what has been written so far to the FILE buffer,
  // flushes if the buffer is full, then re-points this buffer at the
  // remaining free space.
  static void grow(buffer<char>& base, size_t) {
    auto& self = static_cast<file_print_buffer&>(base);
    self.file_.advance_write_buffer(self.size());
    if (self.file_.get_write_buffer().size == 0) self.file_.flush();
    auto buf = self.file_.get_write_buffer();
    FMT_ASSERT(buf.size > 0, "");
    self.set(buf.data, buf.size);
    self.clear();
  }

 public:
  // Locks the file and points this buffer at its write buffer.
  explicit file_print_buffer(F* f) : buffer(grow, size_t()), file_(f) {
    flockfile(f);
    file_.init_buffer();
    auto buf = file_.get_write_buffer();
    set(buf.data, buf.size);
  }
  // Commits the remaining output, unlocks the file and flushes if the stream
  // is line-buffered and a newline was written.
  ~file_print_buffer() {
    file_.advance_write_buffer(size());
    bool flush = file_.needs_flush();
    F* f = file_;    // Make funlockfile depend on the template parameter F
    funlockfile(f);  // for the system API detection to work.
    if (flush) fflush(file_);
  }
};
#if !defined(_WIN32) || defined(FMT_USE_WRITE_CONSOLE)
// Non-Windows (or opted-out) stub: reports that the console write was not
// handled so callers fall back to ordinary FILE output.
FMT_FUNC auto write_console(int, string_view) -> bool { return false; }
#else
// A DWORD-compatible type: unsigned long is 4 bytes on Windows.
using dword = conditional_t<sizeof(long) == 4, unsigned long, unsigned>;
extern "C" __declspec(dllimport) int __stdcall WriteConsoleW( //
    void*, const void*, dword, dword*, void*);
// Writes text to the console attached to fd as UTF-16 via WriteConsoleW.
// Returns false on failure, in which case the caller falls back to fwrite.
FMT_FUNC bool write_console(int fd, string_view text) {
  auto u16 = utf8_to_utf16(text);
  return WriteConsoleW(reinterpret_cast<void*>(_get_osfhandle(fd)), u16.c_str(),
                       static_cast<dword>(u16.size()), nullptr, nullptr) != 0;
}
#endif
#ifdef _WIN32
// Print assuming legacy (non-Unicode) encoding: formats args, optionally
// appends a newline, and writes the raw bytes with fwrite_all (no UTF-16
// console conversion).
FMT_FUNC void vprint_mojibake(std::FILE* f, string_view fmt, format_args args,
                              bool newline) {
  auto buffer = memory_buffer();
  detail::vformat_to(buffer, fmt, args);
  if (newline) buffer.push_back('\n');
  fwrite_all(buffer.data(), buffer.size(), f);
}
#endif
// Writes text to f. On Windows, if f is attached to a console, writes via
// write_console (UTF-16) so Unicode displays correctly; otherwise falls back
// to a plain fwrite.
FMT_FUNC void print(std::FILE* f, string_view text) {
#if defined(_WIN32) && !defined(FMT_USE_WRITE_CONSOLE)
  int fd = _fileno(f);
  if (_isatty(fd)) {
    // Flush already-buffered data first so console output appears in order.
    std::fflush(f);
    if (write_console(fd, text)) return;
  }
#endif
  fwrite_all(text.data(), text.size(), f);
}
} // namespace detail
// Formats args into a temporary memory buffer and writes it to f.
FMT_FUNC void vprint_buffered(std::FILE* f, string_view fmt, format_args args) {
  auto buf = memory_buffer();
  detail::vformat_to(buf, fmt, args);
  detail::print(f, {buf.data(), buf.size()});
}
// Formats args to f, writing directly into the FILE's own buffer when the
// stream is buffered and the platform supports flockfile; otherwise goes
// through an intermediate memory buffer.
FMT_FUNC void vprint(std::FILE* f, string_view fmt, format_args args) {
  if (detail::file_ref(f).is_buffered() && detail::has_flockfile<>()) {
    auto&& buffer = detail::file_print_buffer<>(f);
    return detail::vformat_to(buffer, fmt, args);
  }
  vprint_buffered(f, fmt, args);
}
// Like vprint_buffered but appends a trailing newline before writing.
FMT_FUNC void vprintln(std::FILE* f, string_view fmt, format_args args) {
  auto buf = memory_buffer();
  detail::vformat_to(buf, fmt, args);
  buf.push_back('\n');
  detail::print(f, {buf.data(), buf.size()});
}
// Convenience overload: formats to stdout.
FMT_FUNC void vprint(string_view fmt, format_args args) {
  vprint(stdout, fmt, args);
}
namespace detail {
// One entry of the "singletons" exception table used by is_printable below:
// groups all exceptional code points that share the high byte `upper`, with
// `lower_count` low bytes listed in the companion *_lower array.
struct singleton {
  unsigned char upper;
  unsigned char lower_count;
};
// Returns whether the code point x is printable according to a pair of
// compressed tables (generated by support/printable.py): `singletons` lists
// isolated non-printable code points grouped by high byte, and `normal` is a
// run-length encoded sequence of ranges whose printability alternates
// (lengths >= 0x80 are stored in two bytes).
inline auto is_printable(uint16_t x, const singleton* singletons,
                         size_t singletons_size,
                         const unsigned char* singleton_lowers,
                         const unsigned char* normal, size_t normal_size)
    -> bool {
  auto upper = x >> 8;
  auto lower_start = 0;
  for (size_t i = 0; i < singletons_size; ++i) {
    auto s = singletons[i];
    auto lower_end = lower_start + s.lower_count;
    if (upper < s.upper) break;  // The table is sorted by upper byte.
    if (upper == s.upper) {
      // Scan this group's slice of the lower-byte list for an exact match.
      for (auto j = lower_start; j < lower_end; ++j) {
        if (singleton_lowers[j] == (x & 0xff)) return false;
      }
    }
    lower_start = lower_end;
  }
  // Walk the run-length encoded ranges; `current` flips at each range
  // boundary and holds the printability of the run containing x when the
  // remaining distance goes negative.
  auto xsigned = static_cast<int>(x);
  auto current = true;
  for (size_t i = 0; i < normal_size; ++i) {
    auto v = static_cast<int>(normal[i]);
    // High bit set means a two-byte length: low 7 bits are the high byte.
    auto len = (v & 0x80) != 0 ? (v & 0x7f) << 8 | normal[++i] : v;
    xsigned -= len;
    if (xsigned < 0) break;
    current = !current;
  }
  return current;
}
// This code is generated by support/printable.py.
// Returns whether the Unicode code point cp is printable. The BMP and plane 1
// are answered from the generated tables; higher planes use hard-coded
// non-printable ranges. Do not edit the tables by hand.
FMT_FUNC auto is_printable(uint32_t cp) -> bool {
  static constexpr singleton singletons0[] = {
      {0x00, 1},  {0x03, 5},  {0x05, 6},  {0x06, 3},  {0x07, 6},  {0x08, 8},
      {0x09, 17}, {0x0a, 28}, {0x0b, 25}, {0x0c, 20}, {0x0d, 16}, {0x0e, 13},
      {0x0f, 4},  {0x10, 3},  {0x12, 18}, {0x13, 9},  {0x16, 1},  {0x17, 5},
      {0x18, 2},  {0x19, 3},  {0x1a, 7},  {0x1c, 2},  {0x1d, 1},  {0x1f, 22},
      {0x20, 3},  {0x2b, 3},  {0x2c, 2},  {0x2d, 11}, {0x2e, 1},  {0x30, 3},
      {0x31, 2},  {0x32, 1},  {0xa7, 2},  {0xa9, 2},  {0xaa, 4},  {0xab, 8},
      {0xfa, 2},  {0xfb, 5},  {0xfd, 4},  {0xfe, 3},  {0xff, 9},
  };
  static constexpr unsigned char singletons0_lower[] = {
      0xad, 0x78, 0x79, 0x8b, 0x8d, 0xa2, 0x30, 0x57, 0x58, 0x8b, 0x8c, 0x90,
      0x1c, 0x1d, 0xdd, 0x0e, 0x0f, 0x4b, 0x4c, 0xfb, 0xfc, 0x2e, 0x2f, 0x3f,
      0x5c, 0x5d, 0x5f, 0xb5, 0xe2, 0x84, 0x8d, 0x8e, 0x91, 0x92, 0xa9, 0xb1,
      0xba, 0xbb, 0xc5, 0xc6, 0xc9, 0xca, 0xde, 0xe4, 0xe5, 0xff, 0x00, 0x04,
      0x11, 0x12, 0x29, 0x31, 0x34, 0x37, 0x3a, 0x3b, 0x3d, 0x49, 0x4a, 0x5d,
      0x84, 0x8e, 0x92, 0xa9, 0xb1, 0xb4, 0xba, 0xbb, 0xc6, 0xca, 0xce, 0xcf,
      0xe4, 0xe5, 0x00, 0x04, 0x0d, 0x0e, 0x11, 0x12, 0x29, 0x31, 0x34, 0x3a,
      0x3b, 0x45, 0x46, 0x49, 0x4a, 0x5e, 0x64, 0x65, 0x84, 0x91, 0x9b, 0x9d,
      0xc9, 0xce, 0xcf, 0x0d, 0x11, 0x29, 0x45, 0x49, 0x57, 0x64, 0x65, 0x8d,
      0x91, 0xa9, 0xb4, 0xba, 0xbb, 0xc5, 0xc9, 0xdf, 0xe4, 0xe5, 0xf0, 0x0d,
      0x11, 0x45, 0x49, 0x64, 0x65, 0x80, 0x84, 0xb2, 0xbc, 0xbe, 0xbf, 0xd5,
      0xd7, 0xf0, 0xf1, 0x83, 0x85, 0x8b, 0xa4, 0xa6, 0xbe, 0xbf, 0xc5, 0xc7,
      0xce, 0xcf, 0xda, 0xdb, 0x48, 0x98, 0xbd, 0xcd, 0xc6, 0xce, 0xcf, 0x49,
      0x4e, 0x4f, 0x57, 0x59, 0x5e, 0x5f, 0x89, 0x8e, 0x8f, 0xb1, 0xb6, 0xb7,
      0xbf, 0xc1, 0xc6, 0xc7, 0xd7, 0x11, 0x16, 0x17, 0x5b, 0x5c, 0xf6, 0xf7,
      0xfe, 0xff, 0x80, 0x0d, 0x6d, 0x71, 0xde, 0xdf, 0x0e, 0x0f, 0x1f, 0x6e,
      0x6f, 0x1c, 0x1d, 0x5f, 0x7d, 0x7e, 0xae, 0xaf, 0xbb, 0xbc, 0xfa, 0x16,
      0x17, 0x1e, 0x1f, 0x46, 0x47, 0x4e, 0x4f, 0x58, 0x5a, 0x5c, 0x5e, 0x7e,
      0x7f, 0xb5, 0xc5, 0xd4, 0xd5, 0xdc, 0xf0, 0xf1, 0xf5, 0x72, 0x73, 0x8f,
      0x74, 0x75, 0x96, 0x2f, 0x5f, 0x26, 0x2e, 0x2f, 0xa7, 0xaf, 0xb7, 0xbf,
      0xc7, 0xcf, 0xd7, 0xdf, 0x9a, 0x40, 0x97, 0x98, 0x30, 0x8f, 0x1f, 0xc0,
      0xc1, 0xce, 0xff, 0x4e, 0x4f, 0x5a, 0x5b, 0x07, 0x08, 0x0f, 0x10, 0x27,
      0x2f, 0xee, 0xef, 0x6e, 0x6f, 0x37, 0x3d, 0x3f, 0x42, 0x45, 0x90, 0x91,
      0xfe, 0xff, 0x53, 0x67, 0x75, 0xc8, 0xc9, 0xd0, 0xd1, 0xd8, 0xd9, 0xe7,
      0xfe, 0xff,
  };
  static constexpr singleton singletons1[] = {
      {0x00, 6},  {0x01, 1},  {0x03, 1},  {0x04, 2},  {0x08, 8},  {0x09, 2},
      {0x0a, 5},  {0x0b, 2},  {0x0e, 4},  {0x10, 1},  {0x11, 2},  {0x12, 5},
      {0x13, 17}, {0x14, 1},  {0x15, 2},  {0x17, 2},  {0x19, 13}, {0x1c, 5},
      {0x1d, 8},  {0x24, 1},  {0x6a, 3},  {0x6b, 2},  {0xbc, 2},  {0xd1, 2},
      {0xd4, 12}, {0xd5, 9},  {0xd6, 2},  {0xd7, 2},  {0xda, 1},  {0xe0, 5},
      {0xe1, 2},  {0xe8, 2},  {0xee, 32}, {0xf0, 4},  {0xf8, 2},  {0xf9, 2},
      {0xfa, 2},  {0xfb, 1},
  };
  static constexpr unsigned char singletons1_lower[] = {
      0x0c, 0x27, 0x3b, 0x3e, 0x4e, 0x4f, 0x8f, 0x9e, 0x9e, 0x9f, 0x06, 0x07,
      0x09, 0x36, 0x3d, 0x3e, 0x56, 0xf3, 0xd0, 0xd1, 0x04, 0x14, 0x18, 0x36,
      0x37, 0x56, 0x57, 0x7f, 0xaa, 0xae, 0xaf, 0xbd, 0x35, 0xe0, 0x12, 0x87,
      0x89, 0x8e, 0x9e, 0x04, 0x0d, 0x0e, 0x11, 0x12, 0x29, 0x31, 0x34, 0x3a,
      0x45, 0x46, 0x49, 0x4a, 0x4e, 0x4f, 0x64, 0x65, 0x5c, 0xb6, 0xb7, 0x1b,
      0x1c, 0x07, 0x08, 0x0a, 0x0b, 0x14, 0x17, 0x36, 0x39, 0x3a, 0xa8, 0xa9,
      0xd8, 0xd9, 0x09, 0x37, 0x90, 0x91, 0xa8, 0x07, 0x0a, 0x3b, 0x3e, 0x66,
      0x69, 0x8f, 0x92, 0x6f, 0x5f, 0xee, 0xef, 0x5a, 0x62, 0x9a, 0x9b, 0x27,
      0x28, 0x55, 0x9d, 0xa0, 0xa1, 0xa3, 0xa4, 0xa7, 0xa8, 0xad, 0xba, 0xbc,
      0xc4, 0x06, 0x0b, 0x0c, 0x15, 0x1d, 0x3a, 0x3f, 0x45, 0x51, 0xa6, 0xa7,
      0xcc, 0xcd, 0xa0, 0x07, 0x19, 0x1a, 0x22, 0x25, 0x3e, 0x3f, 0xc5, 0xc6,
      0x04, 0x20, 0x23, 0x25, 0x26, 0x28, 0x33, 0x38, 0x3a, 0x48, 0x4a, 0x4c,
      0x50, 0x53, 0x55, 0x56, 0x58, 0x5a, 0x5c, 0x5e, 0x60, 0x63, 0x65, 0x66,
      0x6b, 0x73, 0x78, 0x7d, 0x7f, 0x8a, 0xa4, 0xaa, 0xaf, 0xb0, 0xc0, 0xd0,
      0xae, 0xaf, 0x79, 0xcc, 0x6e, 0x6f, 0x93,
  };
  static constexpr unsigned char normal0[] = {
      0x00, 0x20, 0x5f, 0x22, 0x82, 0xdf, 0x04, 0x82, 0x44, 0x08, 0x1b, 0x04,
      0x06, 0x11, 0x81, 0xac, 0x0e, 0x80, 0xab, 0x35, 0x28, 0x0b, 0x80, 0xe0,
      0x03, 0x19, 0x08, 0x01, 0x04, 0x2f, 0x04, 0x34, 0x04, 0x07, 0x03, 0x01,
      0x07, 0x06, 0x07, 0x11, 0x0a, 0x50, 0x0f, 0x12, 0x07, 0x55, 0x07, 0x03,
      0x04, 0x1c, 0x0a, 0x09, 0x03, 0x08, 0x03, 0x07, 0x03, 0x02, 0x03, 0x03,
      0x03, 0x0c, 0x04, 0x05, 0x03, 0x0b, 0x06, 0x01, 0x0e, 0x15, 0x05, 0x3a,
      0x03, 0x11, 0x07, 0x06, 0x05, 0x10, 0x07, 0x57, 0x07, 0x02, 0x07, 0x15,
      0x0d, 0x50, 0x04, 0x43, 0x03, 0x2d, 0x03, 0x01, 0x04, 0x11, 0x06, 0x0f,
      0x0c, 0x3a, 0x04, 0x1d, 0x25, 0x5f, 0x20, 0x6d, 0x04, 0x6a, 0x25, 0x80,
      0xc8, 0x05, 0x82, 0xb0, 0x03, 0x1a, 0x06, 0x82, 0xfd, 0x03, 0x59, 0x07,
      0x15, 0x0b, 0x17, 0x09, 0x14, 0x0c, 0x14, 0x0c, 0x6a, 0x06, 0x0a, 0x06,
      0x1a, 0x06, 0x59, 0x07, 0x2b, 0x05, 0x46, 0x0a, 0x2c, 0x04, 0x0c, 0x04,
      0x01, 0x03, 0x31, 0x0b, 0x2c, 0x04, 0x1a, 0x06, 0x0b, 0x03, 0x80, 0xac,
      0x06, 0x0a, 0x06, 0x21, 0x3f, 0x4c, 0x04, 0x2d, 0x03, 0x74, 0x08, 0x3c,
      0x03, 0x0f, 0x03, 0x3c, 0x07, 0x38, 0x08, 0x2b, 0x05, 0x82, 0xff, 0x11,
      0x18, 0x08, 0x2f, 0x11, 0x2d, 0x03, 0x20, 0x10, 0x21, 0x0f, 0x80, 0x8c,
      0x04, 0x82, 0x97, 0x19, 0x0b, 0x15, 0x88, 0x94, 0x05, 0x2f, 0x05, 0x3b,
      0x07, 0x02, 0x0e, 0x18, 0x09, 0x80, 0xb3, 0x2d, 0x74, 0x0c, 0x80, 0xd6,
      0x1a, 0x0c, 0x05, 0x80, 0xff, 0x05, 0x80, 0xdf, 0x0c, 0xee, 0x0d, 0x03,
      0x84, 0x8d, 0x03, 0x37, 0x09, 0x81, 0x5c, 0x14, 0x80, 0xb8, 0x08, 0x80,
      0xcb, 0x2a, 0x38, 0x03, 0x0a, 0x06, 0x38, 0x08, 0x46, 0x08, 0x0c, 0x06,
      0x74, 0x0b, 0x1e, 0x03, 0x5a, 0x04, 0x59, 0x09, 0x80, 0x83, 0x18, 0x1c,
      0x0a, 0x16, 0x09, 0x4c, 0x04, 0x80, 0x8a, 0x06, 0xab, 0xa4, 0x0c, 0x17,
      0x04, 0x31, 0xa1, 0x04, 0x81, 0xda, 0x26, 0x07, 0x0c, 0x05, 0x05, 0x80,
      0xa5, 0x11, 0x81, 0x6d, 0x10, 0x78, 0x28, 0x2a, 0x06, 0x4c, 0x04, 0x80,
      0x8d, 0x04, 0x80, 0xbe, 0x03, 0x1b, 0x03, 0x0f, 0x0d,
  };
  static constexpr unsigned char normal1[] = {
      0x5e, 0x22, 0x7b, 0x05, 0x03, 0x04, 0x2d, 0x03, 0x66, 0x03, 0x01, 0x2f,
      0x2e, 0x80, 0x82, 0x1d, 0x03, 0x31, 0x0f, 0x1c, 0x04, 0x24, 0x09, 0x1e,
      0x05, 0x2b, 0x05, 0x44, 0x04, 0x0e, 0x2a, 0x80, 0xaa, 0x06, 0x24, 0x04,
      0x24, 0x04, 0x28, 0x08, 0x34, 0x0b, 0x01, 0x80, 0x90, 0x81, 0x37, 0x09,
      0x16, 0x0a, 0x08, 0x80, 0x98, 0x39, 0x03, 0x63, 0x08, 0x09, 0x30, 0x16,
      0x05, 0x21, 0x03, 0x1b, 0x05, 0x01, 0x40, 0x38, 0x04, 0x4b, 0x05, 0x2f,
      0x04, 0x0a, 0x07, 0x09, 0x07, 0x40, 0x20, 0x27, 0x04, 0x0c, 0x09, 0x36,
      0x03, 0x3a, 0x05, 0x1a, 0x07, 0x04, 0x0c, 0x07, 0x50, 0x49, 0x37, 0x33,
      0x0d, 0x33, 0x07, 0x2e, 0x08, 0x0a, 0x81, 0x26, 0x52, 0x4e, 0x28, 0x08,
      0x2a, 0x56, 0x1c, 0x14, 0x17, 0x09, 0x4e, 0x04, 0x1e, 0x0f, 0x43, 0x0e,
      0x19, 0x07, 0x0a, 0x06, 0x48, 0x08, 0x27, 0x09, 0x75, 0x0b, 0x3f, 0x41,
      0x2a, 0x06, 0x3b, 0x05, 0x0a, 0x06, 0x51, 0x06, 0x01, 0x05, 0x10, 0x03,
      0x05, 0x80, 0x8b, 0x62, 0x1e, 0x48, 0x08, 0x0a, 0x80, 0xa6, 0x5e, 0x22,
      0x45, 0x0b, 0x0a, 0x06, 0x0d, 0x13, 0x39, 0x07, 0x0a, 0x36, 0x2c, 0x04,
      0x10, 0x80, 0xc0, 0x3c, 0x64, 0x53, 0x0c, 0x48, 0x09, 0x0a, 0x46, 0x45,
      0x1b, 0x48, 0x08, 0x53, 0x1d, 0x39, 0x81, 0x07, 0x46, 0x0a, 0x1d, 0x03,
      0x47, 0x49, 0x37, 0x03, 0x0e, 0x08, 0x0a, 0x06, 0x39, 0x07, 0x0a, 0x81,
      0x36, 0x19, 0x80, 0xb7, 0x01, 0x0f, 0x32, 0x0d, 0x83, 0x9b, 0x66, 0x75,
      0x0b, 0x80, 0xc4, 0x8a, 0xbc, 0x84, 0x2f, 0x8f, 0xd1, 0x82, 0x47, 0xa1,
      0xb9, 0x82, 0x39, 0x07, 0x2a, 0x04, 0x02, 0x60, 0x26, 0x0a, 0x46, 0x0a,
      0x28, 0x05, 0x13, 0x82, 0xb0, 0x5b, 0x65, 0x4b, 0x04, 0x39, 0x07, 0x11,
      0x40, 0x05, 0x0b, 0x02, 0x0e, 0x97, 0xf8, 0x08, 0x84, 0xd6, 0x2a, 0x09,
      0xa2, 0xf7, 0x81, 0x1f, 0x31, 0x03, 0x11, 0x04, 0x08, 0x81, 0x8c, 0x89,
      0x04, 0x6b, 0x05, 0x0d, 0x03, 0x09, 0x07, 0x10, 0x93, 0x60, 0x80, 0xf6,
      0x0a, 0x73, 0x08, 0x6e, 0x17, 0x46, 0x80, 0x9a, 0x14, 0x0c, 0x57, 0x09,
      0x19, 0x80, 0x87, 0x81, 0x47, 0x03, 0x85, 0x42, 0x0f, 0x15, 0x85, 0x50,
      0x2b, 0x80, 0xd5, 0x2d, 0x03, 0x1a, 0x04, 0x02, 0x81, 0x70, 0x3a, 0x05,
      0x01, 0x85, 0x00, 0x80, 0xd7, 0x29, 0x4c, 0x04, 0x0a, 0x04, 0x02, 0x83,
      0x11, 0x44, 0x4c, 0x3d, 0x80, 0xc2, 0x3c, 0x06, 0x01, 0x04, 0x55, 0x05,
      0x1b, 0x34, 0x02, 0x81, 0x0e, 0x2c, 0x04, 0x64, 0x0c, 0x56, 0x0a, 0x80,
      0xae, 0x38, 0x1d, 0x0d, 0x2c, 0x04, 0x09, 0x07, 0x02, 0x0e, 0x06, 0x80,
      0x9a, 0x83, 0xd8, 0x08, 0x0d, 0x03, 0x0d, 0x03, 0x74, 0x0c, 0x59, 0x07,
      0x0c, 0x14, 0x0c, 0x04, 0x38, 0x08, 0x0a, 0x06, 0x28, 0x08, 0x22, 0x4e,
      0x81, 0x54, 0x0c, 0x15, 0x03, 0x03, 0x05, 0x07, 0x09, 0x19, 0x07, 0x07,
      0x09, 0x03, 0x0d, 0x07, 0x29, 0x80, 0xcb, 0x25, 0x0a, 0x84, 0x06,
  };
  auto lower = static_cast<uint16_t>(cp);
  if (cp < 0x10000) {
    // Basic Multilingual Plane: use the plane-0 tables.
    return is_printable(lower, singletons0,
                        sizeof(singletons0) / sizeof(*singletons0),
                        singletons0_lower, normal0, sizeof(normal0));
  }
  if (cp < 0x20000) {
    // Plane 1: use the plane-1 tables (keyed on the low 16 bits).
    return is_printable(lower, singletons1,
                        sizeof(singletons1) / sizeof(*singletons1),
                        singletons1_lower, normal1, sizeof(normal1));
  }
  // Higher planes: everything is printable except these hard-coded gaps.
  if (0x2a6de <= cp && cp < 0x2a700) return false;
  if (0x2b735 <= cp && cp < 0x2b740) return false;
  if (0x2b81e <= cp && cp < 0x2b820) return false;
  if (0x2cea2 <= cp && cp < 0x2ceb0) return false;
  if (0x2ebe1 <= cp && cp < 0x2f800) return false;
  if (0x2fa1e <= cp && cp < 0x30000) return false;
  if (0x3134b <= cp && cp < 0xe0100) return false;
  if (0xe01f0 <= cp && cp < 0x110000) return false;
  // Code points beyond U+10FFFF are not valid Unicode and are not printable.
  return cp < 0x110000;
}
} // namespace detail
FMT_END_NAMESPACE
#endif // FMT_FORMAT_INL_H_ | c | github | https://github.com/nodejs/node | deps/LIEF/third-party/spdlog/include/spdlog/fmt/bundled/format-inl.h |
# -*- coding: utf-8 -*-
"""
flask.debughelpers
~~~~~~~~~~~~~~~~~~
Various helpers to make the development experience better.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from ._compat import implements_to_string
class UnexpectedUnicodeError(AssertionError, UnicodeError):
    """Error type used where improved reporting is wanted for data that
    was unexpectedly unicode or unexpectedly binary.

    Inherits from both :exc:`AssertionError` and :exc:`UnicodeError` so it
    can be caught by handlers for either.
    """
@implements_to_string
class DebugFilesKeyError(KeyError, AssertionError):
    """Raised from request.files during debugging.  The idea is that it can
    provide a better error message than just a generic KeyError/BadRequest.
    """

    def __init__(self, request, key):
        # If the key shows up in the regular form data, the file contents
        # were dropped because the form lacked the multipart enctype.
        submitted = request.form.getlist(key)
        message = (
            'You tried to access the file "%s" in the request.files '
            'dictionary but it does not exist. The mimetype for the request '
            'is "%s" instead of "multipart/form-data" which means that no '
            'file contents were transmitted. To fix this error you should '
            'provide enctype="multipart/form-data" in your form.'
            % (key, request.mimetype)
        )
        if submitted:
            names = ', '.join('"%s"' % name for name in submitted)
            message += ('\n\nThe browser instead transmitted some file names. '
                        'This was submitted: %s' % names)
        self.msg = message

    def __str__(self):
        return self.msg
class FormDataRoutingRedirect(AssertionError):
    """This exception is raised by Flask in debug mode if it detects a
    redirect caused by the routing system when the request method is not
    GET, HEAD or OPTIONS. Reasoning: form data will be dropped.
    """

    def __init__(self, request):
        exc = request.routing_exception
        parts = ['A request was sent to this URL (%s) but a redirect was '
                 'issued automatically by the routing system to "%s".'
                 % (request.url, exc.new_url)]
        # Offer extra help when the only difference is a trailing slash.
        if exc.new_url.split('?')[0] == request.base_url + '/':
            parts.append(' The URL was defined with a trailing slash so '
                         'Flask will automatically redirect to the URL '
                         'with the trailing slash if it was accessed '
                         'without one.')
        parts.append(' Make sure to directly send your %s-request to this URL '
                     'since we can\'t make browsers or HTTP clients redirect '
                     'with form data reliably or without user interaction.' %
                     request.method)
        parts.append('\n\nNote: this exception is only raised in debug mode')
        AssertionError.__init__(self, ''.join(parts).encode('utf-8'))
def attach_enctype_error_multidict(request):
    """Since Flask 0.8 we're monkeypatching the files object in case a
    request is detected that does not use multipart form data but the files
    object is accessed.

    The patched ``__getitem__`` turns a plain :exc:`KeyError` into a
    :class:`DebugFilesKeyError` when the missing file key *was* submitted
    as a regular form field, which strongly suggests the form is missing
    ``enctype="multipart/form-data"``.
    """
    # Subclass the *runtime* class of request.files so all other multidict
    # behavior is preserved.
    oldcls = request.files.__class__
    class newcls(oldcls):
        def __getitem__(self, key):
            try:
                return oldcls.__getitem__(self, key)
            except KeyError:
                # Genuinely unknown key: propagate the original KeyError.
                if key not in request.form:
                    raise
                # The key arrived as plain form data -> raise the richer,
                # debugging-friendly error instead.
                raise DebugFilesKeyError(request, key)
    # Masquerade as the original class so reprs/introspection look familiar.
    newcls.__name__ = oldcls.__name__
    newcls.__module__ = oldcls.__module__
    # Swap the class in place; only this request's files object is affected.
    request.files.__class__ = newcls | unknown | codeparrot/codeparrot-clean |
////////////////////////////////////////////////////////////////////////////
//
// Copyright 2014 Realm Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////
#import <Realm/RLMConstants.h>
RLM_HEADER_AUDIT_BEGIN(nullability, sendability)

/// :nodoc:
/// Base class shared by Realm object types; declaration-only header.
@interface RLMObjectBase : NSObject

/// Whether this object is no longer valid for use.
/// NOTE(review): the exact invalidation conditions (object deleted, Realm
/// closed, ...) are not visible from this header — confirm in implementation.
@property (nonatomic, readonly, getter = isInvalidated) BOOL invalidated;

/// Designated initializer.
- (instancetype)init NS_DESIGNATED_INITIALIZER;

/// Name identifying this class; presumably used for the Realm schema —
/// NOTE(review): confirm against callers.
+ (NSString *)className;

// Returns whether the class is included in the default set of classes managed by a Realm.
+ (BOOL)shouldIncludeInDefaultSchema;

/// Optional override name; nullable — NOTE(review): semantics inferred from
/// the name (custom persisted object name), confirm in implementation.
+ (nullable NSString *)_realmObjectName;

/// Optional name mapping dictionary; nullable — NOTE(review): presumably
/// property-name to column-name, confirm in implementation.
+ (nullable NSDictionary<NSString *, NSString *> *)_realmColumnNames;

@end
RLM_HEADER_AUDIT_END(nullability, sendability) | c | github | https://github.com/realm/realm-swift | Realm/RLMObjectBase.h
use core::ops::ControlFlow;
use std::borrow::Cow;
use std::iter;
use hir::def_id::{DefId, DefIdMap, LocalDefId};
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_errors::codes::*;
use rustc_errors::{Applicability, ErrorGuaranteed, MultiSpan, pluralize, struct_span_code_err};
use rustc_hir::def::{DefKind, Res};
use rustc_hir::intravisit::VisitorExt;
use rustc_hir::{self as hir, AmbigArg, GenericParamKind, ImplItemKind, intravisit};
use rustc_infer::infer::{self, BoundRegionConversionTime, InferCtxt, TyCtxtInferExt};
use rustc_infer::traits::util;
use rustc_middle::ty::error::{ExpectedFound, TypeError};
use rustc_middle::ty::{
self, BottomUpFolder, GenericArgs, GenericParamDefKind, Generics, Ty, TyCtxt, TypeFoldable,
TypeFolder, TypeSuperFoldable, TypeVisitable, TypeVisitableExt, TypeVisitor, TypingMode,
Upcast,
};
use rustc_middle::{bug, span_bug};
use rustc_span::{DUMMY_SP, Span};
use rustc_trait_selection::error_reporting::InferCtxtErrorExt;
use rustc_trait_selection::infer::InferCtxtExt;
use rustc_trait_selection::regions::InferCtxtRegionExt;
use rustc_trait_selection::traits::{
self, FulfillmentError, ObligationCause, ObligationCauseCode, ObligationCtxt,
};
use tracing::{debug, instrument};
use super::potentially_plural_count;
use crate::errors::{LifetimesOrBoundsMismatchOnTrait, MethodShouldReturnFuture};
pub(super) mod refine;
/// Call the query `tcx.compare_impl_item()` directly instead.
pub(super) fn compare_impl_item(
    tcx: TyCtxt<'_>,
    impl_item_def_id: LocalDefId,
) -> Result<(), ErrorGuaranteed> {
    let impl_item = tcx.associated_item(impl_item_def_id);
    // Propagate the error if this impl item has no trait counterpart.
    let trait_item = tcx.associated_item(impl_item.expect_trait_impl()?);
    let container = impl_item.container_id(tcx);
    let impl_trait_ref = tcx.impl_trait_ref(container).instantiate_identity();
    debug!(?impl_trait_ref);

    // Dispatch to the kind-specific comparison routine.
    match impl_item.kind {
        ty::AssocKind::Const { .. } => {
            compare_impl_const(tcx, impl_item, trait_item, impl_trait_ref)
        }
        ty::AssocKind::Fn { .. } => compare_impl_method(tcx, impl_item, trait_item, impl_trait_ref),
        ty::AssocKind::Type { .. } => compare_impl_ty(tcx, impl_item, trait_item, impl_trait_ref),
    }
}
/// Checks that a method from an impl conforms to the signature of
/// the same method as declared in the trait.
///
/// # Parameters
///
/// - `impl_m`: type of the method we are checking
/// - `trait_m`: the method in the trait
/// - `impl_trait_ref`: the TraitRef corresponding to the trait implementation
#[instrument(level = "debug", skip(tcx))]
fn compare_impl_method<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_m: ty::AssocItem,
    trait_m: ty::AssocItem,
    impl_trait_ref: ty::TraitRef<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    // Structural checks first; only then attempt predicate entailment.
    check_method_is_structurally_compatible(tcx, impl_m, trait_m, impl_trait_ref, false)?;
    compare_method_predicate_entailment(tcx, impl_m, trait_m, impl_trait_ref)
}
/// Checks a bunch of different properties of the impl/trait methods for
/// compatibility, such as asyncness, number of argument, self receiver kind,
/// and number of early- and late-bound generics.
fn check_method_is_structurally_compatible<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_m: ty::AssocItem,
    trait_m: ty::AssocItem,
    impl_trait_ref: ty::TraitRef<'tcx>,
    delay: bool,
) -> Result<(), ErrorGuaranteed> {
    // Run the structural checks in order; the first failing check aborts.
    compare_self_type(tcx, impl_m, trait_m, impl_trait_ref, delay)?;
    compare_number_of_generics(tcx, impl_m, trait_m, delay)?;
    compare_generic_param_kinds(tcx, impl_m, trait_m, delay)?;
    compare_number_of_method_arguments(tcx, impl_m, trait_m, delay)?;
    compare_synthetic_generics(tcx, impl_m, trait_m, delay)?;
    check_region_bounds_on_impl_item(tcx, impl_m, trait_m, delay)
}
/// This function is best explained by example. Consider a trait with its implementation:
///
/// ```rust
/// trait Trait<'t, T> {
/// // `trait_m`
/// fn method<'a, M>(t: &'t T, m: &'a M) -> Self;
/// }
///
/// struct Foo;
///
/// impl<'i, 'j, U> Trait<'j, &'i U> for Foo {
/// // `impl_m`
/// fn method<'b, N>(t: &'j &'i U, m: &'b N) -> Foo { Foo }
/// }
/// ```
///
/// We wish to decide if those two method types are compatible.
/// For this we have to show that, assuming the bounds of the impl hold, the
/// bounds of `trait_m` imply the bounds of `impl_m`.
///
/// We start out with `trait_to_impl_args`, that maps the trait
/// type parameters to impl type parameters. This is taken from the
/// impl trait reference:
///
/// ```rust,ignore (pseudo-Rust)
/// trait_to_impl_args = {'t => 'j, T => &'i U, Self => Foo}
/// ```
///
/// We create a mapping `dummy_args` that maps from the impl type
/// parameters to fresh types and regions. For type parameters,
/// this is the identity transform, but we could as well use any
/// placeholder types. For regions, we convert from bound to free
/// regions (Note: but only early-bound regions, i.e., those
/// declared on the impl or used in type parameter bounds).
///
/// ```rust,ignore (pseudo-Rust)
/// impl_to_placeholder_args = {'i => 'i0, U => U0, N => N0 }
/// ```
///
/// Now we can apply `placeholder_args` to the type of the impl method
/// to yield a new function type in terms of our fresh, placeholder
/// types:
///
/// ```rust,ignore (pseudo-Rust)
/// <'b> fn(t: &'i0 U0, m: &'b N0) -> Foo
/// ```
///
/// We now want to extract and instantiate the type of the *trait*
/// method and compare it. To do so, we must create a compound
/// instantiation by combining `trait_to_impl_args` and
/// `impl_to_placeholder_args`, and also adding a mapping for the method
/// type parameters. We extend the mapping to also include
/// the method parameters.
///
/// ```rust,ignore (pseudo-Rust)
/// trait_to_placeholder_args = { T => &'i0 U0, Self => Foo, M => N0 }
/// ```
///
/// Applying this to the trait method type yields:
///
/// ```rust,ignore (pseudo-Rust)
/// <'a> fn(t: &'i0 U0, m: &'a N0) -> Foo
/// ```
///
/// This type is also the same but the name of the bound region (`'a`
/// vs `'b`). However, the normal subtyping rules on fn types handle
/// this kind of equivalency just fine.
///
/// We now use these generic parameters to ensure that all declared bounds
/// are satisfied by the implementation's method.
///
/// We do this by creating a parameter environment which contains a
/// generic parameter corresponding to `impl_to_placeholder_args`. We then build
/// `trait_to_placeholder_args` and use it to convert the predicates contained
/// in the `trait_m` generics to the placeholder form.
///
/// Finally we register each of these predicates as an obligation and check that
/// they hold.
#[instrument(level = "debug", skip(tcx, impl_trait_ref))]
fn compare_method_predicate_entailment<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_m: ty::AssocItem,
    trait_m: ty::AssocItem,
    impl_trait_ref: ty::TraitRef<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    // This node-id should be used for the `body_id` field on each
    // `ObligationCause` (and the `FnCtxt`).
    //
    // FIXME(@lcnr): remove that after removing `cause.body_id` from
    // obligations.
    let impl_m_def_id = impl_m.def_id.expect_local();
    let impl_m_span = tcx.def_span(impl_m_def_id);
    let cause = ObligationCause::new(
        impl_m_span,
        impl_m_def_id,
        ObligationCauseCode::CompareImplItem {
            impl_item_def_id: impl_m_def_id,
            trait_item_def_id: trait_m.def_id,
            kind: impl_m.kind,
        },
    );

    // Create mapping from trait method to impl method.
    let impl_def_id = impl_m.container_id(tcx);
    let trait_to_impl_args = GenericArgs::identity_for_item(tcx, impl_m.def_id).rebase_onto(
        tcx,
        impl_m.container_id(tcx),
        impl_trait_ref.args,
    );
    debug!(?trait_to_impl_args);

    let impl_m_predicates = tcx.predicates_of(impl_m.def_id);
    let trait_m_predicates = tcx.predicates_of(trait_m.def_id);

    // This is the only tricky bit of the new way we check implementation methods
    // We need to build a set of predicates where only the method-level bounds
    // are from the trait and we assume all other bounds from the implementation
    // to be previously satisfied.
    //
    // We then register the obligations from the impl_m and check to see
    // if all constraints hold.
    let impl_predicates = tcx.predicates_of(impl_m_predicates.parent.unwrap());
    let mut hybrid_preds = impl_predicates.instantiate_identity(tcx).predicates;
    hybrid_preds.extend(
        trait_m_predicates.instantiate_own(tcx, trait_to_impl_args).map(|(predicate, _)| predicate),
    );

    let is_conditionally_const = tcx.is_conditionally_const(impl_m.def_id);
    if is_conditionally_const {
        // Augment the hybrid param-env with the const conditions
        // of the impl header and the trait method.
        hybrid_preds.extend(
            tcx.const_conditions(impl_def_id)
                .instantiate_identity(tcx)
                .into_iter()
                .chain(
                    tcx.const_conditions(trait_m.def_id).instantiate_own(tcx, trait_to_impl_args),
                )
                .map(|(trait_ref, _)| {
                    trait_ref.to_host_effect_clause(tcx, ty::BoundConstness::Maybe)
                }),
        );
    }

    let normalize_cause = traits::ObligationCause::misc(impl_m_span, impl_m_def_id);
    let param_env = ty::ParamEnv::new(tcx.mk_clauses(&hybrid_preds));
    // FIXME(-Zhigher-ranked-assumptions): The `hybrid_preds`
    // should be well-formed. However, using them may result in
    // region errors as we currently don't track placeholder
    // assumptions.
    //
    // To avoid being backwards incompatible with the old solver,
    // we also eagerly normalize the where-bounds in the new solver
    // here while ignoring region constraints. This means we can then
    // use where-bounds whose normalization results in placeholder
    // errors further down without getting any errors.
    //
    // It should be sound to do so as the only region errors here
    // should be due to missing implied bounds.
    //
    // cc trait-system-refactor-initiative/issues/166.
    let param_env = if tcx.next_trait_solver_globally() {
        traits::deeply_normalize_param_env_ignoring_regions(tcx, param_env, normalize_cause)
    } else {
        traits::normalize_param_env_or_error(tcx, param_env, normalize_cause)
    };
    debug!(caller_bounds=?param_env.caller_bounds());

    // Fresh inference context scoped to this single trait/impl comparison.
    let infcx = &tcx.infer_ctxt().build(TypingMode::non_body_analysis());
    let ocx = ObligationCtxt::new_with_diagnostics(infcx);

    // Create obligations for each predicate declared by the impl
    // definition in the context of the hybrid param-env. This makes
    // sure that the impl's method's where clauses are not more
    // restrictive than the trait's method (and the impl itself).
    let impl_m_own_bounds = impl_m_predicates.instantiate_own_identity();
    for (predicate, span) in impl_m_own_bounds {
        let normalize_cause = traits::ObligationCause::misc(span, impl_m_def_id);
        let predicate = ocx.normalize(&normalize_cause, param_env, predicate);
        let cause = ObligationCause::new(
            span,
            impl_m_def_id,
            ObligationCauseCode::CompareImplItem {
                impl_item_def_id: impl_m_def_id,
                trait_item_def_id: trait_m.def_id,
                kind: impl_m.kind,
            },
        );
        ocx.register_obligation(traits::Obligation::new(tcx, cause, param_env, predicate));
    }

    // If we're within a const implementation, we need to make sure that the method
    // does not assume stronger `[const]` bounds than the trait definition.
    //
    // This registers the `[const]` bounds of the impl method, which we will prove
    // using the hybrid param-env that we earlier augmented with the const conditions
    // from the impl header and trait method declaration.
    if is_conditionally_const {
        for (const_condition, span) in
            tcx.const_conditions(impl_m.def_id).instantiate_own_identity()
        {
            let normalize_cause = traits::ObligationCause::misc(span, impl_m_def_id);
            let const_condition = ocx.normalize(&normalize_cause, param_env, const_condition);
            let cause = ObligationCause::new(
                span,
                impl_m_def_id,
                ObligationCauseCode::CompareImplItem {
                    impl_item_def_id: impl_m_def_id,
                    trait_item_def_id: trait_m.def_id,
                    kind: impl_m.kind,
                },
            );
            ocx.register_obligation(traits::Obligation::new(
                tcx,
                cause,
                param_env,
                const_condition.to_host_effect_clause(tcx, ty::BoundConstness::Maybe),
            ));
        }
    }

    // We now need to check that the signature of the impl method is
    // compatible with that of the trait method. We do this by
    // checking that `impl_fty <: trait_fty`.
    //
    // FIXME: We manually instantiate the trait method here as we need
    // to manually compute its implied bounds. Otherwise this could just
    // be `ocx.sub(impl_sig, trait_sig)`.
    let mut wf_tys = FxIndexSet::default();
    let unnormalized_impl_sig = infcx.instantiate_binder_with_fresh_vars(
        impl_m_span,
        BoundRegionConversionTime::HigherRankedType,
        tcx.fn_sig(impl_m.def_id).instantiate_identity(),
    );
    let norm_cause = ObligationCause::misc(impl_m_span, impl_m_def_id);
    let impl_sig = ocx.normalize(&norm_cause, param_env, unnormalized_impl_sig);
    debug!(?impl_sig);

    let trait_sig = tcx.fn_sig(trait_m.def_id).instantiate(tcx, trait_to_impl_args);
    let trait_sig = tcx.liberate_late_bound_regions(impl_m.def_id, trait_sig);

    // Next, add all inputs and output as well-formed tys. Importantly,
    // we have to do this before normalization, since the normalized ty may
    // not contain the input parameters. See issue #87748.
    wf_tys.extend(trait_sig.inputs_and_output.iter());
    let trait_sig = ocx.normalize(&norm_cause, param_env, trait_sig);
    // We also have to add the normalized trait signature
    // as we don't normalize during implied bounds computation.
    wf_tys.extend(trait_sig.inputs_and_output.iter());
    debug!(?trait_sig);

    // FIXME: We'd want to keep more accurate spans than "the method signature" when
    // processing the comparison between the trait and impl fn, but we sadly lose them
    // and point at the whole signature when a trait bound or specific input or output
    // type would be more appropriate. In other places we have a `Vec<Span>`
    // corresponding to their `Vec<Predicate>`, but we don't have that here.
    // Fixing this would improve the output of test `issue-83765.rs`.
    // There's the same issue in compare_eii code.
    let result = ocx.sup(&cause, param_env, trait_sig, impl_sig);

    if let Err(terr) = result {
        debug!(?impl_sig, ?trait_sig, ?terr, "sub_types failed");
        let emitted = report_trait_method_mismatch(
            infcx,
            cause,
            param_env,
            terr,
            (trait_m, trait_sig),
            (impl_m, impl_sig),
            impl_trait_ref,
        );
        return Err(emitted);
    }

    // Only register WF obligations for the (unnormalized) impl signature when
    // neither signature already contains a type error.
    if !(impl_sig, trait_sig).references_error() {
        for ty in unnormalized_impl_sig.inputs_and_output {
            ocx.register_obligation(traits::Obligation::new(
                infcx.tcx,
                cause.clone(),
                param_env,
                ty::ClauseKind::WellFormed(ty.into()),
            ));
        }
    }

    // Check that all obligations are satisfied by the implementation's
    // version.
    let errors = ocx.evaluate_obligations_error_on_ambiguity();
    if !errors.is_empty() {
        let reported = infcx.err_ctxt().report_fulfillment_errors(errors);
        return Err(reported);
    }

    // Finally, resolve all regions. This catches wily misuses of
    // lifetime parameters.
    let errors = infcx.resolve_regions(impl_m_def_id, param_env, wf_tys);
    if !errors.is_empty() {
        // Prefer an already-emitted error over a fresh region-error report.
        return Err(infcx
            .tainted_by_errors()
            .unwrap_or_else(|| infcx.err_ctxt().report_region_errors(impl_m_def_id, &errors)));
    }

    Ok(())
}
/// Folder that rewrites the *kind* of `ReLateParam` regions through
/// `mapping`, leaving every other region untouched (see its `TypeFolder`
/// impl below).
struct RemapLateParam<'tcx> {
    tcx: TyCtxt<'tcx>,
    // Late-param region kinds to translate; kinds absent from the map are
    // kept as-is.
    mapping: FxIndexMap<ty::LateParamRegionKind, ty::LateParamRegionKind>,
}
impl<'tcx> TypeFolder<TyCtxt<'tcx>> for RemapLateParam<'tcx> {
    fn cx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    /// Rewrites the kind of late-param regions via `self.mapping`; all other
    /// regions (and unmapped kinds) pass through unchanged.
    fn fold_region(&mut self, region: ty::Region<'tcx>) -> ty::Region<'tcx> {
        let ty::ReLateParam(late) = region.kind() else {
            return region;
        };
        let remapped_kind = self.mapping.get(&late.kind).copied().unwrap_or(late.kind);
        ty::Region::new_late_param(self.tcx, late.scope, remapped_kind)
    }
}
/// Given a method def-id in an impl, compare the method signature of the impl
/// against the trait that it's implementing. In doing so, infer the hidden types
/// that this method's signature provides to satisfy each return-position `impl Trait`
/// in the trait signature.
///
/// The method is also responsible for making sure that the hidden types for each
/// RPITIT actually satisfy the bounds of the `impl Trait`, i.e. that if we infer
/// `impl Trait = Foo`, that `Foo: Trait` holds.
///
/// For example, given the sample code:
///
/// ```
/// use std::ops::Deref;
///
/// trait Foo {
/// fn bar() -> impl Deref<Target = impl Sized>;
/// // ^- RPITIT #1 ^- RPITIT #2
/// }
///
/// impl Foo for () {
/// fn bar() -> Box<String> { Box::new(String::new()) }
/// }
/// ```
///
/// The hidden types for the RPITITs in `bar` would be inferred to:
/// * `impl Deref` (RPITIT #1) = `Box<String>`
/// * `impl Sized` (RPITIT #2) = `String`
///
/// The relationship between these two types is straightforward in this case, but
/// may be more tenuously connected via other `impl`s and normalization rules for
/// cases of more complicated nested RPITITs.
#[instrument(skip(tcx), level = "debug", ret)]
pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_m_def_id: LocalDefId,
) -> Result<&'tcx DefIdMap<ty::EarlyBinder<'tcx, Ty<'tcx>>>, ErrorGuaranteed> {
    let impl_m = tcx.associated_item(impl_m_def_id.to_def_id());
    let trait_m = tcx.associated_item(impl_m.expect_trait_impl()?);
    let impl_trait_ref =
        tcx.impl_trait_ref(tcx.parent(impl_m_def_id.to_def_id())).instantiate_identity();
    // First, check a few of the same things as `compare_impl_method`,
    // just so we don't ICE during instantiation later.
    check_method_is_structurally_compatible(tcx, impl_m, trait_m, impl_trait_ref, true)?;

    // Errors below point at the impl method's return type.
    let impl_m_hir_id = tcx.local_def_id_to_hir_id(impl_m_def_id);
    let return_span = tcx.hir_fn_decl_by_hir_id(impl_m_hir_id).unwrap().output.span();
    let cause = ObligationCause::new(
        return_span,
        impl_m_def_id,
        ObligationCauseCode::CompareImplItem {
            impl_item_def_id: impl_m_def_id,
            trait_item_def_id: trait_m.def_id,
            kind: impl_m.kind,
        },
    );

    // Create mapping from trait to impl (i.e. impl trait header + impl method identity args).
    let trait_to_impl_args = GenericArgs::identity_for_item(tcx, impl_m.def_id).rebase_onto(
        tcx,
        impl_m.container_id(tcx),
        impl_trait_ref.args,
    );

    // Hybrid param-env: impl-level predicates plus the trait method's own
    // predicates instantiated for this impl (same idea as in
    // `compare_method_predicate_entailment`).
    let hybrid_preds = tcx
        .predicates_of(impl_m.container_id(tcx))
        .instantiate_identity(tcx)
        .into_iter()
        .chain(tcx.predicates_of(trait_m.def_id).instantiate_own(tcx, trait_to_impl_args))
        .map(|(clause, _)| clause);
    let param_env = ty::ParamEnv::new(tcx.mk_clauses_from_iter(hybrid_preds));
    let param_env = traits::normalize_param_env_or_error(
        tcx,
        param_env,
        ObligationCause::misc(tcx.def_span(impl_m_def_id), impl_m_def_id),
    );

    // Fresh inference context; the RPITIT hidden types are inferred here.
    let infcx = &tcx.infer_ctxt().build(TypingMode::non_body_analysis());
    let ocx = ObligationCtxt::new_with_diagnostics(infcx);

    // Check that the where clauses of the impl are satisfied by the hybrid param env.
    // You might ask -- what does this have to do with RPITIT inference? Nothing.
    // We check these because if the where clauses of the signatures do not match
    // up, then we don't want to give spurious other errors that point at the RPITITs.
    // They're not necessary to check, though, because we already check them in
    // `compare_method_predicate_entailment`.
    let impl_m_own_bounds = tcx.predicates_of(impl_m_def_id).instantiate_own_identity();
    for (predicate, span) in impl_m_own_bounds {
        let normalize_cause = traits::ObligationCause::misc(span, impl_m_def_id);
        let predicate = ocx.normalize(&normalize_cause, param_env, predicate);
        let cause = ObligationCause::new(
            span,
            impl_m_def_id,
            ObligationCauseCode::CompareImplItem {
                impl_item_def_id: impl_m_def_id,
                trait_item_def_id: trait_m.def_id,
                kind: impl_m.kind,
            },
        );
        ocx.register_obligation(traits::Obligation::new(tcx, cause, param_env, predicate));
    }

    // Normalize the impl signature with fresh variables for lifetime inference.
    let misc_cause = ObligationCause::misc(return_span, impl_m_def_id);
    let impl_sig = ocx.normalize(
        &misc_cause,
        param_env,
        infcx.instantiate_binder_with_fresh_vars(
            return_span,
            BoundRegionConversionTime::HigherRankedType,
            tcx.fn_sig(impl_m.def_id).instantiate_identity(),
        ),
    );
    impl_sig.error_reported()?;
    let impl_return_ty = impl_sig.output();

    // Normalize the trait signature with liberated bound vars, passing it through
    // the ImplTraitInTraitCollector, which gathers all of the RPITITs and replaces
    // them with inference variables.
    // We will use these inference variables to collect the hidden types of RPITITs.
    let mut collector = ImplTraitInTraitCollector::new(&ocx, return_span, param_env, impl_m_def_id);
    let unnormalized_trait_sig = tcx
        .liberate_late_bound_regions(
            impl_m.def_id,
            tcx.fn_sig(trait_m.def_id).instantiate(tcx, trait_to_impl_args),
        )
        .fold_with(&mut collector);

    let trait_sig = ocx.normalize(&misc_cause, param_env, unnormalized_trait_sig);
    trait_sig.error_reported()?;
    let trait_return_ty = trait_sig.output();

    // RPITITs are allowed to use the implied predicates of the method that
    // defines them. This is because we want code like:
    // ```
    // trait Foo {
    //     fn test<'a, T>(_: &'a T) -> impl Sized;
    // }
    // impl Foo for () {
    //     fn test<'a, T>(x: &'a T) -> &'a T { x }
    // }
    // ```
    // .. to compile. However, since we use both the normalized and unnormalized
    // inputs and outputs from the instantiated trait signature, we will end up
    // seeing the hidden type of an RPIT in the signature itself. Naively, this
    // means that we will use the hidden type to imply the hidden type's own
    // well-formedness.
    //
    // To avoid this, we replace the infer vars used for hidden type inference
    // with placeholders, which imply nothing about outlives bounds, and then
    // prove below that the hidden types are well formed.
    let universe = infcx.create_next_universe();
    let mut idx = ty::BoundVar::ZERO;
    let mapping: FxIndexMap<_, _> = collector
        .types
        .iter()
        .map(|(_, &(ty, _))| {
            assert!(
                infcx.resolve_vars_if_possible(ty) == ty && ty.is_ty_var(),
                "{ty:?} should not have been constrained via normalization",
                ty = infcx.resolve_vars_if_possible(ty)
            );
            idx += 1;
            (
                ty,
                Ty::new_placeholder(
                    tcx,
                    ty::PlaceholderType::new(
                        universe,
                        ty::BoundTy { var: idx, kind: ty::BoundTyKind::Anon },
                    ),
                ),
            )
        })
        .collect();
    let mut type_mapper = BottomUpFolder {
        tcx,
        ty_op: |ty| *mapping.get(&ty).unwrap_or(&ty),
        lt_op: |lt| lt,
        ct_op: |ct| ct,
    };
    // Well-formed types for region checking, with hidden-type infer vars
    // replaced by the placeholders built above.
    let wf_tys = FxIndexSet::from_iter(
        unnormalized_trait_sig
            .inputs_and_output
            .iter()
            .chain(trait_sig.inputs_and_output.iter())
            .map(|ty| ty.fold_with(&mut type_mapper)),
    );

    // Equate just the return types first so the error points at the return
    // type when only it mismatches.
    match ocx.eq(&cause, param_env, trait_return_ty, impl_return_ty) {
        Ok(()) => {}
        Err(terr) => {
            let mut diag = struct_span_code_err!(
                tcx.dcx(),
                cause.span,
                E0053,
                "method `{}` has an incompatible return type for trait",
                trait_m.name()
            );
            infcx.err_ctxt().note_type_err(
                &mut diag,
                &cause,
                tcx.hir_get_if_local(impl_m.def_id)
                    .and_then(|node| node.fn_decl())
                    .map(|decl| (decl.output.span(), Cow::from("return type in trait"), false)),
                Some(param_env.and(infer::ValuePairs::Terms(ExpectedFound {
                    expected: trait_return_ty.into(),
                    found: impl_return_ty.into(),
                }))),
                terr,
                false,
                None,
            );
            return Err(diag.emit());
        }
    }

    debug!(?trait_sig, ?impl_sig, "equating function signatures");

    // Unify the whole function signature. We need to do this to fully infer
    // the lifetimes of the return type, but do this after unifying just the
    // return types, since we want to avoid duplicating errors from
    // `compare_method_predicate_entailment`.
    match ocx.eq(&cause, param_env, trait_sig, impl_sig) {
        Ok(()) => {}
        Err(terr) => {
            // This function gets called during `compare_method_predicate_entailment` when normalizing a
            // signature that contains RPITIT. When the method signatures don't match, we have to
            // emit an error now because `compare_method_predicate_entailment` will not report the error
            // when normalization fails.
            let emitted = report_trait_method_mismatch(
                infcx,
                cause,
                param_env,
                terr,
                (trait_m, trait_sig),
                (impl_m, impl_sig),
                impl_trait_ref,
            );
            return Err(emitted);
        }
    }

    if !unnormalized_trait_sig.output().references_error() && collector.types.is_empty() {
        tcx.dcx().delayed_bug(
            "expect >0 RPITITs in call to `collect_return_position_impl_trait_in_trait_tys`",
        );
    }

    // FIXME: This has the same issue as #108544, but since this isn't breaking
    // existing code, I'm not particularly inclined to do the same hack as above
    // where we process wf obligations manually. This can be fixed in a forward-
    // compatible way later.
    let collected_types = collector.types;
    for (_, &(ty, _)) in &collected_types {
        ocx.register_obligation(traits::Obligation::new(
            tcx,
            misc_cause.clone(),
            param_env,
            ty::ClauseKind::WellFormed(ty.into()),
        ));
    }

    // Check that all obligations are satisfied by the implementation's
    // RPITs.
    let errors = ocx.evaluate_obligations_error_on_ambiguity();
    if !errors.is_empty() {
        if let Err(guar) = try_report_async_mismatch(tcx, infcx, &errors, trait_m, impl_m, impl_sig)
        {
            return Err(guar);
        }

        let guar = infcx.err_ctxt().report_fulfillment_errors(errors);
        return Err(guar);
    }

    // Finally, resolve all regions. This catches wily misuses of
    // lifetime parameters.
    ocx.resolve_regions_and_report_errors(impl_m_def_id, param_env, wf_tys)?;

    let mut remapped_types = DefIdMap::default();
    for (def_id, (ty, args)) in collected_types {
        match infcx.fully_resolve(ty) {
            Ok(ty) => {
                // `ty` contains free regions that we created earlier while liberating the
                // trait fn signature. However, projection normalization expects `ty` to
                // contains `def_id`'s early-bound regions.
                let id_args = GenericArgs::identity_for_item(tcx, def_id);
                debug!(?id_args, ?args);
                let map: FxIndexMap<_, _> = std::iter::zip(args, id_args)
                    .skip(tcx.generics_of(trait_m.def_id).count())
                    .filter_map(|(a, b)| Some((a.as_region()?, b.as_region()?)))
                    .collect();
                debug!(?map);

                // NOTE(compiler-errors): RPITITs, like all other RPITs, have early-bound
                // region args that are synthesized during AST lowering. These are args
                // that are appended to the parent args (trait and trait method). However,
                // we're trying to infer the uninstantiated type value of the RPITIT inside
                // the *impl*, so we can later use the impl's method args to normalize
                // an RPITIT to a concrete type (`confirm_impl_trait_in_trait_candidate`).
                //
                // Due to the design of RPITITs, during AST lowering, we have no idea that
                // an impl method corresponds to a trait method with RPITITs in it. Therefore,
                // we don't have a list of early-bound region args for the RPITIT in the impl.
                // Since early region parameters are index-based, we can't just rebase these
                // (trait method) early-bound region args onto the impl, and there's no
                // guarantee that the indices from the trait args and impl args line up.
                // So to fix this, we subtract the number of trait args and add the number of
                // impl args to *renumber* these early-bound regions to their corresponding
                // indices in the impl's generic parameters list.
                //
                // Also, we only need to account for a difference in trait and impl args,
                // since we previously enforce that the trait method and impl method have the
                // same generics.
                let num_trait_args = impl_trait_ref.args.len();
                let num_impl_args = tcx.generics_of(impl_m.container_id(tcx)).own_params.len();
                let ty = match ty.try_fold_with(&mut RemapHiddenTyRegions {
                    tcx,
                    map,
                    num_trait_args,
                    num_impl_args,
                    def_id,
                    impl_m_def_id: impl_m.def_id,
                    ty,
                    return_span,
                }) {
                    Ok(ty) => ty,
                    Err(guar) => Ty::new_error(tcx, guar),
                };
                remapped_types.insert(def_id, ty::EarlyBinder::bind(ty));
            }
            Err(err) => {
                // This code path is not reached in any tests, but may be
                // reachable. If this is triggered, it should be converted to
                // `span_delayed_bug` and the triggering case turned into a
                // test.
                tcx.dcx()
                    .span_bug(return_span, format!("could not fully resolve: {ty} => {err:?}"));
            }
        }
    }

    // We may not collect all RPITITs that we see in the HIR for a trait signature
    // because an RPITIT was located within a missing item. Like if we have a sig
    // returning `-> Missing<impl Sized>`, that gets converted to `-> {type error}`,
    // and when walking through the signature we end up never collecting the def id
    // of the `impl Sized`. Insert that here, so we don't ICE later.
    for assoc_item in tcx.associated_types_for_impl_traits_in_associated_fn(trait_m.def_id) {
        if !remapped_types.contains_key(assoc_item) {
            remapped_types.insert(
                *assoc_item,
                ty::EarlyBinder::bind(Ty::new_error_with_message(
                    tcx,
                    return_span,
                    "missing synthetic item for RPITIT",
                )),
            );
        }
    }

    Ok(&*tcx.arena.alloc(remapped_types))
}
/// Type folder that collects the RPITIT projections appearing in a trait
/// method signature, replacing each with a fresh inference variable and
/// registering the RPITIT's item bounds as obligations (see the
/// `TypeFolder` impl below).
struct ImplTraitInTraitCollector<'a, 'tcx, E> {
    ocx: &'a ObligationCtxt<'a, 'tcx, E>,
    /// Map from each RPITIT's def id to the inference variable that
    /// replaced it, paired with the projection's generic args.
    types: FxIndexMap<DefId, (Ty<'tcx>, ty::GenericArgsRef<'tcx>)>,
    /// Span used for the fresh inference variables and obligation causes.
    span: Span,
    param_env: ty::ParamEnv<'tcx>,
    body_id: LocalDefId,
}
impl<'a, 'tcx, E> ImplTraitInTraitCollector<'a, 'tcx, E>
where
    E: 'tcx,
{
    /// Creates a collector with an initially empty RPITIT-to-infer-var map.
    fn new(
        ocx: &'a ObligationCtxt<'a, 'tcx, E>,
        span: Span,
        param_env: ty::ParamEnv<'tcx>,
        body_id: LocalDefId,
    ) -> Self {
        Self { ocx, span, param_env, body_id, types: FxIndexMap::default() }
    }
}
impl<'tcx, E> TypeFolder<TyCtxt<'tcx>> for ImplTraitInTraitCollector<'_, 'tcx, E>
where
    E: 'tcx,
{
    fn cx(&self) -> TyCtxt<'tcx> {
        self.ocx.infcx.tcx
    }
    // Replaces each RPITIT projection encountered in the folded type with a
    // fresh inference variable (memoized in `self.types`), and registers the
    // RPITIT's item bounds as obligations so the inferred hidden type is
    // constrained to satisfy them.
    fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
        if let ty::Alias(ty::Projection, proj) = ty.kind()
            && self.cx().is_impl_trait_in_trait(proj.def_id)
        {
            // Already replaced this RPITIT: reuse the same inference variable.
            if let Some((ty, _)) = self.types.get(&proj.def_id) {
                return *ty;
            }
            //FIXME(RPITIT): Deny nested RPITIT in args too
            if proj.args.has_escaping_bound_vars() {
                bug!("FIXME(RPITIT): error here");
            }
            // Replace with infer var
            let infer_ty = self.ocx.infcx.next_ty_var(self.span);
            // Record the mapping *before* recursing into the bounds below, so
            // self-referential bounds hit the memoized entry above.
            self.types.insert(proj.def_id, (infer_ty, proj.args));
            // Recurse into bounds
            for (pred, pred_span) in self
                .cx()
                .explicit_item_bounds(proj.def_id)
                .iter_instantiated_copied(self.cx(), proj.args)
            {
                // Fold the bound itself first so nested RPITITs inside it are
                // also replaced, then normalize before registering.
                let pred = pred.fold_with(self);
                let pred = self.ocx.normalize(
                    &ObligationCause::misc(self.span, self.body_id),
                    self.param_env,
                    pred,
                );
                self.ocx.register_obligation(traits::Obligation::new(
                    self.cx(),
                    ObligationCause::new(
                        self.span,
                        self.body_id,
                        ObligationCauseCode::WhereClause(proj.def_id, pred_span),
                    ),
                    self.param_env,
                    pred,
                ));
            }
            infer_ty
        } else {
            ty.super_fold_with(self)
        }
    }
}
/// Fallible folder that rewrites the regions of an RPITIT hidden type
/// inferred in the *impl* so they refer to the identity regions of the
/// RPITIT (GAT) in the *trait* (see its `try_fold_region`).
struct RemapHiddenTyRegions<'tcx> {
    tcx: TyCtxt<'tcx>,
    /// Map from early/late params of the impl to identity regions of the RPITIT (GAT)
    /// in the trait.
    map: FxIndexMap<ty::Region<'tcx>, ty::Region<'tcx>>,
    /// Length of the impl's trait ref args; subtracted when renumbering
    /// early-bound region indices from trait to impl parameter space.
    num_trait_args: usize,
    /// Number of the impl's own generic params; added when renumbering
    /// early-bound region indices from trait to impl parameter space.
    num_impl_args: usize,
    /// Def id of the RPITIT (GAT) in the *trait*.
    def_id: DefId,
    /// Def id of the impl method which owns the opaque hidden type we're remapping.
    impl_m_def_id: DefId,
    /// The hidden type we're remapping. Useful for diagnostics.
    ty: Ty<'tcx>,
    /// Span of the return type. Useful for diagnostics.
    return_span: Span,
}
impl<'tcx> ty::FallibleTypeFolder<TyCtxt<'tcx>> for RemapHiddenTyRegions<'tcx> {
    type Error = ErrorGuaranteed;

    fn cx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    /// Maps a region of the impl-side hidden type to the corresponding
    /// early-bound identity region of the RPITIT in the trait, renumbering
    /// the early-bound index from the trait's parameter space into the
    /// impl's. Errors (with an emitted diagnostic) when the hidden type
    /// captures a lifetime the trait definition does not.
    fn try_fold_region(
        &mut self,
        region: ty::Region<'tcx>,
    ) -> Result<ty::Region<'tcx>, Self::Error> {
        match region.kind() {
            // Never remap bound regions or `'static`
            ty::ReBound(..) | ty::ReStatic | ty::ReError(_) => return Ok(region),
            // We always remap liberated late-bound regions from the function.
            ty::ReLateParam(_) => {}
            // Remap early-bound regions as long as they don't come from the `impl` itself,
            // in which case we don't really need to renumber them.
            ty::ReEarlyParam(ebr) => {
                if ebr.index as usize >= self.num_impl_args {
                    // Remap
                } else {
                    return Ok(region);
                }
            }
            ty::ReVar(_) | ty::RePlaceholder(_) | ty::ReErased => unreachable!(
                "should not have leaked vars or placeholders into hidden type of RPITIT"
            ),
        }
        // NOTE: the original source was corrupted here (`get(®ion)`); the
        // intended call is a lookup by reference: `get(&region)`.
        let e = if let Some(id_region) = self.map.get(&region) {
            if let ty::ReEarlyParam(e) = id_region.kind() {
                e
            } else {
                bug!(
                    "expected to map region {region} to early-bound identity region, but got {id_region}"
                );
            }
        } else {
            // The region is not one the trait-side RPITIT can name: report it
            // as an over-captured lifetime (pointing at the param if we can
            // find its definition on the impl method).
            let guar = match region.opt_param_def_id(self.tcx, self.impl_m_def_id) {
                Some(def_id) => {
                    let return_span = if let ty::Alias(ty::Opaque, opaque_ty) = self.ty.kind() {
                        self.tcx.def_span(opaque_ty.def_id)
                    } else {
                        self.return_span
                    };
                    self.tcx
                        .dcx()
                        .struct_span_err(
                            return_span,
                            "return type captures more lifetimes than trait definition",
                        )
                        .with_span_label(self.tcx.def_span(def_id), "this lifetime was captured")
                        .with_span_note(
                            self.tcx.def_span(self.def_id),
                            "hidden type must only reference lifetimes captured by this impl trait",
                        )
                        .with_note(format!("hidden type inferred to be `{}`", self.ty))
                        .emit()
                }
                None => {
                    // This code path is not reached in any tests, but may be
                    // reachable. If this is triggered, it should be converted
                    // to `delayed_bug` and the triggering case turned into a
                    // test.
                    self.tcx.dcx().bug("should've been able to remap region");
                }
            };
            return Err(guar);
        };
        // Renumber the trait-space early-bound index into the impl's
        // parameter space (see the comment at the `RemapHiddenTyRegions`
        // construction site).
        Ok(ty::Region::new_early_param(
            self.tcx,
            ty::EarlyParamRegion {
                name: e.name,
                index: (e.index as usize - self.num_trait_args + self.num_impl_args) as u32,
            },
        ))
    }
}
/// Gets the string for an explicit self declaration, e.g. "self", "&self",
/// etc.
fn get_self_string<'tcx, P>(self_arg_ty: Ty<'tcx>, is_self_ty: P) -> String
where
    P: Fn(Ty<'tcx>) -> bool,
{
    // Bare `self` by value.
    if is_self_ty(self_arg_ty) {
        return "self".to_owned();
    }
    // `&self` / `&mut self`: a reference whose referent is the self type.
    if let ty::Ref(_, referent, mutbl) = self_arg_ty.kind()
        && is_self_ty(*referent)
    {
        return match mutbl {
            hir::Mutability::Mut => "&mut self".to_owned(),
            hir::Mutability::Not => "&self".to_owned(),
        };
    }
    // Arbitrary self type, e.g. `self: Box<Self>`.
    format!("self: {self_arg_ty}")
}
// Emits E0053 ("method has an incompatible type for trait") for a signature
// mismatch between an impl method and its trait method, attaching targeted
// machine-applicable suggestions for receiver, parameter, or output-type
// mismatches where possible. Always returns the guarantee of the emitted
// error.
fn report_trait_method_mismatch<'tcx>(
    infcx: &InferCtxt<'tcx>,
    mut cause: ObligationCause<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    terr: TypeError<'tcx>,
    (trait_m, trait_sig): (ty::AssocItem, ty::FnSig<'tcx>),
    (impl_m, impl_sig): (ty::AssocItem, ty::FnSig<'tcx>),
    impl_trait_ref: ty::TraitRef<'tcx>,
) -> ErrorGuaranteed {
    let tcx = infcx.tcx;
    let (impl_err_span, trait_err_span) =
        extract_spans_for_error_reporting(infcx, terr, &cause, impl_m, trait_m);
    let mut diag = struct_span_code_err!(
        tcx.dcx(),
        impl_err_span,
        E0053,
        "method `{}` has an incompatible type for trait",
        trait_m.name()
    );
    match &terr {
        // Mismatch in argument 0 of a method: the receiver type differs.
        TypeError::ArgumentMutability(0) | TypeError::ArgumentSorts(_, 0)
            if trait_m.is_method() =>
        {
            let ty = trait_sig.inputs()[0];
            let sugg = get_self_string(ty, |ty| ty == impl_trait_ref.self_ty());
            // When the `impl` receiver is an arbitrary self type, like `self: Box<Self>`, the
            // span points only at the type `Box<Self`>, but we want to cover the whole
            // argument pattern and type.
            let (sig, body) = tcx.hir_expect_impl_item(impl_m.def_id.expect_local()).expect_fn();
            let span = tcx
                .hir_body_param_idents(body)
                .zip(sig.decl.inputs.iter())
                .map(|(param_ident, ty)| {
                    if let Some(param_ident) = param_ident {
                        param_ident.span.to(ty.span)
                    } else {
                        ty.span
                    }
                })
                .next()
                .unwrap_or(impl_err_span);
            diag.span_suggestion_verbose(
                span,
                "change the self-receiver type to match the trait",
                sugg,
                Applicability::MachineApplicable,
            );
        }
        TypeError::ArgumentMutability(i) | TypeError::ArgumentSorts(_, i) => {
            // `i == inputs().len()` indexes one past the last parameter, i.e.
            // the mismatch is in the return type.
            if trait_sig.inputs().len() == *i {
                // Suggestion to change output type. We do not suggest in `async` functions
                // to avoid complex logic or incorrect output.
                if let ImplItemKind::Fn(sig, _) =
                    &tcx.hir_expect_impl_item(impl_m.def_id.expect_local()).kind
                    && !sig.header.asyncness.is_async()
                {
                    let msg = "change the output type to match the trait";
                    let ap = Applicability::MachineApplicable;
                    match sig.decl.output {
                        hir::FnRetTy::DefaultReturn(sp) => {
                            let sugg = format!(" -> {}", trait_sig.output());
                            diag.span_suggestion_verbose(sp, msg, sugg, ap);
                        }
                        hir::FnRetTy::Return(hir_ty) => {
                            let sugg = trait_sig.output();
                            diag.span_suggestion_verbose(hir_ty.span, msg, sugg, ap);
                        }
                    };
                };
            } else if let Some(trait_ty) = trait_sig.inputs().get(*i) {
                // Ordinary parameter mismatch: suggest the trait's type.
                diag.span_suggestion_verbose(
                    impl_err_span,
                    "change the parameter type to match the trait",
                    trait_ty,
                    Applicability::MachineApplicable,
                );
            }
        }
        _ => {}
    }
    cause.span = impl_err_span;
    // Attach the standard expected-vs-found note comparing the two full
    // signatures.
    infcx.err_ctxt().note_type_err(
        &mut diag,
        &cause,
        trait_err_span.map(|sp| (sp, Cow::from("type in trait"), false)),
        Some(param_env.and(infer::ValuePairs::PolySigs(ExpectedFound {
            expected: ty::Binder::dummy(trait_sig),
            found: ty::Binder::dummy(impl_sig),
        }))),
        terr,
        false,
        None,
    );
    diag.emit()
}
/// Checks that the impl item declares the same number of early-bound
/// lifetime parameters as the trait item; reports a mismatch either via the
/// more precise early/late-boundedness diagnostic or, failing that, via
/// `LifetimesOrBoundsMismatchOnTrait`.
fn check_region_bounds_on_impl_item<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_m: ty::AssocItem,
    trait_m: ty::AssocItem,
    delay: bool,
) -> Result<(), ErrorGuaranteed> {
    let impl_generics = tcx.generics_of(impl_m.def_id);
    let trait_generics = tcx.generics_of(trait_m.def_id);
    // If the early-bound lifetime counts line up, there is nothing to report.
    let mismatch = match check_number_of_early_bound_regions(
        tcx,
        impl_m.def_id.expect_local(),
        trait_m.def_id,
        impl_generics,
        impl_generics.own_counts().lifetimes,
        trait_generics,
        trait_generics.own_counts().lifetimes,
    ) {
        Ok(()) => return Ok(()),
        Err(mismatch) => mismatch,
    };
    // Prefer the more targeted "early- vs late-bound" diagnostic when it
    // applies (and we are not delaying errors).
    if !delay && let Some(guar) = check_region_late_boundedness(tcx, impl_m, trait_m) {
        return Err(guar);
    }
    let CheckNumberOfEarlyBoundRegionsError { span, generics_span, bounds_span, where_span } =
        mismatch;
    Err(tcx
        .dcx()
        .create_err(LifetimesOrBoundsMismatchOnTrait {
            span,
            item_kind: impl_m.descr(),
            ident: impl_m.ident(tcx),
            generics_span,
            bounds_span,
            where_span,
        })
        .emit_unless_delay(delay))
}
/// Spans describing a mismatch in the number of early-bound lifetime
/// parameters between a trait item and its impl, consumed when rendering
/// `LifetimesOrBoundsMismatchOnTrait`.
pub(super) struct CheckNumberOfEarlyBoundRegionsError {
    /// Span of the impl item's generics.
    pub(super) span: Span,
    /// Span of the trait item's generics (its def span when not local).
    pub(super) generics_span: Span,
    /// Spans of outlives bounds in the trait's generics that may have
    /// changed a lifetime's early/late classification.
    pub(super) bounds_span: Vec<Span>,
    /// Span of the impl's where clause, when the impl's outlives-bound
    /// count differs from the trait's.
    pub(super) where_span: Option<Span>,
}
// Compares the number of early-bound lifetime parameters between a trait
// item and its impl. On mismatch, gathers the spans (impl generics, trait
// generics, relevant outlives bounds, impl where clause) needed to render
// the diagnostic, returned as `CheckNumberOfEarlyBoundRegionsError`.
pub(super) fn check_number_of_early_bound_regions<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_def_id: LocalDefId,
    trait_def_id: DefId,
    impl_generics: &Generics,
    impl_params: usize,
    trait_generics: &Generics,
    trait_params: usize,
) -> Result<(), CheckNumberOfEarlyBoundRegionsError> {
    debug!(?trait_generics, ?impl_generics);
    // Must have same number of early-bound lifetime parameters.
    // Unfortunately, if the user screws up the bounds, then this
    // will change classification between early and late. E.g.,
    // if in trait we have `<'a,'b:'a>`, and in impl we just have
    // `<'a,'b>`, then we have 2 early-bound lifetime parameters
    // in trait but 0 in the impl. But if we report "expected 2
    // but found 0" it's confusing, because it looks like there
    // are zero. Since I don't quite know how to phrase things at
    // the moment, give a kind of vague error message.
    if trait_params == impl_params {
        return Ok(());
    }
    let span = tcx
        .hir_get_generics(impl_def_id)
        .expect("expected impl item to have generics or else we can't compare them")
        .span;
    let mut generics_span = tcx.def_span(trait_def_id);
    let mut bounds_span = vec![];
    let mut where_span = None;
    // If the trait item is local, point at its generics and collect the
    // spans of its outlives bounds (these are what flip a lifetime from
    // late- to early-bound).
    if let Some(trait_node) = tcx.hir_get_if_local(trait_def_id)
        && let Some(trait_generics) = trait_node.generics()
    {
        generics_span = trait_generics.span;
        // FIXME: we could potentially look at the impl's bounds to not point at bounds that
        // *are* present in the impl.
        for p in trait_generics.predicates {
            match p.kind {
                hir::WherePredicateKind::BoundPredicate(hir::WhereBoundPredicate {
                    bounds,
                    ..
                })
                | hir::WherePredicateKind::RegionPredicate(hir::WhereRegionPredicate {
                    bounds,
                    ..
                }) => {
                    for b in *bounds {
                        if let hir::GenericBound::Outlives(lt) = b {
                            bounds_span.push(lt.ident.span);
                        }
                    }
                }
                _ => {}
            }
        }
        // Count the impl's outlives bounds the same way; if it has just as
        // many as the trait, the bound spans would not explain anything, so
        // drop them — but if the impl has a where clause, point at it.
        if let Some(impl_node) = tcx.hir_get_if_local(impl_def_id.into())
            && let Some(impl_generics) = impl_node.generics()
        {
            let mut impl_bounds = 0;
            for p in impl_generics.predicates {
                match p.kind {
                    hir::WherePredicateKind::BoundPredicate(hir::WhereBoundPredicate {
                        bounds,
                        ..
                    })
                    | hir::WherePredicateKind::RegionPredicate(hir::WhereRegionPredicate {
                        bounds,
                        ..
                    }) => {
                        for b in *bounds {
                            if let hir::GenericBound::Outlives(_) = b {
                                impl_bounds += 1;
                            }
                        }
                    }
                    _ => {}
                }
            }
            if impl_bounds == bounds_span.len() {
                bounds_span = vec![];
            } else if impl_generics.has_where_clause_predicates {
                where_span = Some(impl_generics.where_clause_span);
            }
        }
    }
    Err(CheckNumberOfEarlyBoundRegionsError { span, generics_span, bounds_span, where_span })
}
/// A lifetime parameter that is early-bound on one side of a trait/impl
/// pair but late-bound on the other. Fields: impl-side param def id,
/// trait-side param def id, and the early-bound identity region from the
/// side on which the parameter is early-bound.
#[allow(unused)]
enum LateEarlyMismatch<'tcx> {
    /// Early-bound in the impl, late-bound in the trait.
    EarlyInImpl(DefId, DefId, ty::Region<'tcx>),
    /// Late-bound in the impl, early-bound in the trait.
    LateInImpl(DefId, DefId, ty::Region<'tcx>),
}
// Detects lifetime parameters that are early-bound in the impl but
// late-bound in the trait (or vice versa) by equating the two liberated
// signatures and inspecting what the fresh region variables resolve to.
// Emits E0195 with detailed notes when any such mismatch is found;
// returns `None` when the signatures can't be equated or no mismatch
// exists.
fn check_region_late_boundedness<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_m: ty::AssocItem,
    trait_m: ty::AssocItem,
) -> Option<ErrorGuaranteed> {
    if !impl_m.is_fn() {
        return None;
    }
    let (infcx, param_env) = tcx
        .infer_ctxt()
        .build_with_typing_env(ty::TypingEnv::non_body_analysis(tcx, impl_m.def_id));
    // Instantiate both signatures with fresh infer vars and liberate their
    // late-bound regions (both relative to the impl method).
    let impl_m_args = infcx.fresh_args_for_item(DUMMY_SP, impl_m.def_id);
    let impl_m_sig = tcx.fn_sig(impl_m.def_id).instantiate(tcx, impl_m_args);
    let impl_m_sig = tcx.liberate_late_bound_regions(impl_m.def_id, impl_m_sig);
    let trait_m_args = infcx.fresh_args_for_item(DUMMY_SP, trait_m.def_id);
    let trait_m_sig = tcx.fn_sig(trait_m.def_id).instantiate(tcx, trait_m_args);
    let trait_m_sig = tcx.liberate_late_bound_regions(impl_m.def_id, trait_m_sig);
    let ocx = ObligationCtxt::new(&infcx);
    // Equate the signatures so that we can infer whether a late-bound param was present where
    // an early-bound param was expected, since we replace the late-bound lifetimes with
    // `ReLateParam`, and early-bound lifetimes with infer vars, so the early-bound args will
    // resolve to `ReLateParam` if there is a mismatch.
    let Ok(()) = ocx.eq(
        &ObligationCause::dummy(),
        param_env,
        ty::Binder::dummy(trait_m_sig),
        ty::Binder::dummy(impl_m_sig),
    ) else {
        return None;
    };
    let errors = ocx.try_evaluate_obligations();
    if !errors.is_empty() {
        return None;
    }
    let mut mismatched = vec![];
    // An impl-side early-bound lifetime whose infer var resolved to a named
    // `ReLateParam` must be late-bound in the trait.
    let impl_generics = tcx.generics_of(impl_m.def_id);
    for (id_arg, arg) in
        std::iter::zip(ty::GenericArgs::identity_for_item(tcx, impl_m.def_id), impl_m_args)
    {
        if let ty::GenericArgKind::Lifetime(r) = arg.kind()
            && let ty::ReVar(vid) = r.kind()
            && let r = infcx
                .inner
                .borrow_mut()
                .unwrap_region_constraints()
                .opportunistic_resolve_var(tcx, vid)
            && let ty::ReLateParam(ty::LateParamRegion {
                kind: ty::LateParamRegionKind::Named(trait_param_def_id),
                ..
            }) = r.kind()
            && let ty::ReEarlyParam(ebr) = id_arg.expect_region().kind()
        {
            mismatched.push(LateEarlyMismatch::EarlyInImpl(
                impl_generics.region_param(ebr, tcx).def_id,
                trait_param_def_id,
                id_arg.expect_region(),
            ));
        }
    }
    // Symmetric check: a trait-side early-bound lifetime that resolved to a
    // named `ReLateParam` must be late-bound in the impl.
    let trait_generics = tcx.generics_of(trait_m.def_id);
    for (id_arg, arg) in
        std::iter::zip(ty::GenericArgs::identity_for_item(tcx, trait_m.def_id), trait_m_args)
    {
        if let ty::GenericArgKind::Lifetime(r) = arg.kind()
            && let ty::ReVar(vid) = r.kind()
            && let r = infcx
                .inner
                .borrow_mut()
                .unwrap_region_constraints()
                .opportunistic_resolve_var(tcx, vid)
            && let ty::ReLateParam(ty::LateParamRegion {
                kind: ty::LateParamRegionKind::Named(impl_param_def_id),
                ..
            }) = r.kind()
            && let ty::ReEarlyParam(ebr) = id_arg.expect_region().kind()
        {
            mismatched.push(LateEarlyMismatch::LateInImpl(
                impl_param_def_id,
                trait_generics.region_param(ebr, tcx).def_id,
                id_arg.expect_region(),
            ));
        }
    }
    if mismatched.is_empty() {
        return None;
    }
    // Primary spans: the impl-side parameter for every mismatch.
    let spans: Vec<_> = mismatched
        .iter()
        .map(|param| {
            let (LateEarlyMismatch::EarlyInImpl(impl_param_def_id, ..)
            | LateEarlyMismatch::LateInImpl(impl_param_def_id, ..)) = param;
            tcx.def_span(impl_param_def_id)
        })
        .collect();
    let mut diag = tcx
        .dcx()
        .struct_span_err(spans, "lifetime parameters do not match the trait definition")
        .with_note("lifetime parameters differ in whether they are early- or late-bound")
        .with_code(E0195);
    // One labeled multispan note per mismatch, pointing at both sides and —
    // when a predicate on the early-bound side explains the classification —
    // at the bound responsible.
    for mismatch in mismatched {
        match mismatch {
            LateEarlyMismatch::EarlyInImpl(
                impl_param_def_id,
                trait_param_def_id,
                early_bound_region,
            ) => {
                let mut multispan = MultiSpan::from_spans(vec![
                    tcx.def_span(impl_param_def_id),
                    tcx.def_span(trait_param_def_id),
                ]);
                multispan
                    .push_span_label(tcx.def_span(tcx.parent(impl_m.def_id)), "in this impl...");
                multispan
                    .push_span_label(tcx.def_span(tcx.parent(trait_m.def_id)), "in this trait...");
                multispan.push_span_label(
                    tcx.def_span(impl_param_def_id),
                    format!("`{}` is early-bound", tcx.item_name(impl_param_def_id)),
                );
                multispan.push_span_label(
                    tcx.def_span(trait_param_def_id),
                    format!("`{}` is late-bound", tcx.item_name(trait_param_def_id)),
                );
                if let Some(span) =
                    find_region_in_predicates(tcx, impl_m.def_id, early_bound_region)
                {
                    multispan.push_span_label(
                        span,
                        format!(
                            "this lifetime bound makes `{}` early-bound",
                            tcx.item_name(impl_param_def_id)
                        ),
                    );
                }
                diag.span_note(
                    multispan,
                    format!(
                        "`{}` differs between the trait and impl",
                        tcx.item_name(impl_param_def_id)
                    ),
                );
            }
            LateEarlyMismatch::LateInImpl(
                impl_param_def_id,
                trait_param_def_id,
                early_bound_region,
            ) => {
                let mut multispan = MultiSpan::from_spans(vec![
                    tcx.def_span(impl_param_def_id),
                    tcx.def_span(trait_param_def_id),
                ]);
                multispan
                    .push_span_label(tcx.def_span(tcx.parent(impl_m.def_id)), "in this impl...");
                multispan
                    .push_span_label(tcx.def_span(tcx.parent(trait_m.def_id)), "in this trait...");
                multispan.push_span_label(
                    tcx.def_span(impl_param_def_id),
                    format!("`{}` is late-bound", tcx.item_name(impl_param_def_id)),
                );
                multispan.push_span_label(
                    tcx.def_span(trait_param_def_id),
                    format!("`{}` is early-bound", tcx.item_name(trait_param_def_id)),
                );
                if let Some(span) =
                    find_region_in_predicates(tcx, trait_m.def_id, early_bound_region)
                {
                    multispan.push_span_label(
                        span,
                        format!(
                            "this lifetime bound makes `{}` early-bound",
                            tcx.item_name(trait_param_def_id)
                        ),
                    );
                }
                diag.span_note(
                    multispan,
                    format!(
                        "`{}` differs between the trait and impl",
                        tcx.item_name(impl_param_def_id)
                    ),
                );
            }
        }
    }
    Some(diag.emit())
}
/// Returns the span of the first explicit predicate of `def_id` that
/// mentions `early_bound_region`, or `None` if no predicate does.
fn find_region_in_predicates<'tcx>(
    tcx: TyCtxt<'tcx>,
    def_id: DefId,
    early_bound_region: ty::Region<'tcx>,
) -> Option<Span> {
    /// Visitor that breaks as soon as it sees the sought region.
    struct FindRegion<'tcx>(ty::Region<'tcx>);
    impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for FindRegion<'tcx> {
        type Result = ControlFlow<()>;
        fn visit_region(&mut self, r: ty::Region<'tcx>) -> Self::Result {
            if r == self.0 { ControlFlow::Break(()) } else { ControlFlow::Continue(()) }
        }
    }
    tcx.explicit_predicates_of(def_id)
        .instantiate_identity(tcx)
        .into_iter()
        .find_map(|(pred, span)| {
            pred.visit_with(&mut FindRegion(early_bound_region)).is_break().then_some(span)
        })
}
#[instrument(level = "debug", skip(infcx))]
/// Picks the impl-side span (and, when the trait is local, the trait-side
/// span) to highlight for a signature mismatch: the offending argument or
/// return-type span for argument errors, otherwise the obligation cause's
/// span and the trait method's def span.
fn extract_spans_for_error_reporting<'tcx>(
    infcx: &infer::InferCtxt<'tcx>,
    terr: TypeError<'_>,
    cause: &ObligationCause<'tcx>,
    impl_m: ty::AssocItem,
    trait_m: ty::AssocItem,
) -> (Span, Option<Span>) {
    let tcx = infcx.tcx;
    // Spans of the input types followed by the output type, so index
    // `inputs.len()` addresses the return type.
    let (impl_sig, _) = tcx.hir_expect_impl_item(impl_m.def_id.expect_local()).expect_fn();
    let impl_arg_spans: Vec<Span> = impl_sig
        .decl
        .inputs
        .iter()
        .map(|t| t.span)
        .chain(iter::once(impl_sig.decl.output.span()))
        .collect();
    let trait_arg_spans: Option<Vec<Span>> = trait_m.def_id.as_local().map(|def_id| {
        let (sig, _) = tcx.hir_expect_trait_item(def_id).expect_fn();
        sig.decl.inputs.iter().map(|t| t.span).chain(iter::once(sig.decl.output.span())).collect()
    });
    match terr {
        TypeError::ArgumentMutability(i) | TypeError::ArgumentSorts(_, i) => {
            (impl_arg_spans[i], trait_arg_spans.and_then(|spans| spans.get(i).copied()))
        }
        _ => (cause.span, tcx.hir_span_if_local(trait_m.def_id)),
    }
}
// Reports E0185/E0186 when exactly one of the trait method and the impl
// method has a `self` receiver. Returns Ok(()) when both or neither do.
fn compare_self_type<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_m: ty::AssocItem,
    trait_m: ty::AssocItem,
    impl_trait_ref: ty::TraitRef<'tcx>,
    delay: bool,
) -> Result<(), ErrorGuaranteed> {
    // Try to give more informative error messages about self typing
    // mismatches. Note that any mismatch will also be detected
    // below, where we construct a canonical function type that
    // includes the self parameter as a normal parameter. It's just
    // that the error messages you get out of this code are a bit more
    // inscrutable, particularly for cases where one method has no
    // self.
    //
    // Renders the receiver of `method` for diagnostics ("self", "&self",
    // "&mut self", or "self: Ty"), comparing against the item's notion of
    // the self type (the impl's self type, or `Self` in a trait).
    let self_string = |method: ty::AssocItem| {
        let untransformed_self_ty = match method.container {
            ty::AssocContainer::InherentImpl | ty::AssocContainer::TraitImpl(_) => {
                impl_trait_ref.self_ty()
            }
            ty::AssocContainer::Trait => tcx.types.self_param,
        };
        let self_arg_ty = tcx.fn_sig(method.def_id).instantiate_identity().input(0);
        let (infcx, param_env) = tcx
            .infer_ctxt()
            .build_with_typing_env(ty::TypingEnv::non_body_analysis(tcx, method.def_id));
        let self_arg_ty = tcx.liberate_late_bound_regions(method.def_id, self_arg_ty);
        let can_eq_self = |ty| infcx.can_eq(param_env, untransformed_self_ty, ty);
        get_self_string(self_arg_ty, can_eq_self)
    };
    match (trait_m.is_method(), impl_m.is_method()) {
        // Both or neither have a receiver: nothing to report here.
        (false, false) | (true, true) => {}
        // Receiver in the impl but not the trait (E0185).
        (false, true) => {
            let self_descr = self_string(impl_m);
            let impl_m_span = tcx.def_span(impl_m.def_id);
            let mut err = struct_span_code_err!(
                tcx.dcx(),
                impl_m_span,
                E0185,
                "method `{}` has a `{}` declaration in the impl, but not in the trait",
                trait_m.name(),
                self_descr
            );
            err.span_label(impl_m_span, format!("`{self_descr}` used in impl"));
            if let Some(span) = tcx.hir_span_if_local(trait_m.def_id) {
                err.span_label(span, format!("trait method declared without `{self_descr}`"));
            } else {
                err.note_trait_signature(trait_m.name(), trait_m.signature(tcx));
            }
            return Err(err.emit_unless_delay(delay));
        }
        // Receiver in the trait but not the impl (E0186).
        (true, false) => {
            let self_descr = self_string(trait_m);
            let impl_m_span = tcx.def_span(impl_m.def_id);
            let mut err = struct_span_code_err!(
                tcx.dcx(),
                impl_m_span,
                E0186,
                "method `{}` has a `{}` declaration in the trait, but not in the impl",
                trait_m.name(),
                self_descr
            );
            err.span_label(impl_m_span, format!("expected `{self_descr}` in impl"));
            if let Some(span) = tcx.hir_span_if_local(trait_m.def_id) {
                err.span_label(span, format!("`{self_descr}` used in trait"));
            } else {
                err.note_trait_signature(trait_m.name(), trait_m.signature(tcx));
            }
            return Err(err.emit_unless_delay(delay));
        }
    }
    Ok(())
}
/// Checks that the number of generics on a given assoc item in a trait impl is the same
/// as the number of generics on the respective assoc item in the trait definition.
///
/// For example this code emits the errors in the following code:
/// ```rust,compile_fail
/// trait Trait {
///     fn foo();
///     type Assoc<T>;
/// }
///
/// impl Trait for () {
///     fn foo<T>() {}
///     //~^ error
///     type Assoc = u32;
///     //~^ error
/// }
/// ```
///
/// Notably this does not error on `foo<T>` implemented as `foo<const N: u8>` or
/// `foo<const N: u8>` implemented as `foo<const N: u32>`. This is handled in
/// [`compare_generic_param_kinds`]. This function also does not handle lifetime parameters
fn compare_number_of_generics<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_: ty::AssocItem,
    trait_: ty::AssocItem,
    delay: bool,
) -> Result<(), ErrorGuaranteed> {
    let trait_own_counts = tcx.generics_of(trait_.def_id).own_counts();
    let impl_own_counts = tcx.generics_of(impl_.def_id).own_counts();
    // This avoids us erroring on `foo<T>` implemented as `foo<const N: u8>` as this is implemented
    // in `compare_generic_param_kinds` which will give a nicer error message than something like:
    // "expected 1 type parameter, found 0 type parameters"
    if (trait_own_counts.types + trait_own_counts.consts)
        == (impl_own_counts.types + impl_own_counts.consts)
    {
        return Ok(());
    }
    // We never need to emit a separate error for RPITITs, since if an RPITIT
    // has mismatched type or const generic arguments, then the method that it's
    // inheriting the generics from will also have mismatched arguments, and
    // we'll report an error for that instead. Delay a bug for safety, though.
    if trait_.is_impl_trait_in_trait() {
        // FIXME: no tests trigger this. If you find example code that does
        // trigger this, please add it to the test suite.
        tcx.dcx()
            .bug("errors comparing numbers of generics of trait/impl functions were not emitted");
    }
    let matchings = [
        ("type", trait_own_counts.types, impl_own_counts.types),
        ("const", trait_own_counts.consts, impl_own_counts.consts),
    ];
    let item_kind = impl_.descr();
    let mut err_occurred = None;
    // Emit a separate E0049 per mismatched kind (type params, const params).
    for (kind, trait_count, impl_count) in matchings {
        if impl_count != trait_count {
            // Spans of the item's generic parameters, falling back to the
            // whole generics span when there are none to point at.
            let arg_spans = |item: &ty::AssocItem, generics: &hir::Generics<'_>| {
                let mut spans = generics
                    .params
                    .iter()
                    .filter(|p| match p.kind {
                        hir::GenericParamKind::Lifetime {
                            kind: hir::LifetimeParamKind::Elided(_),
                        } => {
                            // A fn can have an arbitrary number of extra elided lifetimes for the
                            // same signature.
                            !item.is_fn()
                        }
                        _ => true,
                    })
                    .map(|p| p.span)
                    .collect::<Vec<Span>>();
                if spans.is_empty() {
                    spans = vec![generics.span]
                }
                spans
            };
            // When the trait is local we can point at its parameters and its
            // `impl Trait` (synthetic) params; otherwise just its def span.
            let (trait_spans, impl_trait_spans) = if let Some(def_id) = trait_.def_id.as_local() {
                let trait_item = tcx.hir_expect_trait_item(def_id);
                let arg_spans: Vec<Span> = arg_spans(&trait_, trait_item.generics);
                let impl_trait_spans: Vec<Span> = trait_item
                    .generics
                    .params
                    .iter()
                    .filter_map(|p| match p.kind {
                        GenericParamKind::Type { synthetic: true, .. } => Some(p.span),
                        _ => None,
                    })
                    .collect();
                (Some(arg_spans), impl_trait_spans)
            } else {
                let trait_span = tcx.hir_span_if_local(trait_.def_id);
                (trait_span.map(|s| vec![s]), vec![])
            };
            let impl_item = tcx.hir_expect_impl_item(impl_.def_id.expect_local());
            // Synthetic (`impl Trait`-introduced) type params on the impl side.
            let impl_item_impl_trait_spans: Vec<Span> = impl_item
                .generics
                .params
                .iter()
                .filter_map(|p| match p.kind {
                    GenericParamKind::Type { synthetic: true, .. } => Some(p.span),
                    _ => None,
                })
                .collect();
            let spans = arg_spans(&impl_, impl_item.generics);
            let span = spans.first().copied();
            let mut err = tcx.dcx().struct_span_err(
                spans,
                format!(
                    "{} `{}` has {} {kind} parameter{} but its trait \
                     declaration has {} {kind} parameter{}",
                    item_kind,
                    trait_.name(),
                    impl_count,
                    pluralize!(impl_count),
                    trait_count,
                    pluralize!(trait_count),
                    kind = kind,
                ),
            );
            err.code(E0049);
            let msg =
                format!("expected {trait_count} {kind} parameter{}", pluralize!(trait_count),);
            // Put the "expected N" message on the first trait span, leave the
            // rest as bare labels.
            if let Some(spans) = trait_spans {
                let mut spans = spans.iter();
                if let Some(span) = spans.next() {
                    err.span_label(*span, msg);
                }
                for span in spans {
                    err.span_label(*span, "");
                }
            } else {
                err.span_label(tcx.def_span(trait_.def_id), msg);
            }
            if let Some(span) = span {
                err.span_label(
                    span,
                    format!("found {} {} parameter{}", impl_count, kind, pluralize!(impl_count),),
                );
            }
            for span in impl_trait_spans.iter().chain(impl_item_impl_trait_spans.iter()) {
                err.span_label(*span, "`impl Trait` introduces an implicit type parameter");
            }
            let reported = err.emit_unless_delay(delay);
            err_occurred = Some(reported);
        }
    }
    if let Some(reported) = err_occurred { Err(reported) } else { Ok(()) }
}
// Reports E0050 when the impl method takes a different number of parameters
// than the trait method, pointing at the parameter lists on both sides.
fn compare_number_of_method_arguments<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_m: ty::AssocItem,
    trait_m: ty::AssocItem,
    delay: bool,
) -> Result<(), ErrorGuaranteed> {
    let impl_m_fty = tcx.fn_sig(impl_m.def_id);
    let trait_m_fty = tcx.fn_sig(trait_m.def_id);
    let trait_number_args = trait_m_fty.skip_binder().inputs().skip_binder().len();
    let impl_number_args = impl_m_fty.skip_binder().inputs().skip_binder().len();
    if trait_number_args != impl_number_args {
        // Span covering the trait method's parameter list (from the first
        // parameter through the last), when the trait is local; otherwise
        // the trait method's own span if available.
        let trait_span = trait_m
            .def_id
            .as_local()
            .and_then(|def_id| {
                let (trait_m_sig, _) = &tcx.hir_expect_trait_item(def_id).expect_fn();
                let pos = trait_number_args.saturating_sub(1);
                trait_m_sig.decl.inputs.get(pos).map(|arg| {
                    if pos == 0 {
                        arg.span
                    } else {
                        arg.span.with_lo(trait_m_sig.decl.inputs[0].span.lo())
                    }
                })
            })
            .or_else(|| tcx.hir_span_if_local(trait_m.def_id));
        // Same computation for the impl side, falling back to the impl
        // method's def span when it has no parameters.
        let (impl_m_sig, _) = &tcx.hir_expect_impl_item(impl_m.def_id.expect_local()).expect_fn();
        let pos = impl_number_args.saturating_sub(1);
        let impl_span = impl_m_sig
            .decl
            .inputs
            .get(pos)
            .map(|arg| {
                if pos == 0 {
                    arg.span
                } else {
                    arg.span.with_lo(impl_m_sig.decl.inputs[0].span.lo())
                }
            })
            .unwrap_or_else(|| tcx.def_span(impl_m.def_id));
        let mut err = struct_span_code_err!(
            tcx.dcx(),
            impl_span,
            E0050,
            "method `{}` has {} but the declaration in trait `{}` has {}",
            trait_m.name(),
            potentially_plural_count(impl_number_args, "parameter"),
            tcx.def_path_str(trait_m.def_id),
            trait_number_args
        );
        if let Some(trait_span) = trait_span {
            err.span_label(
                trait_span,
                format!(
                    "trait requires {}",
                    potentially_plural_count(trait_number_args, "parameter")
                ),
            );
        } else {
            err.note_trait_signature(trait_m.name(), trait_m.signature(tcx));
        }
        err.span_label(
            impl_span,
            format!(
                "expected {}, found {}",
                potentially_plural_count(trait_number_args, "parameter"),
                impl_number_args
            ),
        );
        return Err(err.emit_unless_delay(delay));
    }
    Ok(())
}
// Reports E0643 when a type parameter is synthetic (`impl Trait` in
// argument position) on one of trait/impl method but explicit on the other,
// suggesting a rewrite in either direction where possible.
fn compare_synthetic_generics<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_m: ty::AssocItem,
    trait_m: ty::AssocItem,
    delay: bool,
) -> Result<(), ErrorGuaranteed> {
    // FIXME(chrisvittal) Clean up this function, list of FIXME items:
    //  1. Better messages for the span labels
    //  2. Explanation as to what is going on
    // If we get here, we already have the same number of generics, so the zip will
    // be okay.
    let mut error_found = None;
    let impl_m_generics = tcx.generics_of(impl_m.def_id);
    let trait_m_generics = tcx.generics_of(trait_m.def_id);
    // Pair each type param's def id with its `synthetic` flag; lifetimes and
    // consts are irrelevant here.
    let impl_m_type_params =
        impl_m_generics.own_params.iter().filter_map(|param| match param.kind {
            GenericParamDefKind::Type { synthetic, .. } => Some((param.def_id, synthetic)),
            GenericParamDefKind::Lifetime | GenericParamDefKind::Const { .. } => None,
        });
    let trait_m_type_params =
        trait_m_generics.own_params.iter().filter_map(|param| match param.kind {
            GenericParamDefKind::Type { synthetic, .. } => Some((param.def_id, synthetic)),
            GenericParamDefKind::Lifetime | GenericParamDefKind::Const { .. } => None,
        });
    for ((impl_def_id, impl_synthetic), (trait_def_id, trait_synthetic)) in
        iter::zip(impl_m_type_params, trait_m_type_params)
    {
        if impl_synthetic != trait_synthetic {
            let impl_def_id = impl_def_id.expect_local();
            let impl_span = tcx.def_span(impl_def_id);
            let trait_span = tcx.def_span(trait_def_id);
            let mut err = struct_span_code_err!(
                tcx.dcx(),
                impl_span,
                E0643,
                "method `{}` has incompatible signature for trait",
                trait_m.name()
            );
            err.span_label(trait_span, "declaration in trait here");
            if impl_synthetic {
                // The case where the impl method uses `impl Trait` but the trait method uses
                // explicit generics
                err.span_label(impl_span, "expected generic parameter, found `impl Trait`");
                // `try` block: each `?` silently abandons the suggestion if
                // any piece of source info is unavailable.
                try {
                    // try taking the name from the trait impl
                    // FIXME: this is obviously suboptimal since the name can already be used
                    // as another generic argument
                    let new_name = tcx.opt_item_name(trait_def_id)?;
                    let trait_m = trait_m.def_id.as_local()?;
                    let trait_m = tcx.hir_expect_trait_item(trait_m);
                    let impl_m = impl_m.def_id.as_local()?;
                    let impl_m = tcx.hir_expect_impl_item(impl_m);
                    // in case there are no generics, take the spot between the function name
                    // and the opening paren of the argument list
                    let new_generics_span = tcx.def_ident_span(impl_def_id)?.shrink_to_hi();
                    // in case there are generics, just replace them
                    let generics_span = impl_m.generics.span.substitute_dummy(new_generics_span);
                    // replace with the generics from the trait
                    let new_generics =
                        tcx.sess.source_map().span_to_snippet(trait_m.generics.span).ok()?;
                    err.multipart_suggestion(
                        "try changing the `impl Trait` argument to a generic parameter",
                        vec![
                            // replace `impl Trait` with `T`
                            (impl_span, new_name.to_string()),
                            // replace impl method generics with trait method generics
                            // This isn't quite right, as users might have changed the names
                            // of the generics, but it works for the common case
                            (generics_span, new_generics),
                        ],
                        Applicability::MaybeIncorrect,
                    );
                };
            } else {
                // The case where the trait method uses `impl Trait`, but the impl method uses
                // explicit generics.
                err.span_label(impl_span, "expected `impl Trait`, found generic parameter");
                // Same best-effort `try` pattern: find where the explicit
                // param is used in the argument list and inline its bounds.
                try {
                    let impl_m = impl_m.def_id.as_local()?;
                    let impl_m = tcx.hir_expect_impl_item(impl_m);
                    let (sig, _) = impl_m.expect_fn();
                    let input_tys = sig.decl.inputs;
                    // HIR visitor that finds the first use of the given type
                    // param in a type.
                    struct Visitor(hir::def_id::LocalDefId);
                    impl<'v> intravisit::Visitor<'v> for Visitor {
                        type Result = ControlFlow<Span>;
                        fn visit_ty(&mut self, ty: &'v hir::Ty<'v, AmbigArg>) -> Self::Result {
                            if let hir::TyKind::Path(hir::QPath::Resolved(None, path)) = ty.kind
                                && let Res::Def(DefKind::TyParam, def_id) = path.res
                                && def_id == self.0.to_def_id()
                            {
                                ControlFlow::Break(ty.span)
                            } else {
                                intravisit::walk_ty(self, ty)
                            }
                        }
                    }
                    let span = input_tys
                        .iter()
                        .find_map(|ty| Visitor(impl_def_id).visit_ty_unambig(ty).break_value())?;
                    let bounds = impl_m.generics.bounds_for_param(impl_def_id).next()?.bounds;
                    let bounds = bounds.first()?.span().to(bounds.last()?.span());
                    let bounds = tcx.sess.source_map().span_to_snippet(bounds).ok()?;
                    err.multipart_suggestion(
                        "try removing the generic parameter and using `impl Trait` instead",
                        vec![
                            // delete generic parameters
                            (impl_m.generics.span, String::new()),
                            // replace param usage with `impl Trait`
                            (span, format!("impl {bounds}")),
                        ],
                        Applicability::MaybeIncorrect,
                    );
                };
            }
            error_found = Some(err.emit_unless_delay(delay));
        }
    }
    if let Some(reported) = error_found { Err(reported) } else { Ok(()) }
}
/// Checks that all parameters in the generics of a given assoc item in a trait impl have
/// the same kind as the respective generic parameter in the trait def.
///
/// For example all 4 errors in the following code are emitted here:
/// ```rust,ignore (pseudo-Rust)
/// trait Foo {
/// fn foo<const N: u8>();
/// type Bar<const N: u8>;
/// fn baz<const N: u32>();
/// type Blah<T>;
/// }
///
/// impl Foo for () {
/// fn foo<const N: u64>() {}
/// //~^ error
/// type Bar<const N: u64> = ();
/// //~^ error
/// fn baz<T>() {}
/// //~^ error
/// type Blah<const N: i64> = u32;
/// //~^ error
/// }
/// ```
///
/// This function does not handle lifetime parameters
fn compare_generic_param_kinds<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_item: ty::AssocItem,
    trait_item: ty::AssocItem,
    delay: bool,
) -> Result<(), ErrorGuaranteed> {
    assert_eq!(impl_item.tag(), trait_item.tag());

    // Type and const generic params of `def_id`, in declaration order.
    // Lifetimes are deliberately excluded: their compatibility is checked
    // elsewhere (`check_region_bounds_on_impl_item`).
    let ty_const_params_of = |def_id| {
        tcx.generics_of(def_id).own_params.iter().filter(|param| {
            matches!(
                param.kind,
                GenericParamDefKind::Const { .. } | GenericParamDefKind::Type { .. }
            )
        })
    };

    for (param_impl, param_trait) in
        iter::zip(ty_const_params_of(impl_item.def_id), ty_const_params_of(trait_item.def_id))
    {
        use GenericParamDefKind::*;
        // NOTE(review): the source text was garbled to `(¶m_impl.kind, ...)`
        // by an HTML-entity mangling of `&param`; restored to valid borrows.
        if match (&param_impl.kind, &param_trait.kind) {
            // Both const params: incompatible when their declared types differ.
            (Const { .. }, Const { .. })
                if tcx.type_of(param_impl.def_id) != tcx.type_of(param_trait.def_id) =>
            {
                true
            }
            // A const param never matches a type param (and vice versa).
            (Const { .. }, Type { .. }) | (Type { .. }, Const { .. }) => true,
            // this is exhaustive so that anyone adding new generic param kinds knows
            // to make sure this error is reported for them.
            (Const { .. }, Const { .. }) | (Type { .. }, Type { .. }) => false,
            (Lifetime { .. }, _) | (_, Lifetime { .. }) => {
                bug!("lifetime params are expected to be filtered by `ty_const_params_of`")
            }
        } {
            let param_impl_span = tcx.def_span(param_impl.def_id);
            let param_trait_span = tcx.def_span(param_trait.def_id);

            let mut err = struct_span_code_err!(
                tcx.dcx(),
                param_impl_span,
                E0053,
                "{} `{}` has an incompatible generic parameter for trait `{}`",
                impl_item.descr(),
                trait_item.name(),
                &tcx.def_path_str(tcx.parent(trait_item.def_id))
            );

            // Describes a parameter for the "expected ... found ..." labels below.
            let make_param_message = |prefix: &str, param: &ty::GenericParamDef| match param.kind {
                Const { .. } => {
                    format!(
                        "{} const parameter of type `{}`",
                        prefix,
                        tcx.type_of(param.def_id).instantiate_identity()
                    )
                }
                Type { .. } => format!("{prefix} type parameter"),
                Lifetime { .. } => span_bug!(
                    tcx.def_span(param.def_id),
                    "lifetime params are expected to be filtered by `ty_const_params_of`"
                ),
            };

            let trait_header_span = tcx.def_ident_span(tcx.parent(trait_item.def_id)).unwrap();
            err.span_label(trait_header_span, "");
            err.span_label(param_trait_span, make_param_message("expected", param_trait));

            let impl_header_span = tcx.def_span(tcx.parent(impl_item.def_id));
            err.span_label(impl_header_span, "");
            err.span_label(param_impl_span, make_param_message("found", param_impl));

            // Report only the first mismatched pair; later pairs are likely
            // follow-on noise once the user fixes this one.
            let reported = err.emit_unless_delay(delay);
            return Err(reported);
        }
    }

    Ok(())
}
fn compare_impl_const<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_const_item: ty::AssocItem,
    trait_const_item: ty::AssocItem,
    impl_trait_ref: ty::TraitRef<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    // Run the per-item checks in order; the first failure short-circuits,
    // exactly like the original `?`-chained form.
    compare_type_const(tcx, impl_const_item, trait_const_item)
        .and_then(|()| compare_number_of_generics(tcx, impl_const_item, trait_const_item, false))
        .and_then(|()| compare_generic_param_kinds(tcx, impl_const_item, trait_const_item, false))
        .and_then(|()| {
            check_region_bounds_on_impl_item(tcx, impl_const_item, trait_const_item, false)
        })
        .and_then(|()| {
            compare_const_predicate_entailment(
                tcx,
                impl_const_item,
                trait_const_item,
                impl_trait_ref,
            )
        })
}
fn compare_type_const<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_const_item: ty::AssocItem,
    trait_const_item: ty::AssocItem,
) -> Result<(), ErrorGuaranteed> {
    // Only one combination is an error: the trait declares the const as
    // `type const` but the impl does not mark it as such. The reverse
    // direction (impl stricter than trait) is fine.
    let impl_is_type_const = tcx.is_type_const(impl_const_item.def_id);
    match tcx.type_const_span(trait_const_item.def_id) {
        Some(trait_type_const_span) if !impl_is_type_const => {
            let note_spans = MultiSpan::from_spans(vec![
                tcx.def_span(trait_const_item.def_id),
                trait_type_const_span,
            ]);
            let diag = tcx
                .dcx()
                .struct_span_err(
                    tcx.def_span(impl_const_item.def_id),
                    "implementation of a `type const` must also be marked as `type const`",
                )
                .with_span_note(note_spans, "trait declaration of const is marked as `type const`");
            Err(diag.emit())
        }
        _ => Ok(()),
    }
}
/// The equivalent of [compare_method_predicate_entailment], but for associated constants
/// instead of associated functions.
// FIXME(generic_const_items): If possible extract the common parts of `compare_{type,const}_predicate_entailment`.
#[instrument(level = "debug", skip(tcx))]
fn compare_const_predicate_entailment<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_ct: ty::AssocItem,
    trait_ct: ty::AssocItem,
    impl_trait_ref: ty::TraitRef<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    let impl_ct_def_id = impl_ct.def_id.expect_local();
    let impl_ct_span = tcx.def_span(impl_ct_def_id);
    // The below is for the most part highly similar to the procedure
    // for methods above. It is simpler in many respects, especially
    // because we shouldn't really have to deal with lifetimes or
    // predicates. In fact some of this should probably be put into
    // shared functions because of DRY violations...
    // Args that re-express the trait const's generics in terms of the impl's.
    let trait_to_impl_args = GenericArgs::identity_for_item(tcx, impl_ct.def_id).rebase_onto(
        tcx,
        impl_ct.container_id(tcx),
        impl_trait_ref.args,
    );
    // Create a parameter environment that represents the implementation's
    // associated const.
    let impl_ty = tcx.type_of(impl_ct_def_id).instantiate_identity();
    let trait_ty = tcx.type_of(trait_ct.def_id).instantiate(tcx, trait_to_impl_args);
    let code = ObligationCauseCode::CompareImplItem {
        impl_item_def_id: impl_ct_def_id,
        trait_item_def_id: trait_ct.def_id,
        kind: impl_ct.kind,
    };
    // `cause` is mutable: on type mismatch its span is narrowed below to
    // point at just the type in the impl item.
    let mut cause = ObligationCause::new(impl_ct_span, impl_ct_def_id, code.clone());
    let impl_ct_predicates = tcx.predicates_of(impl_ct.def_id);
    let trait_ct_predicates = tcx.predicates_of(trait_ct.def_id);
    // The predicates declared by the impl definition, the trait and the
    // associated const in the trait are assumed.
    let impl_predicates = tcx.predicates_of(impl_ct_predicates.parent.unwrap());
    let mut hybrid_preds = impl_predicates.instantiate_identity(tcx).predicates;
    hybrid_preds.extend(
        trait_ct_predicates
            .instantiate_own(tcx, trait_to_impl_args)
            .map(|(predicate, _)| predicate),
    );
    let param_env = ty::ParamEnv::new(tcx.mk_clauses(&hybrid_preds));
    let param_env = traits::normalize_param_env_or_error(
        tcx,
        param_env,
        ObligationCause::misc(impl_ct_span, impl_ct_def_id),
    );
    let infcx = tcx.infer_ctxt().build(TypingMode::non_body_analysis());
    let ocx = ObligationCtxt::new_with_diagnostics(&infcx);
    // The impl const's own predicates must be provable in the hybrid
    // environment assembled above.
    let impl_ct_own_bounds = impl_ct_predicates.instantiate_own_identity();
    for (predicate, span) in impl_ct_own_bounds {
        let cause = ObligationCause::misc(span, impl_ct_def_id);
        let predicate = ocx.normalize(&cause, param_env, predicate);
        let cause = ObligationCause::new(span, impl_ct_def_id, code.clone());
        ocx.register_obligation(traits::Obligation::new(tcx, cause, param_env, predicate));
    }
    // There is no "body" here, so just pass dummy id.
    let impl_ty = ocx.normalize(&cause, param_env, impl_ty);
    debug!(?impl_ty);
    let trait_ty = ocx.normalize(&cause, param_env, trait_ty);
    debug!(?trait_ty);
    // The impl's const type must be a subtype of the trait's declared type.
    let err = ocx.sup(&cause, param_env, trait_ty, impl_ty);
    if let Err(terr) = err {
        debug!(?impl_ty, ?trait_ty);
        // Locate the Span containing just the type of the offending impl
        let (ty, _) = tcx.hir_expect_impl_item(impl_ct_def_id).expect_const();
        cause.span = ty.span;
        let mut diag = struct_span_code_err!(
            tcx.dcx(),
            cause.span,
            E0326,
            "implemented const `{}` has an incompatible type for trait",
            trait_ct.name()
        );
        let trait_c_span = trait_ct.def_id.as_local().map(|trait_ct_def_id| {
            // Add a label to the Span containing just the type of the const
            let (ty, _) = tcx.hir_expect_trait_item(trait_ct_def_id).expect_const();
            ty.span
        });
        infcx.err_ctxt().note_type_err(
            &mut diag,
            &cause,
            trait_c_span.map(|span| (span, Cow::from("type in trait"), false)),
            Some(param_env.and(infer::ValuePairs::Terms(ExpectedFound {
                expected: trait_ty.into(),
                found: impl_ty.into(),
            }))),
            terr,
            false,
            None,
        );
        return Err(diag.emit());
    };
    // Check that all obligations are satisfied by the implementation's
    // version.
    let errors = ocx.evaluate_obligations_error_on_ambiguity();
    if !errors.is_empty() {
        return Err(infcx.err_ctxt().report_fulfillment_errors(errors));
    }
    // Finally resolve regions, mirroring the tail of the other entailment checks.
    ocx.resolve_regions_and_report_errors(impl_ct_def_id, param_env, [])
}
#[instrument(level = "debug", skip(tcx))]
fn compare_impl_ty<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_ty: ty::AssocItem,
    trait_ty: ty::AssocItem,
    impl_trait_ref: ty::TraitRef<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    // Cheap structural checks run first; any failure short-circuits before
    // the more expensive entailment and bounds checks, just as with `?`.
    compare_number_of_generics(tcx, impl_ty, trait_ty, false)
        .and_then(|()| compare_generic_param_kinds(tcx, impl_ty, trait_ty, false))
        .and_then(|()| check_region_bounds_on_impl_item(tcx, impl_ty, trait_ty, false))
        .and_then(|()| compare_type_predicate_entailment(tcx, impl_ty, trait_ty, impl_trait_ref))
        .and_then(|()| check_type_bounds(tcx, trait_ty, impl_ty, impl_trait_ref))
}
/// The equivalent of [compare_method_predicate_entailment], but for associated types
/// instead of associated functions.
#[instrument(level = "debug", skip(tcx))]
fn compare_type_predicate_entailment<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_ty: ty::AssocItem,
    trait_ty: ty::AssocItem,
    impl_trait_ref: ty::TraitRef<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    let impl_def_id = impl_ty.container_id(tcx);
    // Args that re-express the trait item's generics in terms of the impl's.
    let trait_to_impl_args = GenericArgs::identity_for_item(tcx, impl_ty.def_id).rebase_onto(
        tcx,
        impl_def_id,
        impl_trait_ref.args,
    );
    let impl_ty_predicates = tcx.predicates_of(impl_ty.def_id);
    let trait_ty_predicates = tcx.predicates_of(trait_ty.def_id);
    let impl_ty_own_bounds = impl_ty_predicates.instantiate_own_identity();
    // If there are no bounds, then there are no const conditions, so no need to check that here.
    if impl_ty_own_bounds.len() == 0 {
        // Nothing to check.
        return Ok(());
    }
    // This `DefId` should be used for the `body_id` field on each
    // `ObligationCause` (and the `FnCtxt`). This is what
    // `regionck_item` expects.
    let impl_ty_def_id = impl_ty.def_id.expect_local();
    debug!(?trait_to_impl_args);
    // The predicates declared by the impl definition, the trait and the
    // associated type in the trait are assumed.
    let impl_predicates = tcx.predicates_of(impl_ty_predicates.parent.unwrap());
    let mut hybrid_preds = impl_predicates.instantiate_identity(tcx).predicates;
    hybrid_preds.extend(
        trait_ty_predicates
            .instantiate_own(tcx, trait_to_impl_args)
            .map(|(predicate, _)| predicate),
    );
    debug!(?hybrid_preds);
    let impl_ty_span = tcx.def_span(impl_ty_def_id);
    let normalize_cause = ObligationCause::misc(impl_ty_span, impl_ty_def_id);
    let is_conditionally_const = tcx.is_conditionally_const(impl_ty.def_id);
    if is_conditionally_const {
        // Augment the hybrid param-env with the const conditions
        // of the impl header and the trait assoc type.
        hybrid_preds.extend(
            tcx.const_conditions(impl_ty_predicates.parent.unwrap())
                .instantiate_identity(tcx)
                .into_iter()
                .chain(
                    tcx.const_conditions(trait_ty.def_id).instantiate_own(tcx, trait_to_impl_args),
                )
                .map(|(trait_ref, _)| {
                    trait_ref.to_host_effect_clause(tcx, ty::BoundConstness::Maybe)
                }),
        );
    }
    let param_env = ty::ParamEnv::new(tcx.mk_clauses(&hybrid_preds));
    let param_env = traits::normalize_param_env_or_error(tcx, param_env, normalize_cause);
    debug!(caller_bounds=?param_env.caller_bounds());
    let infcx = tcx.infer_ctxt().build(TypingMode::non_body_analysis());
    let ocx = ObligationCtxt::new_with_diagnostics(&infcx);
    // Each of the impl item's own predicates must be provable in the hybrid
    // environment, i.e. entailed by what the trait already promised.
    for (predicate, span) in impl_ty_own_bounds {
        let cause = ObligationCause::misc(span, impl_ty_def_id);
        let predicate = ocx.normalize(&cause, param_env, predicate);
        let cause = ObligationCause::new(
            span,
            impl_ty_def_id,
            ObligationCauseCode::CompareImplItem {
                impl_item_def_id: impl_ty.def_id.expect_local(),
                trait_item_def_id: trait_ty.def_id,
                kind: impl_ty.kind,
            },
        );
        ocx.register_obligation(traits::Obligation::new(tcx, cause, param_env, predicate));
    }
    if is_conditionally_const {
        // Validate the const conditions of the impl associated type.
        let impl_ty_own_const_conditions =
            tcx.const_conditions(impl_ty.def_id).instantiate_own_identity();
        for (const_condition, span) in impl_ty_own_const_conditions {
            let normalize_cause = traits::ObligationCause::misc(span, impl_ty_def_id);
            let const_condition = ocx.normalize(&normalize_cause, param_env, const_condition);
            let cause = ObligationCause::new(
                span,
                impl_ty_def_id,
                ObligationCauseCode::CompareImplItem {
                    impl_item_def_id: impl_ty_def_id,
                    trait_item_def_id: trait_ty.def_id,
                    kind: impl_ty.kind,
                },
            );
            ocx.register_obligation(traits::Obligation::new(
                tcx,
                cause,
                param_env,
                const_condition.to_host_effect_clause(tcx, ty::BoundConstness::Maybe),
            ));
        }
    }
    // Check that all obligations are satisfied by the implementation's
    // version.
    let errors = ocx.evaluate_obligations_error_on_ambiguity();
    if !errors.is_empty() {
        let reported = infcx.err_ctxt().report_fulfillment_errors(errors);
        return Err(reported);
    }
    // Finally, resolve all regions. This catches wily misuses of
    // lifetime parameters.
    ocx.resolve_regions_and_report_errors(impl_ty_def_id, param_env, [])
}
/// Validate that `ProjectionCandidate`s created for this associated type will
/// be valid.
///
/// Usually given
///
/// trait X { type Y: Copy } impl X for T { type Y = S; }
///
/// We are able to normalize `<T as X>::Y` to `S`, and so when we check the
/// impl is well-formed we have to prove `S: Copy`.
///
/// For default associated types the normalization is not possible (the value
/// from the impl could be overridden). We also can't normalize generic
/// associated types (yet) because they contain bound parameters.
#[instrument(level = "debug", skip(tcx))]
pub(super) fn check_type_bounds<'tcx>(
    tcx: TyCtxt<'tcx>,
    trait_ty: ty::AssocItem,
    impl_ty: ty::AssocItem,
    impl_trait_ref: ty::TraitRef<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    // Avoid bogus "type annotations needed `Foo: Bar`" errors on `impl Bar for Foo` in case
    // other `Foo` impls are incoherent.
    tcx.ensure_ok().coherent_trait(impl_trait_ref.def_id)?;
    let param_env = tcx.param_env(impl_ty.def_id);
    debug!(?param_env);
    let container_id = impl_ty.container_id(tcx);
    let impl_ty_def_id = impl_ty.def_id.expect_local();
    let impl_ty_args = GenericArgs::identity_for_item(tcx, impl_ty.def_id);
    let rebased_args = impl_ty_args.rebase_onto(tcx, container_id, impl_trait_ref.args);
    let infcx = tcx.infer_ctxt().build(TypingMode::non_body_analysis());
    let ocx = ObligationCtxt::new_with_diagnostics(&infcx);
    // A synthetic impl Trait for RPITIT desugaring or assoc type for effects desugaring has no HIR,
    // which we currently use to get the span for an impl's associated type. Instead, for these,
    // use the def_span for the synthesized associated type.
    let impl_ty_span = if impl_ty.is_impl_trait_in_trait() {
        tcx.def_span(impl_ty_def_id)
    } else {
        match tcx.hir_node_by_def_id(impl_ty_def_id) {
            hir::Node::TraitItem(hir::TraitItem {
                kind: hir::TraitItemKind::Type(_, Some(ty)),
                ..
            }) => ty.span,
            hir::Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Type(ty), .. }) => ty.span,
            item => span_bug!(
                tcx.def_span(impl_ty_def_id),
                "cannot call `check_type_bounds` on item: {item:?}",
            ),
        }
    };
    let assumed_wf_types = ocx.assumed_wf_types_and_report_errors(param_env, impl_ty_def_id)?;
    let normalize_cause = ObligationCause::new(
        impl_ty_span,
        impl_ty_def_id,
        ObligationCauseCode::CheckAssociatedTypeBounds {
            impl_item_def_id: impl_ty.def_id.expect_local(),
            trait_item_def_id: trait_ty.def_id,
        },
    );
    let mk_cause = |span: Span| {
        let code = ObligationCauseCode::WhereClause(trait_ty.def_id, span);
        ObligationCause::new(impl_ty_span, impl_ty_def_id, code)
    };
    // Every item bound declared on the trait's associated type, instantiated
    // with the impl's args, becomes an obligation the impl value must satisfy.
    let mut obligations: Vec<_> = util::elaborate(
        tcx,
        tcx.explicit_item_bounds(trait_ty.def_id).iter_instantiated_copied(tcx, rebased_args).map(
            |(concrete_ty_bound, span)| {
                debug!(?concrete_ty_bound);
                traits::Obligation::new(tcx, mk_cause(span), param_env, concrete_ty_bound)
            },
        ),
    )
    .collect();
    // Only in a const implementation do we need to check that the `[const]` item bounds hold.
    if tcx.is_conditionally_const(impl_ty_def_id) {
        obligations.extend(util::elaborate(
            tcx,
            tcx.explicit_implied_const_bounds(trait_ty.def_id)
                .iter_instantiated_copied(tcx, rebased_args)
                .map(|(c, span)| {
                    traits::Obligation::new(
                        tcx,
                        mk_cause(span),
                        param_env,
                        c.to_host_effect_clause(tcx, ty::BoundConstness::Maybe),
                    )
                }),
        ));
    }
    debug!(item_bounds=?obligations);
    // Normalize predicates with the assumption that the GAT may always normalize
    // to its definition type. This should be the param-env we use to *prove* the
    // predicate too, but we don't do that because of performance issues.
    // See <https://github.com/rust-lang/rust/pull/117542#issue-1976337685>.
    let normalize_param_env = param_env_with_gat_bounds(tcx, impl_ty, impl_trait_ref);
    for obligation in &mut obligations {
        match ocx.deeply_normalize(&normalize_cause, normalize_param_env, obligation.predicate) {
            Ok(pred) => obligation.predicate = pred,
            Err(e) => {
                return Err(infcx.err_ctxt().report_fulfillment_errors(e));
            }
        }
    }
    // Check that all obligations are satisfied by the implementation's
    // version.
    ocx.register_obligations(obligations);
    let errors = ocx.evaluate_obligations_error_on_ambiguity();
    if !errors.is_empty() {
        let reported = infcx.err_ctxt().report_fulfillment_errors(errors);
        return Err(reported);
    }
    // Finally, resolve all regions. This catches wily misuses of
    // lifetime parameters.
    ocx.resolve_regions_and_report_errors(impl_ty_def_id, param_env, assumed_wf_types)
}
/// Install projection predicates that allow GATs to project to their own
/// definition types. This is not allowed in general in cases of default
/// associated types in trait definitions, or when specialization is involved,
/// but is needed when checking these definition types actually satisfy the
/// trait bounds of the GAT.
///
/// # How it works
///
/// ```ignore (example)
/// impl<A, B> Foo<u32> for (A, B) {
/// type Bar<C> = Wrapper<A, B, C>
/// }
/// ```
///
/// - `impl_trait_ref` would be `<(A, B) as Foo<u32>>`
/// - `normalize_impl_ty_args` would be `[A, B, ^0.0]` (`^0.0` here is the bound var with db 0 and index 0)
/// - `normalize_impl_ty` would be `Wrapper<A, B, ^0.0>`
/// - `rebased_args` would be `[(A, B), u32, ^0.0]`, combining the args from
/// the *trait* with the generic associated type parameters (as bound vars).
///
/// A note regarding the use of bound vars here:
/// Imagine as an example
/// ```
/// trait Family {
/// type Member<C: Eq>;
/// }
///
/// impl Family for VecFamily {
/// type Member<C: Eq> = i32;
/// }
/// ```
/// Here, we would generate
/// ```ignore (pseudo-rust)
/// forall<C> { Normalize(<VecFamily as Family>::Member<C> => i32) }
/// ```
///
/// when we really would like to generate
/// ```ignore (pseudo-rust)
/// forall<C> { Normalize(<VecFamily as Family>::Member<C> => i32) :- Implemented(C: Eq) }
/// ```
///
/// But, this is probably fine, because although the first clause can be used with types `C` that
/// do not implement `Eq`, for it to cause some kind of problem, there would have to be a
/// `VecFamily::Member<X>` for some type `X` where `!(X: Eq)`, that appears in the value of type
/// `Member<C: Eq> = ....` That type would fail a well-formedness check that we ought to be doing
/// elsewhere, which would check that any `<T as Family>::Member<X>` meets the bounds declared in
/// the trait (notably, that `X: Eq` and `T: Family`).
fn param_env_with_gat_bounds<'tcx>(
    tcx: TyCtxt<'tcx>,
    impl_ty: ty::AssocItem,
    impl_trait_ref: ty::TraitRef<'tcx>,
) -> ty::ParamEnv<'tcx> {
    let param_env = tcx.param_env(impl_ty.def_id);
    let container_id = impl_ty.container_id(tcx);
    let mut predicates = param_env.caller_bounds().to_vec();
    // for RPITITs, we should install predicates that allow us to project all
    // of the RPITITs associated with the same body. This is because checking
    // the item bounds of RPITITs often involves nested RPITITs having to prove
    // bounds about themselves.
    let impl_tys_to_install = match impl_ty.kind {
        ty::AssocKind::Type {
            data:
                ty::AssocTypeData::Rpitit(
                    ty::ImplTraitInTraitData::Impl { fn_def_id }
                    | ty::ImplTraitInTraitData::Trait { fn_def_id, .. },
                ),
        } => tcx
            .associated_types_for_impl_traits_in_associated_fn(fn_def_id)
            .iter()
            .map(|def_id| tcx.associated_item(*def_id))
            .collect(),
        _ => vec![impl_ty],
    };
    // For each selected impl type, add a projection predicate that lets
    // `<Self as Trait>::Assoc<...>` normalize to the impl's definition type.
    for impl_ty in impl_tys_to_install {
        let trait_ty = match impl_ty.container {
            ty::AssocContainer::InherentImpl => bug!(),
            ty::AssocContainer::Trait => impl_ty,
            // No resolvable corresponding trait item: nothing to project to.
            ty::AssocContainer::TraitImpl(Err(_)) => continue,
            ty::AssocContainer::TraitImpl(Ok(trait_item_def_id)) => {
                tcx.associated_item(trait_item_def_id)
            }
        };
        let mut bound_vars: smallvec::SmallVec<[ty::BoundVariableKind<'tcx>; 8]> =
            smallvec::SmallVec::with_capacity(tcx.generics_of(impl_ty.def_id).own_params.len());
        // Extend the impl's identity args with late-bound GAT vars
        let normalize_impl_ty_args = ty::GenericArgs::identity_for_item(tcx, container_id)
            .extend_to(tcx, impl_ty.def_id, |param, _| match param.kind {
                GenericParamDefKind::Type { .. } => {
                    let kind = ty::BoundTyKind::Param(param.def_id);
                    let bound_var = ty::BoundVariableKind::Ty(kind);
                    bound_vars.push(bound_var);
                    Ty::new_bound(
                        tcx,
                        ty::INNERMOST,
                        ty::BoundTy { var: ty::BoundVar::from_usize(bound_vars.len() - 1), kind },
                    )
                    .into()
                }
                GenericParamDefKind::Lifetime => {
                    let kind = ty::BoundRegionKind::Named(param.def_id);
                    let bound_var = ty::BoundVariableKind::Region(kind);
                    bound_vars.push(bound_var);
                    ty::Region::new_bound(
                        tcx,
                        ty::INNERMOST,
                        ty::BoundRegion {
                            var: ty::BoundVar::from_usize(bound_vars.len() - 1),
                            kind,
                        },
                    )
                    .into()
                }
                GenericParamDefKind::Const { .. } => {
                    let bound_var = ty::BoundVariableKind::Const;
                    bound_vars.push(bound_var);
                    ty::Const::new_bound(
                        tcx,
                        ty::INNERMOST,
                        ty::BoundConst::new(ty::BoundVar::from_usize(bound_vars.len() - 1)),
                    )
                    .into()
                }
            });
        // When checking something like
        //
        // trait X { type Y: PartialEq<<Self as X>::Y> }
        // impl X for T { default type Y = S; }
        //
        // We will have to prove the bound S: PartialEq<<T as X>::Y>. In this case
        // we want <T as X>::Y to normalize to S. This is valid because we are
        // checking the default value specifically here. Add this equality to the
        // ParamEnv for normalization specifically.
        let normalize_impl_ty =
            tcx.type_of(impl_ty.def_id).instantiate(tcx, normalize_impl_ty_args);
        let rebased_args =
            normalize_impl_ty_args.rebase_onto(tcx, container_id, impl_trait_ref.args);
        let bound_vars = tcx.mk_bound_variable_kinds(&bound_vars);
        match normalize_impl_ty.kind() {
            ty::Alias(ty::Projection, proj)
                if proj.def_id == trait_ty.def_id && proj.args == rebased_args =>
            {
                // Don't include this predicate if the projected type is
                // exactly the same as the projection. This can occur in
                // (somewhat dubious) code like this:
                //
                // impl<T> X for T where T: X { type Y = <T as X>::Y; }
            }
            _ => predicates.push(
                ty::Binder::bind_with_vars(
                    ty::ProjectionPredicate {
                        projection_term: ty::AliasTerm::new_from_args(
                            tcx,
                            trait_ty.def_id,
                            rebased_args,
                        ),
                        term: normalize_impl_ty.into(),
                    },
                    bound_vars,
                )
                .upcast(tcx),
            ),
        };
    }
    ty::ParamEnv::new(tcx.mk_clauses(&predicates))
}
/// Manually check here that `async fn foo()` wasn't matched against `fn foo()`,
/// and extract a better error if so.
fn try_report_async_mismatch<'tcx>(
    tcx: TyCtxt<'tcx>,
    infcx: &InferCtxt<'tcx>,
    errors: &[FulfillmentError<'tcx>],
    trait_m: ty::AssocItem,
    impl_m: ty::AssocItem,
    impl_sig: ty::FnSig<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    // Only relevant when the trait method is `async`.
    if !tcx.asyncness(trait_m.def_id).is_async() {
        return Ok(());
    }
    // An `async fn` in a trait desugars its return type into an RPITIT
    // projection; remember that projection's def-id so we can recognize
    // fulfillment errors that originate from it.
    let ty::Alias(ty::Projection, ty::AliasTy { def_id: async_future_def_id, .. }) =
        *tcx.fn_sig(trait_m.def_id).skip_binder().skip_binder().output().kind()
    else {
        bug!("expected `async fn` to return an RPITIT");
    };
    for error in errors {
        // Look for a failed projection obligation on that synthetic future
        // whose projected term equals the impl fn's return type — the
        // signature of an impl method that forgot the `async` keyword.
        if let ObligationCauseCode::WhereClause(def_id, _) = *error.root_obligation.cause.code()
            && def_id == async_future_def_id
            && let Some(proj) = error.root_obligation.predicate.as_projection_clause()
            && let Some(proj) = proj.no_bound_vars()
            && infcx.can_eq(
                error.root_obligation.param_env,
                proj.term.expect_type(),
                impl_sig.output(),
            )
        {
            // FIXME: We should suggest making the fn `async`, but extracting
            // the right span is a bit difficult.
            return Err(tcx.sess.dcx().emit_err(MethodShouldReturnFuture {
                span: tcx.def_span(impl_m.def_id),
                method_name: tcx.item_ident(impl_m.def_id),
                trait_item_span: tcx.hir_span_if_local(trait_m.def_id),
            }));
        }
    }
    Ok(())
}
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package jsonformat
import (
"fmt"
"slices"
"github.com/hashicorp/terraform/internal/command/jsonformat/computed"
"github.com/hashicorp/terraform/internal/command/jsonformat/differ"
"github.com/hashicorp/terraform/internal/command/jsonformat/structured"
"github.com/hashicorp/terraform/internal/command/jsonformat/structured/attribute_path"
"github.com/hashicorp/terraform/internal/command/jsonplan"
"github.com/hashicorp/terraform/internal/command/jsonprovider"
"github.com/hashicorp/terraform/internal/configs"
"github.com/hashicorp/terraform/internal/plans"
)
// precomputeDiffs computes the renderable diff for every part of the given
// plan (resource drift, resource changes, action invocations, deferred
// changes, and output changes) up front, so rendering can happen later
// without re-consulting the schemas.
func precomputeDiffs(plan Plan, mode plans.Mode) diffs {
	diffs := diffs{
		outputs: make(map[string]computed.Diff),
	}
	for _, drift := range plan.ResourceDrift {
		var relevantAttrs attribute_path.Matcher
		if mode == plans.RefreshOnlyMode {
			// For a refresh only plan, we show all the drift.
			relevantAttrs = attribute_path.AlwaysMatcher()
		} else {
			matcher := attribute_path.Empty(true)
			// Otherwise we only want to show the drift changes that are
			// relevant.
			for _, attr := range plan.RelevantAttributes {
				if len(attr.Resource) == 0 || attr.Resource == drift.Address {
					matcher = attribute_path.AppendSingle(matcher, attr.Attr)
				}
			}
			if len(matcher.Paths) > 0 {
				relevantAttrs = matcher
			}
		}
		if relevantAttrs == nil {
			// If we couldn't build a relevant attribute matcher, then we are
			// not going to show anything for this drift.
			continue
		}
		schema := plan.getSchema(drift)
		change := structured.FromJsonChange(drift.Change, relevantAttrs)
		diffs.drift = append(diffs.drift, diff{
			change: drift,
			diff:   differ.ComputeDiffForBlock(change, schema.Block),
		})
	}
	for _, change := range plan.ResourceChanges {
		schema := plan.getSchema(change)
		structuredChange := structured.FromJsonChange(change.Change, attribute_path.AlwaysMatcher())
		// Collect the lifecycle-triggered actions for this resource, split
		// into those that run before vs. after the change itself.
		before := []jsonplan.ActionInvocation{}
		after := []jsonplan.ActionInvocation{}
		for _, action := range plan.ActionInvocations {
			if action.LifecycleActionTrigger == nil || action.LifecycleActionTrigger.TriggeringResourceAddress != change.Address {
				continue
			}
			switch action.LifecycleActionTrigger.ActionTriggerEvent {
			case configs.BeforeCreate.String(), configs.BeforeUpdate.String(), configs.BeforeDestroy.String():
				before = append(before, action)
			case configs.AfterCreate.String(), configs.AfterUpdate.String(), configs.AfterDestroy.String():
				after = append(after, action)
			default:
				// The switch should be exhaustive.
				panic(fmt.Sprintf("Unexpected triggering event when rendering action %s", action.LifecycleActionTrigger.ActionTriggerEvent))
			}
		}
		// Sort both groups so rendering order is deterministic.
		slices.SortFunc(before, jsonplan.ActionInvocationCompare)
		slices.SortFunc(after, jsonplan.ActionInvocationCompare)
		var beforeActionsTriggered []actionInvocation
		var afterActionsTriggered []actionInvocation
		for _, action := range before {
			schema := plan.getActionSchema(action)
			beforeActionsTriggered = append(beforeActionsTriggered, actionInvocation{
				invocation: action,
				schema:     schema,
			})
		}
		for _, action := range after {
			schema := plan.getActionSchema(action)
			afterActionsTriggered = append(afterActionsTriggered, actionInvocation{
				invocation: action,
				schema:     schema,
			})
		}
		diffs.changes = append(diffs.changes, diff{
			change:                 change,
			diff:                   differ.ComputeDiffForBlock(structuredChange, schema.Block),
			beforeActionsTriggered: beforeActionsTriggered,
			afterActionsTriggered:  afterActionsTriggered,
		})
	}
	for _, action := range plan.ActionInvocations {
		if action.InvokeActionTrigger == nil {
			// lifecycle actions are handled within the resource
			continue
		}
		diffs.actions = append(diffs.actions, actionInvocation{
			invocation: action,
			schema:     plan.getActionSchema(action),
		})
	}
	for _, change := range plan.DeferredChanges {
		schema := plan.getSchema(change.ResourceChange)
		structuredChange := structured.FromJsonChange(change.ResourceChange.Change, attribute_path.AlwaysMatcher())
		diffs.deferred = append(diffs.deferred, deferredDiff{
			reason: change.Reason,
			diff: diff{
				change: change.ResourceChange,
				diff:   differ.ComputeDiffForBlock(structuredChange, schema.Block),
			},
		})
	}
	for key, output := range plan.OutputChanges {
		change := structured.FromJsonChange(output, attribute_path.AlwaysMatcher())
		diffs.outputs[key] = differ.ComputeDiffForOutput(change)
	}
	return diffs
}
// diffs holds all of the precomputed renderable changes for a plan, split
// by category so each section can be rendered independently.
type diffs struct {
	drift    []diff
	changes  []diff
	deferred []deferredDiff
	actions  []actionInvocation
	outputs  map[string]computed.Diff
}
// Empty reports whether the plan has nothing worth showing: no resource
// change with a real action or a move, and no changed outputs. Drift,
// deferred changes and actions are intentionally not consulted here.
func (d diffs) Empty() bool {
	for _, change := range d.changes {
		if change.Moved() || change.diff.Action != plans.NoOp {
			return false
		}
	}
	for _, output := range d.outputs {
		if output.Action != plans.NoOp {
			return false
		}
	}
	return true
}
// diff pairs a single resource change with its computed diff and any
// lifecycle-triggered action invocations that run before/after it.
type diff struct {
	change                 jsonplan.ResourceChange
	diff                   computed.Diff
	beforeActionsTriggered []actionInvocation
	afterActionsTriggered  []actionInvocation
}
// actionInvocation pairs an action invocation from the plan with the
// schema needed to render it.
type actionInvocation struct {
	invocation jsonplan.ActionInvocation
	schema     *jsonprovider.ActionSchema
}
// Moved reports whether this change represents a resource that moved to a
// new address (a non-empty previous address that differs from the current one).
func (d diff) Moved() bool {
	prev := d.change.PreviousAddress
	return prev != "" && prev != d.change.Address
}
// Importing reports whether this change carries import metadata.
func (d diff) Importing() bool {
	importing := d.change.Change.Importing
	return importing != nil
}
// deferredDiff wraps a resource diff together with the reason the change
// was deferred rather than planned for this round.
type deferredDiff struct {
	diff   diff
	reason string
}
## common/zeroconf/zeroconf.py
##
## Copyright (C) 2006 Stefan Bethge <stefan@lanpartei.de>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##
# Column indices used by the zeroconf service code; values must stay a
# contiguous 0..8 range (they were originally unpacked from range(9)).
C_NAME = 0
C_DOMAIN = 1
C_INTERFACE = 2
C_PROTOCOL = 3
C_HOST = 4
C_ADDRESS = 5
C_PORT = 6
C_BARE_NAME = 7
C_TXT = 8
def test_avahi():
    """Return True when the avahi Python bindings are importable."""
    try:
        __import__('avahi')
    except ImportError:
        return False
    return True
def test_bonjour():
    """Return True when the pybonjour bindings are importable and usable.

    On Windows, importing pybonjour can raise WindowsError when the Bonjour
    service or its DLL is unavailable.  WindowsError is a subclass of OSError,
    so catching OSError covers it portably; the previous ``except
    WindowsError`` clause would itself raise NameError on platforms where
    that builtin does not exist if a non-ImportError propagated.
    """
    try:
        import pybonjour
    except ImportError:
        return False
    except OSError:
        # Covers WindowsError (missing Bonjour service/DLL on Windows).
        return False
    return True
def test_zeroconf():
    """Return True if at least one zeroconf backend (avahi or bonjour) is available."""
    if test_avahi():
        return True
    return test_bonjour()
# Select the concrete Zeroconf implementation at import time: prefer the
# avahi backend, otherwise fall back to bonjour.  Note that when neither
# backend is available, the `Zeroconf` name is not defined at all, so
# callers should check test_zeroconf() before using it.
if test_avahi():
    from common.zeroconf import zeroconf_avahi
    Zeroconf = zeroconf_avahi.Zeroconf
elif test_bonjour():
    from common.zeroconf import zeroconf_bonjour
    Zeroconf = zeroconf_bonjour.Zeroconf
# vim: se ts=3: | unknown | codeparrot/codeparrot-clean | ||
"""Sanity test using yamllint."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
from .. import types as t
from ..sanity import (
SanitySingleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
SANITY_ROOT,
)
from ..target import (
TestTarget,
)
from ..util import (
SubprocessError,
display,
is_subdir,
find_python,
)
from ..util_common import (
run_command,
)
from ..config import (
SanityConfig,
)
from ..data import (
data_context,
)
class YamllintTest(SanitySingleVersion):
    """Sanity test using yamllint."""
    @property
    def error_code(self): # type: () -> t.Optional[str]
        """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
        # All yamllint findings are reported under this single synthetic code.
        return 'ansible-test'
    def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
        """Return the given list of test targets, filtered to include only those relevant for the test."""
        # Every YAML file is always a candidate.
        yaml_targets = [target for target in targets if os.path.splitext(target.path)[1] in ('.yml', '.yaml')]
        # Also include plugin .py files (but never package __init__.py files),
        # presumably because they can embed YAML documentation blocks -- TODO
        # confirm; module_utils are explicitly excluded from that scan.
        for plugin_type, plugin_path in sorted(data_context().content.plugin_paths.items()):
            if plugin_type == 'module_utils':
                continue
            yaml_targets.extend([target for target in targets if
                                 os.path.splitext(target.path)[1] == '.py' and
                                 os.path.basename(target.path) != '__init__.py' and
                                 is_subdir(target.path, plugin_path)])
        return yaml_targets
    def test(self, args, targets, python_version):
        """
        :type args: SanityConfig
        :type targets: SanityTargets
        :type python_version: str
        :rtype: TestResult
        """
        settings = self.load_processor(args)
        paths = [target.path for target in targets.include]
        python = find_python(python_version)
        results = self.test_paths(args, paths, python)
        # Apply the configured ignore/skip processing to the raw messages.
        results = settings.process_errors(results, paths)
        if results:
            return SanityFailure(self.name, messages=results)
        return SanitySuccess(self.name)
    @staticmethod
    def test_paths(args, paths, python):
        """
        :type args: SanityConfig
        :type paths: list[str]
        :type python: str
        :rtype: list[SanityMessage]
        """
        cmd = [
            python,
            os.path.join(SANITY_ROOT, 'yamllint', 'yamllinter.py'),
        ]
        # The file list is fed to the linter wrapper on stdin, one path per line.
        data = '\n'.join(paths)
        display.info(data, verbosity=4)
        try:
            stdout, stderr = run_command(args, cmd, data=data, capture=True)
            status = 0
        except SubprocessError as ex:
            # A non-zero exit is expected when findings exist; keep the output.
            stdout = ex.stdout
            stderr = ex.stderr
            status = ex.status
        # Any stderr output is unexpected and escalated as a hard failure.
        if stderr:
            raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
        if args.explain:
            # Explain mode: report nothing instead of parsing results.
            return []
        # The wrapper emits JSON: {"messages": [{code, message, path, line, column, level}, ...]}
        results = json.loads(stdout)['messages']
        results = [SanityMessage(
            code=r['code'],
            message=r['message'],
            path=r['path'],
            line=int(r['line']),
            column=int(r['column']),
            level=r['level'],
        ) for r in results]
        return results
import sys
import numpy as np
from PyQt5.QtGui import QIntValidator, QDoubleValidator
from PyQt5.QtWidgets import QApplication, QSizePolicy
from orangewidget import gui
from orangewidget.settings import Setting
from orangewidget import widget
from oasys.widgets import widget as oasyswidget, gui as oasysgui
import orangecanvas.resources as resources
import sys,os
class OWfunctions1D(oasyswidget.OWWidget):
    """OASYS widget that evaluates a 1D function on a grid and sends the
    result downstream as a 2-row numpy array (abscissas, ordinates)."""
    name = "functions1D"
    id = "orange.widgets.datafunctions1D"
    description = "Application to compute..."
    icon = "icons/functions1D.png"
    author = "create_widget.py"
    maintainer_email = "srio@esrf.eu"
    priority = 10
    category = ""
    keywords = ["oasysaddontemplate", "functions1D"]
    outputs = [{"name": "oasysaddontemplate-data",
                "type": np.ndarray,
                "doc": "transfer numpy arrays"},
               # another possible output
               # {"name": "oasysaddontemplate-file",
               #  "type": str,
               #  "doc": "transfer a file"},
               ]
    # widget input (if needed)
    #inputs = [{"name": "Name",
    #           "type": type,
    #           "handler": None,
    #           "doc": ""}]
    want_main_area = False
    # Persisted widget settings (restored between sessions by orangewidget).
    FROM = Setting(-100.0)          # abscissa range start
    TO = Setting(100.0)             # abscissa range end
    NPOINTS = Setting(500)          # number of grid points
    FUNCTION_NAME = Setting(3)      # index into the function combo (3 = Custom)
    CUSTOM = Setting("np.sin(x)")   # expression used when FUNCTION_NAME == 3
    DUMP_TO_FILE = Setting(0)       # combo index: 0 = Yes, 1 = No
    FILE_NAME = Setting("tmp.dat")  # output file used when dumping
    def __init__(self):
        # NOTE(review): passing ``self`` to super().__init__ hands the widget
        # to the base class as its first positional argument (parent?) --
        # looks suspicious; confirm against oasyswidget.OWWidget's signature.
        super().__init__(self)
        # Toolbar action so "Compute" is available from the widget menu.
        self.runaction = widget.OWAction("Compute", self)
        self.runaction.triggered.connect(self.compute)
        self.addAction(self.runaction)
        box0 = gui.widgetBox(self.controlArea, " ",orientation="horizontal")
        #widget buttons: compute, set defaults, help
        gui.button(box0, self, "Compute", callback=self.compute)
        gui.button(box0, self, "Defaults", callback=self.defaults)
        gui.button(box0, self, "Help", callback=self.get_doc)
        self.process_showers()
        box = gui.widgetBox(self.controlArea, " ",orientation="vertical")
        # idx walks through unitLabels()/unitFlags() in parallel with the
        # widgets created below; the flags control conditional visibility.
        idx = -1
        #widget index 0
        idx += 1
        box1 = gui.widgetBox(box)
        gui.lineEdit(box1, self, "FROM",
                     label=self.unitLabels()[idx], addSpace=True,
                     valueType=float, validator=QDoubleValidator())
        self.show_at(self.unitFlags()[idx], box1)
        #widget index 1
        idx += 1
        box1 = gui.widgetBox(box)
        gui.lineEdit(box1, self, "TO",
                     label=self.unitLabels()[idx], addSpace=True,
                     valueType=float, validator=QDoubleValidator())
        self.show_at(self.unitFlags()[idx], box1)
        #widget index 2
        idx += 1
        box1 = gui.widgetBox(box)
        gui.lineEdit(box1, self, "NPOINTS",
                     label=self.unitLabels()[idx], addSpace=True,
                     valueType=int, validator=QIntValidator())
        self.show_at(self.unitFlags()[idx], box1)
        #widget index 3
        idx += 1
        box1 = gui.widgetBox(box)
        gui.comboBox(box1, self, "FUNCTION_NAME",
                     label=self.unitLabels()[idx], addSpace=True,
                     items=['sin(x)', 'cos(x)', 'x^2+x+1', 'Custom'],
                     valueType=int, orientation="horizontal")
        self.show_at(self.unitFlags()[idx], box1)
        #widget index 4 (only visible when FUNCTION_NAME == 3, i.e. Custom)
        idx += 1
        box1 = gui.widgetBox(box)
        gui.lineEdit(box1, self, "CUSTOM",
                     label=self.unitLabels()[idx], addSpace=True)
        self.show_at(self.unitFlags()[idx], box1)
        #widget index 5
        idx += 1
        box1 = gui.widgetBox(box)
        gui.comboBox(box1, self, "DUMP_TO_FILE",
                     label=self.unitLabels()[idx], addSpace=True,
                     items=['Yes', 'No'],
                     valueType=int, orientation="horizontal")
        self.show_at(self.unitFlags()[idx], box1)
        #widget index 6 (only visible when DUMP_TO_FILE == 0, i.e. Yes)
        idx += 1
        box1 = gui.widgetBox(box)
        gui.lineEdit(box1, self, "FILE_NAME",
                     label=self.unitLabels()[idx], addSpace=True)
        self.show_at(self.unitFlags()[idx], box1)
        gui.rubber(self.controlArea)
    def unitLabels(self):
        # Labels for the controls created in __init__, in creation order.
        return ['Abscissa from ','Abscissa to','Number of points','Function','Custom expression', 'Dump to file','File name']
    def unitFlags(self):
        # Visibility conditions (evaluated by show_at) matching unitLabels().
        return ['True', 'True', 'True', 'True', 'self.FUNCTION_NAME == 3','True', 'self.DUMP_TO_FILE == 0']
    def compute(self):
        """Run the calculation with the current settings and emit the result."""
        dataArray = OWfunctions1D.calculate_external_functions1D(FROM=self.FROM,TO=self.TO,NPOINTS=self.NPOINTS,FUNCTION_NAME=self.FUNCTION_NAME,CUSTOM=self.CUSTOM,DUMP_TO_FILE=self.DUMP_TO_FILE,FILE_NAME=self.FILE_NAME)
        # if fileName == None:
        #     print("No file to send")
        # else:
        #     self.send("oasysaddontemplate-file",fileName)
        self.send("oasysaddontemplate-data",dataArray)
    def defaults(self):
        """Reset all settings to their defaults and recompute."""
        self.resetSettings()
        self.compute()
        return
    def get_doc(self):
        """Open the widget's plain-text help file in a platform text editor."""
        print("help pressed.")
        home_doc = resources.package_dirname("orangecontrib.oasysaddontemplate") + "/doc_files/"
        filename1 = os.path.join(home_doc,'functions1D'+'.txt')
        print("Opening file %s"%filename1)
        # NOTE(review): on any platform other than darwin/linux (e.g. win32)
        # ``command`` is never assigned and os.system() raises NameError.
        if sys.platform == 'darwin':
            command = "open -a TextEdit "+filename1+" &"
        elif sys.platform == 'linux':
            command = "gedit "+filename1+" &"
        os.system(command)
    #
    # this is the calculation method to be implemented by the user
    # It is defined as static method to get all inputs from the arguments so it
    # can easily moved outside the class
    #
    @staticmethod
    def calculate_external_functions1D(FROM=-100.0,TO=100.0,NPOINTS=500,FUNCTION_NAME=3,CUSTOM="np.sin(x)",DUMP_TO_FILE=0,FILE_NAME="tmp.dat"):
        # Placeholder implementation: every argument is currently ignored and
        # a fixed 2x4 example array (abscissas row, ordinates row) is returned.
        print("Inside calculate_external_functions1D. ")
        # A MERE EXAMPLE
        a = np.array([
            [ 8.47091837e+04,   8.57285714e+04,   8.67479592e+04,   8.77673469e+04,] ,
            [ 1.16210756e+12,   1.10833975e+12,   1.05700892e+12,   1.00800805e+12]
            ])
        return a
# Manual test entry point: run the widget standalone inside a Qt event loop.
if __name__ == "__main__":
    app = QApplication(sys.argv)
    w = OWfunctions1D()
    w.show()
    app.exec()
    # Persist the widget settings after the event loop exits.
    w.saveSettings()
/* Copyright 2022 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_TRANSFORMS_UTILS_PDLL_UTILS_H_
#define TENSORFLOW_CORE_TRANSFORMS_UTILS_PDLL_UTILS_H_
#include "mlir/IR/PatternMatch.h"  // from @llvm-project
namespace mlir {
namespace tfg {
// Register the common utils.
// Adds TFG's shared PDLL helper patterns/constraints to `patterns`.
void RegisterPDLLUtils(RewritePatternSet &patterns);
}  // namespace tfg
}  // namespace mlir
#endif // TENSORFLOW_CORE_TRANSFORMS_UTILS_PDLL_UTILS_H_ | c | github | https://github.com/tensorflow/tensorflow | tensorflow/core/transforms/utils/pdll/utils.h |
# -*- coding: iso-8859-1 -*-
"""
MoinMoin - inter-thread communication commands
This file defines command objects used by notification
bot's threads to communicate among each other.
@copyright: 2007 by Karol Nowak <grywacz@gmail.com>
@license: GNU GPL, see COPYING for details.
"""
from pyxmpp.jid import JID
# First, XML RPC -> XMPP commands
class NotificationCommand:
    """Class representing a notification request"""
    def __init__(self, jids, notification, msg_type=u"normal", async=True):
        """A constructor

        @param jids: a list of jids to sent this message to
        @param notification: dictionary with notification data
        @param msg_type: XMPP message type of the outgoing message
        @param async: async notifications get queued if contact is DnD
        @type jids: list
        """
        # NOTE(review): ``async`` became a reserved word in Python 3.7; this
        # Python 2 module needs the parameter renamed before any port.
        if type(jids) != list:
            raise Exception("jids argument must be a list!")
        self.notification = notification
        self.jids = jids
        self.async = async
        self.msg_type = msg_type
class NotificationCommandI18n(NotificationCommand):
    """Notification request that should be translated by the XMPP bot"""
    def __init__(self, jids, notification, msg_type="normal", async=True):
        """A constructor

        Params as in NotificationCommand.
        """
        NotificationCommand.__init__(self, jids, notification, msg_type, async)
    def translate(self, gettext_func):
        """Translate the message using a provided gettext function

        @param gettext_func: a unary gettext function
        @return: translated message and subject
        @rtype: tuple
        """
        # ``data``, when present, holds %-interpolation values for the
        # translated template.  (dict.has_key is Python 2 only.)
        if self.notification.has_key('data'):
            msg = gettext_func(self.notification['text']) % self.notification['data']
        else:
            msg = gettext_func(self.notification['text'])
        return (msg, gettext_func(self.notification.get('subject', '')))
class AddJIDToRosterCommand:
    """Inter-thread request asking the XMPP bot to add a JID to its roster."""

    def __init__(self, jid):
        # JID of the contact to add.
        self.jid = jid
class RemoveJIDFromRosterCommand:
    """Inter-thread request asking the XMPP bot to drop a JID from its roster."""

    def __init__(self, jid):
        # JID of the contact to remove.
        self.jid = jid
# XMPP <-> XML RPC commands
# These commands are passed in both directions, with added data
# payload when they return to the XMPP code. Naming convention
# follows method names defined by the Wiki RPC Interface v2.
class BaseDataCommand(object):
    """Common base for commands passed between the XMPP and XML-RPC code.

    A command carries an optional data payload, remembers the JID the
    request came from, and describes itself through the ``description``
    and ``parameter_list`` class attributes.
    """

    # Human-readable summary of what the command does.
    description = u""
    # Human-readable description of the expected parameters.
    parameter_list = u""

    def __init__(self, jid, presentation=u"text"):
        """Remember the requester and the preferred result presentation.

        @param jid: Jabber ID to send the reply to
        @param presentation: how to display results; "text" or "dataforms"
        @type jid: unicode
        @type presentation: unicode
        """
        self.jid = jid
        self.presentation = presentation
        # Payload, filled in by the XML-RPC side before the reply is sent.
        self.data = None
class GetPage(BaseDataCommand):
    description = u"retrieve raw content of a named page"
    parameter_list = u"pagename"

    def __init__(self, jid, pagename):
        """Request the raw wiki text of *pagename*, replying to *jid*."""
        self.pagename = pagename
        BaseDataCommand.__init__(self, jid)
class GetPageHTML(BaseDataCommand):
    description = u"retrieve HTML-formatted content of a named page"
    parameter_list = u"pagename"

    def __init__(self, jid, pagename):
        """Request the rendered HTML of *pagename*, replying to *jid*."""
        self.pagename = pagename
        BaseDataCommand.__init__(self, jid)
class GetPageList(BaseDataCommand):
    description = u"get a list of accesible pages"
    parameter_list = u""

    def __init__(self, jid):
        """Request the list of readable pages, replying to *jid*."""
        BaseDataCommand.__init__(self, jid)
class GetPageInfo(BaseDataCommand):
    description = u"show detailed information about a page"
    parameter_list = u"pagename"

    def __init__(self, jid, pagename, presentation=u"text"):
        """Request metadata about *pagename*, replying to *jid*."""
        BaseDataCommand.__init__(self, jid, presentation)
        self.pagename = pagename
class Search(BaseDataCommand):
    description = u"perform a wiki search"
    parameter_list = u"{title|text} term"

    def __init__(self, jid, search_type, *args, **kwargs):
        """Build a search request; all positional *args* form the search term.

        A full JID (one carrying a resource) is required so that the
        reply reaches the right client connection.
        """
        BaseDataCommand.__init__(self, jid)
        if not JID(jid).resource:
            raise ValueError("The jid argument must be a full jabber id!")
        self.search_type = search_type
        self.term = ' '.join(args)
        self.presentation = kwargs.get('presentation', 'text') # "text" or "dataforms"
        self.case = kwargs.get('case', False)
        self.regexp = kwargs.get('regexp', False)
        # Modification-time filter; not settable through the constructor.
        self.mtime = None
class RevertPage(BaseDataCommand):
    description = u"revert a page to previous revision"
    parameter_list = u"page_name revision"

    def __init__(self, jid, pagename, revision):
        """Request that *pagename* be reverted to *revision*, replying to *jid*."""
        BaseDataCommand.__init__(self, jid)
        self.revision = revision
        self.pagename = pagename
class GetUserLanguage:
    """Ask the wiki for the language a given user has configured."""

    def __init__(self, jid):
        """
        @param jid: user's (bare) Jabber ID
        """
        self.jid = jid
        # Filled in with the language by the XML-RPC side.
        self.language = None
"""
Thread server class implementation
"""
import logging
import os
import signal
import subprocess
import traceback
import time
import psutil
from retry import retry
from pytest_server_fixtures import CONFIG
from pytest_server_fixtures.base import ProcessReader
from .common import ServerClass, is_debug
log = logging.getLogger(__name__)
# ThreadServer will attempt to kill all child processes recursively.
KILL_RETRY_COUNT=15 # Total retry count to kill if not all child processes are terminated.
KILL_RETRY_WAIT_SECS=1 # Wait time between two retries
KILL_WAIT_SECS=5 # Time to wait for processes to terminate in a single retry.
class ProcessStillRunningException(Exception):
    """Raised when child processes survive a kill attempt, to trigger a retry."""
@retry(ProcessStillRunningException,
       tries=KILL_RETRY_COUNT,
       delay=KILL_RETRY_WAIT_SECS)
def _kill_all(procs, sig):
    """Send *sig* to every psutil.Process in *procs* and wait for them to die.

    Decorated with @retry: raising ProcessStillRunningException re-runs the
    whole function (re-signalling the survivors) up to KILL_RETRY_COUNT
    times with KILL_RETRY_WAIT_SECS between attempts.
    """
    log.debug("Killing %d processes with signal %s" % (len(procs), sig))
    for p in procs:
        p.send_signal(sig)
    log.debug("Waiting for %d processes to die" % len(procs))
    # Wait at most KILL_WAIT_SECS for this attempt before deciding.
    gone, alive = psutil.wait_procs(procs, timeout=KILL_WAIT_SECS)
    if len(alive) == 0:
        log.debug("All processes are terminated")
        return
    log.warning("%d processes remainings: %s" % (len(alive), ",".join([p.name() for p in alive])))
    raise ProcessStillRunningException()
def _kill_proc_tree(pid, sig=signal.SIGKILL, timeout=None):
    """Kill the process *pid* together with all of its descendants using *sig*.

    NOTE(review): the *timeout* parameter is accepted but never used; the
    effective wait/retry timing comes from the module-level KILL_* constants
    via _kill_all.
    """
    parent = psutil.Process(pid)
    children = parent.children(recursive=True)
    # Include the parent itself in the same kill batch.
    children.append(parent)
    log.debug("Killing process tree for %d (total_procs_to_kill=%d)" % (parent.pid, len(children)))
    _kill_all(children, sig)
class ThreadServer(ServerClass):
    """Thread server class.

    Launches the server command as a subprocess and waits on it from this
    thread (run() is presumably invoked via ServerClass/threading.Thread's
    start() -- confirm against ServerClass).
    """
    def __init__(self,
                 cmd,
                 get_args,
                 env,
                 workspace,
                 cwd=None,
                 listen_hostname=None):
        super(ThreadServer, self).__init__(cmd, get_args, env)
        # Set to True by owners to signal an intentional shutdown, so run()
        # suppresses the traceback from the interrupted wait().
        self.exit = False
        self._workspace = workspace
        self._cwd = cwd
        self._hostname = listen_hostname
        self._proc = None
    def launch(self):
        """Start the server subprocess and then start this thread to wait on it."""
        log.debug("Launching thread server.")
        run_cmd = [self._cmd] + self._get_args(workspace=self._workspace)
        debug = is_debug()
        extra_args = dict()
        if debug:
            # Capture output so ProcessReader threads can echo it for debugging.
            extra_args['stdout'] = subprocess.PIPE
            extra_args['stderr'] = subprocess.PIPE
        self._proc = subprocess.Popen(run_cmd, env=self._env, cwd=self._cwd, **extra_args)
        log.debug("Running server: %s" % ' '.join(run_cmd))
        log.debug("CWD: %s" % self._cwd)
        if debug:
            ProcessReader(self._proc, self._proc.stdout, False).start()
            ProcessReader(self._proc, self._proc.stderr, True).start()
        self.start()
    def run(self):
        """Run in thread"""
        try:
            # Block until the subprocess exits.
            self._proc.wait()
        except OSError:
            if not self.exit:
                traceback.print_exc()
    @property
    def is_running(self):
        """Check if the main process is still running."""
        # return False if the process is not started yet
        if not self._proc:
            return False
        # return False if there is a return code from the main process
        return self._proc.poll() is None
    @property
    def hostname(self):
        # Hostname the server listens on, as supplied at construction time.
        return self._hostname
    def teardown(self):
        """Kill the server subprocess and its whole process tree."""
        if not self._proc:
            log.warning("No process is running, skip teardown.")
            return
        _kill_proc_tree(self._proc.pid)
        self._proc = None
import numpy as np
import pylab
from mpl_toolkits.mplot3d import Axes3D
from .TransitFlux import FluxQuad
from . import Cnst
from . import PlanetOrbit
###############################################################################
def EccLightCurve(par,t,plot=False):
    """Lightcurve function for eccentric orbits.

    par  -- sequence of 14 parameters: T0, P, Mstar, Mplanet, Rstar, p, i,
            c1, c2, e, w, foot, Tgrad, Sec_depth (Rstar in solar radii,
            masses in solar/Jupiter masses, i and w in degrees).
    t    -- array of observation times (days; same unit as T0 and P).
    plot -- when True, also draw the light curve with pylab.
    Returns an array of relative fluxes, one per element of *t*.
    """
    #read in parameters
    T0,P,Mstar,Mplanet,Rstar,p,i,c1,c2,e,w,foot,Tgrad,Sec_depth = par
    #convert units
    Rstar *= Cnst.RSun
    Mstar *= Cnst.MSun
    Mplanet *= Cnst.MJup
    i *= np.pi / 180.
    w *= np.pi / 180.
    #make w lie in range 0-2pi
    if w >= 2*np.pi:
        w -= 2*np.pi #make f lie in range 0-2pi
    elif w < 0:
        w += 2*np.pi #make f lie in range 0-2pi
    #true anomaly of central transit time
    f = 1.*np.pi/2. + w
    if f >= 2*np.pi:
        f -= 2*np.pi #make f lie in range 0-2pi
    elif f < 0:
        f += 2*np.pi #make f lie in range 0-2pi
    # Convert the transit true anomaly to an eccentric anomaly E and mean
    # anomaly M_tr, then back out the time of periastron passage from T0.
    if f < np.pi:
        E = np.arccos( (np.cos(f) + e) / (e*np.cos(f)+1.) )
        M_tr = E - e*np.sin(E)
        T_peri = T0 + M_tr * P/(2*np.pi)
    if f >= np.pi:
        E = np.arccos( (np.cos(f) + e) / (e*np.cos(f)+1.) )
        M_tr = E - e*np.sin(E)
        T_peri = T0 - M_tr * P/(2*np.pi)
    #calculate mean anomaly
    M = (2*np.pi/P) * (t - T_peri)
    #semi-major axis
    # Kepler's third law; P converted from days to seconds for SI units.
    a = np.power( ( ((P*24.*60.*60./(2*np.pi))**2 * Cnst.G * (Mstar+Mplanet) ) ) , (1./3.) )
    #calc normalised separation z using PlanetOrbit functions
    norm = PlanetOrbit.get_norm(M,a/Rstar,e,w,i)
    y_coord = PlanetOrbit.get_y(M,a/Rstar,e,w,i) #get y coord to separate primary and secondary transits
    #print "a/R =",a/Rstar
    #calculate flux
    f = np.ones(norm.size)
    #primary transit
    index = np.where(y_coord<0) #ie planet closer than star
    f[index] = FluxQuad(norm[index],p,c1,c2) * (foot + (t[index] - T0) * 24. * Tgrad) #time in hours for foot and Tgrad!
    #secondary transit
    index = np.where(y_coord>0) #ie star closer than planet
    f[index] = FluxQuad(norm[index],p,0,0) * (foot + (t[index] - T0) * 24. * Tgrad) #time in hours for foot and Tgrad!
    f[index] = (f[index]-1.) * Sec_depth/p**2 + 1 #scale the transit depth to secondary depth
    # NOTE(review): the baseline factor (foot + Tgrad term) was already
    # applied two statements above, so the next line applies it a second
    # time to secondary-transit points; the primary branch applies it only
    # once -- looks unintentional, confirm before relying on this branch.
    f[index] *= (foot + (t[index] - T0)*24. * Tgrad)
    if plot:
        pylab.plot(t,f,'r-')
        pylab.xlabel('Time / days')
        pylab.ylabel('Relative Flux')
        pylab.ylim(f.min()-(f.max()-f.min())/2.,f.max()+(f.max()-f.min())/2.)
        pylab.xlim(t.min(),t.max())
    #return flux
    return f
###############################################################################
def EccLightCurve_aRs(par,t,plot=False):
    """Lightcurve function for eccentric orbits with aRs parameterisation.

    Same as EccLightCurve, but parameterised with the scaled semi-major
    axis a/Rstar and the impact parameter b instead of stellar/planet
    masses and radius:
    par = T0, P, a_Rstar, p, b, c1, c2, e, w, foot, Tgrad, Sec_depth.
    """
    #read in parameters
    T0,P,a_Rstar,p,b,c1,c2,e,w,foot,Tgrad,Sec_depth = par
    #ensure b and p >= 0
    if b<0.: b=-b
    if p<0.: p=-p
    w *= np.pi / 180.
    # Inclination recovered from the impact parameter.
    i = np.arccos(b/a_Rstar)
    #make w lie in range 0-2pi
    if w >= 2*np.pi:
        w -= 2*np.pi #make f lie in range 0-2pi
    elif w < 0:
        w += 2*np.pi #make f lie in range 0-2pi
    #true anomaly of central transit time
    f = 1.*np.pi/2. + w
    if f >= 2*np.pi:
        f -= 2*np.pi #make f lie in range 0-2pi
    elif f < 0:
        f += 2*np.pi #make f lie in range 0-2pi
    # Convert the transit true anomaly to an eccentric/mean anomaly to
    # recover the time of periastron passage from T0.
    if f < np.pi:
        E = np.arccos( (np.cos(f) + e) / (e*np.cos(f)+1.) )
        M_tr = E - e*np.sin(E)
        T_peri = T0 + M_tr * P/(2*np.pi)
    if f >= np.pi:
        E = np.arccos( (np.cos(f) + e) / (e*np.cos(f)+1.) )
        M_tr = E - e*np.sin(E)
        T_peri = T0 - M_tr * P/(2*np.pi)
    #calculate mean anomaly
    M = (2*np.pi/P) * (t - T_peri)
    #calc normalised separation z using PlanetOrbit functions
    norm = PlanetOrbit.get_norm(M,a_Rstar,e,w,i)
    y_coord = PlanetOrbit.get_y(M,a_Rstar,e,w,i) #get y coord to separate primary and secondary transits
    #print "a/R =",a/Rstar
    #calculate flux
    f = np.ones(norm.size)
    #primary transit
    index = np.where(y_coord<0) #ie planet closer than star
    f[index] = FluxQuad(norm[index],p,c1,c2) * (foot + (t[index] - T0) * 24. * Tgrad) #time in hours for foot and Tgrad!
    #secondary transit
    index = np.where(y_coord>0) #ie star closer than planet
    f[index] = FluxQuad(norm[index],p,0,0) * (foot + (t[index] - T0) * 24. * Tgrad) #time in hours for foot and Tgrad!
    f[index] = (f[index]-1.) * Sec_depth/p**2 + 1 #scale the transit depth to secondary depth
    # NOTE(review): as in EccLightCurve, the baseline factor is applied twice
    # for secondary-transit points (here and two statements above), while the
    # primary branch applies it once -- looks unintentional; confirm.
    f[index] *= (foot + (t[index] - T0)*24. * Tgrad)
    if plot:
        pylab.plot(t,f,'r-')
        pylab.xlabel('Time / days')
        pylab.ylabel('Relative Flux')
        pylab.ylim(f.min()-(f.max()-f.min())/2.,f.max()+(f.max()-f.min())/2.)
        pylab.xlim(t.min(),t.max())
    return f
###############################################################################
def PlotEccOrbit(par,t):
    """Function to plot planet orbit in 3D

    Takes the same 14-element *par* as EccLightCurve plus an array of
    times *t*, and draws the 3D orbit (star at the origin, orbit path,
    and the planet positions at the first two times) into the current
    pylab figure.
    """
    #read in parameters
    T0,P,Mstar,Mplanet,Rstar,p,i,c1,c2,e,w,foot,Tgrad,Sec_depth = par
    #convert units
    Rstar *= Cnst.RSun
    Mstar *= Cnst.MSun
    Mplanet *= Cnst.MJup
    i *= np.pi / 180.
    w *= np.pi / 180.
    #semi-major axis
    a = np.power( ( ((P*24.*60.*60./(2*np.pi))**2 * Cnst.G * (Mstar+Mplanet) ) ) , (1./3.) )
    #make w lie in range 0-2pi
    if w >= 2*np.pi:
        w -= 2*np.pi #make f lie in range 0-2pi
    elif w < 0:
        w += 2*np.pi #make f lie in range 0-2pi
    #true anomaly of central transit time
    f = 1.*np.pi/2. + w
    if f >= 2*np.pi:
        f -= 2*np.pi #make f lie in range 0-2pi
    elif f < 0:
        f += 2*np.pi #make f lie in range 0-2pi
    # Same periastron-time recovery as in EccLightCurve.
    if f < np.pi:
        E = np.arccos( (np.cos(f) + e) / (e*np.cos(f)+1.) )
        M_tr = E - e*np.sin(E)
        T_peri = T0 + M_tr * P/(2*np.pi)
    if f >= np.pi:
        #f = np.pi - f #correct for acos calc
        E = np.arccos( (np.cos(f) + e) / (e*np.cos(f)+1.) )
        M_tr = E - e*np.sin(E)
        #M_tr = 2*np.pi - M_tr
        T_peri = T0 - M_tr * P/(2*np.pi)
    #calculate mean anomaly
    M = (2*np.pi/P) * (t - T_peri)
    #get coords (in units of the stellar radius)
    x = PlanetOrbit.get_x(M,a/Rstar,e,w)
    y = PlanetOrbit.get_y(M,a/Rstar,e,w,i)
    z = PlanetOrbit.get_z(M,a/Rstar,e,w,i)
    #make plot
    ax = Axes3D(pylab.gcf())
    ax.plot(x, y, z, c='k')
    ax.scatter(x, y, z, c='r', s=50)
    ax.scatter([0],[0],[0],c='y', s=500) #plot star position
    ax.scatter([x[0],],[y[0],],[z[0],],c='g', s=100) #plot initial planet position
    ax.scatter([x[1],],[y[1],],[z[1],],c='y', s=100) #plot initial planet position
    ax.set_xlabel('X')
    ax.set_ylabel('Y')
    ax.set_zlabel('Z')
    # NOTE(review): ``range`` shadows the builtin for the rest of this scope.
    range = abs(np.array([x,y,z])).max()
    ax.set_xlim3d(-range,range)
    ax.set_ylim3d(-range,range)
    ax.set_zlim3d(-range,range)
###############################################################################
def PlotEccOrbit_aRs(par,t):
    """Function to plot planet orbit in 3D

    Same as PlotEccOrbit, but takes the 12-element aRs parameterisation
    used by EccLightCurve_aRs (a/Rstar and impact parameter b instead of
    masses and stellar radius).
    """
    #read in parameters
    T0,P,a_Rstar,p,b,c1,c2,e,w,foot,Tgrad,Sec_depth = par
    #ensure b and p >= 0
    if b<0.: b=-b
    if p<0.: p=-p
    w *= np.pi / 180.
    # Inclination recovered from the impact parameter.
    i = np.arccos(b/a_Rstar)
    #make w lie in range 0-2pi
    if w >= 2*np.pi:
        w -= 2*np.pi #make f lie in range 0-2pi
    elif w < 0:
        w += 2*np.pi #make f lie in range 0-2pi
    #true anomaly of central transit time
    f = 1.*np.pi/2. + w
    if f >= 2*np.pi:
        f -= 2*np.pi #make f lie in range 0-2pi
    elif f < 0:
        f += 2*np.pi #make f lie in range 0-2pi
    # Same periastron-time recovery as in EccLightCurve_aRs.
    if f < np.pi:
        E = np.arccos( (np.cos(f) + e) / (e*np.cos(f)+1.) )
        M_tr = E - e*np.sin(E)
        T_peri = T0 + M_tr * P/(2*np.pi)
    if f >= np.pi:
        #f = np.pi - f #correct for acos calc
        E = np.arccos( (np.cos(f) + e) / (e*np.cos(f)+1.) )
        M_tr = E - e*np.sin(E)
        #M_tr = 2*np.pi - M_tr
        T_peri = T0 - M_tr * P/(2*np.pi)
    #calculate mean anomaly
    M = (2*np.pi/P) * (t - T_peri)
    #get coords (in units of the stellar radius)
    x = PlanetOrbit.get_x(M,a_Rstar,e,w)
    y = PlanetOrbit.get_y(M,a_Rstar,e,w,i)
    z = PlanetOrbit.get_z(M,a_Rstar,e,w,i)
    #make plot
    ax = Axes3D(pylab.gcf())
    ax.plot(x, y, z, c='k')
    ax.scatter(x, y, z, c='r', s=50)
    ax.scatter([0],[0],[0],c='y', s=500) #plot star position
    ax.scatter([x[0],],[y[0],],[z[0],],c='g', s=100) #plot initial planet position
    ax.scatter([x[1],],[y[1],],[z[1],],c='y', s=100) #plot initial planet position
    ax.set_xlabel('X')
    ax.set_ylabel('Y')
    ax.set_zlabel('Z')
    # NOTE(review): ``range`` shadows the builtin for the rest of this scope.
    range = abs(np.array([x,y,z])).max()
    ax.set_xlim3d(-range,range)
    ax.set_ylim3d(-range,range)
    ax.set_zlim3d(-range,range)
############################################################################### | unknown | codeparrot/codeparrot-clean | ||
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import csvn.core as svn
from csvn.core import *
import csvn.types as _types
from csvn.ext.callback_receiver import CallbackReceiver
from txn import Txn
from auth import User
import os
class RepositoryURI(object):
    """A URI to an object in a Subversion repository, stored internally in
    encoded (canonicalized) format.

    Instances support the ctypes ``_as_parameter_`` protocol, so they can
    be passed directly to svn_* C functions expecting a path string."""
    def __init__(self, uri, encoded=True):
        """Create a RepositoryURI object from a URI. If encoded=True, the
        input string may be URI-encoded."""
        pool = Pool()
        if not encoded:
            uri = svn_path_uri_encode(uri, pool)
        self._as_parameter_ = str(svn_path_canonicalize(uri, pool))
    def join(self, uri):
        """Join this URI and the specified relative URI,
        adding a slash if necessary."""
        pool = Pool()
        return RepositoryURI(svn_path_join(self, uri, pool))
    def dirname(self):
        """Get the parent directory of this URI"""
        pool = Pool()
        return RepositoryURI(svn_path_dirname(self, pool))
    def relative_path(self, uri, encoded=True):
        """Convert the supplied URI to a decoded path, relative to me."""
        pool = Pool()
        if not encoded:
            uri = svn_path_uri_encode(uri, pool)
        # Fall back to the URI itself when it is not a child of this URI.
        child_path = svn_path_is_child(self, uri, pool) or uri
        return str(svn_path_uri_decode(child_path, pool))
    def longest_ancestor(self, uri):
        """Get the longest ancestor of this URI and another URI"""
        pool = Pool()
        return RepositoryURI(svn_path_get_longest_ancestor(self, uri, pool))
    def __str__(self):
        """Return the URI as a string"""
        return self._as_parameter_
class RemoteRepository(object):
    """This class represents a connection from the client to a remote
    Subversion repository."""
    # The interface corresponds roughly to the svn_ra API, and an object of
    # this type basically represents the C type 'svn_ra_session_t'.
    def __init__(self, url, user=None):
        """Open a new session to URL with the specified USER.

        USER must be an object that implements the
        'csvn.auth.User' interface."""
        if user is None:
            user = User()
        self.pool = Pool()
        # Scratch pool, cleared after each individual RA operation.
        self.iterpool = Pool()
        self.url = RepositoryURI(url)
        self.user = user
        self.client = POINTER(svn_client_ctx_t)()
        svn_client_create_context(byref(self.client), self.pool)
        self.user.setup_auth_baton(pointer(self.client.contents.auth_baton))
        # ctypes _as_parameter_: lets this object be passed straight to
        # svn_ra_* functions in place of the svn_ra_session_t pointer.
        self._as_parameter_ = POINTER(svn_ra_session_t)()
        svn_client_open_ra_session(byref(self._as_parameter_), url,
                                   self.client, self.pool)
        # Route commit log-message requests through our Python wrapper.
        self.client[0].log_msg_func2 = \
            svn_client_get_commit_log2_t(self._log_func_wrapper)
        self.client[0].log_msg_baton2 = c_void_p()
        self._log_func = None
    def close(self):
        """Close this RemoteRepository object, releasing any resources."""
        self.pool.clear()
    def txn(self):
        """Create a transaction"""
        return Txn(self)
    def latest_revnum(self):
        """Get the latest revision number in the repository"""
        revnum = svn_revnum_t()
        svn_ra_get_latest_revnum(self, byref(revnum), self.iterpool)
        self.iterpool.clear()
        return revnum.value
    def check_path(self, path, rev = None, encoded=True):
        """Check the status of PATH@REV. If REV is not specified,
        look at the latest revision in the repository.

        If the path is ...
         ... absent, then we return svn_node_none.
         ... a regular file, then we return svn_node_file.
         ... a directory, then we return svn_node_dir
         ... unknown, then we return svn_node_unknown

        If ENCODED is True, the path may be URI-encoded.
        """
        path = self._relative_path(path, encoded)
        if rev is None:
            rev = self.latest_revnum()
        kind = svn_node_kind_t()
        svn_ra_check_path(self, path, svn_revnum_t(rev), byref(kind),
                          self.iterpool)
        self.iterpool.clear()
        return kind.value
    def list(self, path, rev = SVN_INVALID_REVNUM, fields = SVN_DIRENT_ALL):
        """List the contents of the specified directory PATH@REV. This
        function returns a dictionary, which maps entry names to
        directory entries (svn_dirent_t objects).

        If REV is not specified, we look at the latest revision of the
        repository.

        FIELDS controls what portions of the svn_dirent_t object are
        filled in. To have them completely filled in, just pass in
        SVN_DIRENT_ALL (which is the default); otherwise, pass the
        bitwise OR of all the SVN_DIRENT_ fields you would like to
        have returned to you.
        """
        dirents = _types.Hash(POINTER(svn_dirent_t), None)
        svn_ra_get_dir2(self, dirents.byref(), NULL, NULL, path,
                        rev, fields, dirents.pool)
        self.iterpool.clear()
        # Create a Python dict of svn_dirent_t objects from this Hash of
        # pointers to svn_dirent_t.
        result = {}
        for path, dirent_p in dirents.items():
            result[path] = dirent_p[0]
        return result
    def cat(self, buffer, path, rev = SVN_INVALID_REVNUM):
        """Get PATH@REV and save it to BUFFER. BUFFER must be a Python file
        or a StringIO object.

        If REV is not specified, we look at the latest revision of the
        repository."""
        # Wrap the Python file object as an svn stream to receive the data.
        stream = _types.Stream(buffer)
        svn_ra_get_file(self, path, rev, stream, NULL, NULL, stream.pool)
        self.iterpool.clear()
    def info(self, path, rev = None):
        """Get a pointer to a svn_dirent_t object associated with PATH@REV.

        If PATH does not exist, return None.
        If REV is not specified, we look at the latest revision of the
        file."""
        dirent = POINTER(svn_dirent_t)()
        dirent.pool = Pool()
        if rev is None:
            rev = self.latest_revnum()
        svn_ra_stat(self, path, rev, byref(dirent), dirent.pool)
        self.iterpool.clear()
        return dirent[0]
    def proplist(self, path, rev = SVN_INVALID_REVNUM):
        """Return a dictionary containing the properties on PATH@REV

        If REV is not specified, we look at the latest revision of the
        repository."""
        props = _types.Hash(POINTER(svn_string_t), None,
                            wrapper=_types.SvnStringPtr)
        # Directories and files use different RA calls to fetch props.
        status = self.check_path(path, rev)
        if status == svn_node_dir:
            svn_ra_get_dir2(self, NULL, NULL, props.byref(), path,
                            rev, 0, props.pool)
        else:
            svn_ra_get_file(self, path, rev, NULL, NULL, props.byref(),
                            props.pool)
        self.iterpool.clear()
        return props
    def propget(self, name, path, rev = SVN_INVALID_REVNUM):
        """Get property NAME from PATH@REV.

        If REV is not specified, we look at the latest revision of the
        repository."""
        # Raises KeyError when the property is not set on the path.
        return self.proplist(path, rev)[name]
    def log(self, start_rev, end_rev, paths=None, limit=0,
            discover_changed_paths=FALSE, stop_on_copy=FALSE):
        """A generator function which returns information about the revisions
        between START_REV and END_REV. Each return value is a
        csvn.types.LogEntry object which describes a revision.

        For details on what fields are contained in a LogEntry object,
        please see the documentation from csvn.types.LogEntry.

        You can iterate through the log information for several revisions
        using a regular for loop. For example:

          for entry in session.log(start_rev, end_rev):
              print("Revision %d" % entry.revision)
              ...

        ARGUMENTS:

        If PATHS is not None and has one or more elements, then only
        show revisions in which at least one of PATHS was changed (i.e.,
        if file, text or props changed; if dir, props changed or an entry
        was added or deleted). Each PATH should be relative to the current
        session's root.

        If LIMIT is non-zero, only the first LIMIT logs are returned.

        If DISCOVER_CHANGED_PATHS is True, then changed_paths will contain
        a list of paths affected by this revision.

        If STOP_ON_COPY is True, then this function will not cross
        copies while traversing history.

        If START_REV or END_REV is a non-existent revision, we throw
        a SVN_ERR_FS_NO_SUCH_REVISION SubversionException, without
        returning any logs.
        """
        # An empty path list means "the session root" for the RA layer.
        paths = _types.Array(c_char_p, paths is None and [""] or paths)
        return iter(_LogMessageReceiver(self, start_rev, end_rev, paths,
                    limit, discover_changed_paths, stop_on_copy))
    # Private. Produces a delta editor for the commit, so that the Txn
    # class can commit its changes over the RA layer.
    def _get_commit_editor(self, message, commit_callback, commit_baton, pool):
        editor = POINTER(svn_delta_editor_t)()
        editor_baton = c_void_p()
        svn_ra_get_commit_editor2(self, byref(editor),
            byref(editor_baton), message, commit_callback,
            commit_baton, NULL, FALSE, pool)
        return (editor, editor_baton)
    # Private. Convert a URI to a repository-relative path
    def _relative_path(self, path, encoded=True):
        return self.url.relative_path(path, encoded)
    # Private. Convert a repository-relative copyfrom path into a proper
    # copyfrom URI
    def _abs_copyfrom_path(self, path):
        return self.url.join(RepositoryURI(path, False))
def revprop_list(self, revnum=None):
"""Returns a hash of the revision properties of REVNUM. If REVNUM is
not provided, it defaults to the head revision."""
rev = svn_opt_revision_t()
if revnum is not None:
rev.kind = svn_opt_revision_number
rev.value.number = revnum
else:
rev.kind = svn_opt_revision_head
props = _types.Hash(POINTER(svn_string_t), None,
wrapper=_types.SvnStringPtr)
set_rev = svn_revnum_t()
svn_client_revprop_list(props.byref(),
self.url,
byref(rev),
byref(set_rev),
self.client,
props.pool)
self.iterpool.clear()
return props
def revprop_get(self, propname, revnum=None):
"""Returns the value of PROPNAME at REVNUM. If REVNUM is not
provided, it defaults to the head revision."""
return self.revprop_list(revnum)[propname]
def revprop_set(self, propname, propval=NULL, revnum=None, force=False):
"""Set PROPNAME to PROPVAL for REVNUM. If REVNUM is not given, it
defaults to the head revision. Returns the actual revision number
effected.
If PROPVAL is not provided, the property will be deleted.
If FORCE is True (False by default), newlines will be allowed in the
author property.
Be careful, this is a lossy operation."""
rev = svn_opt_revision_t()
if revnum is not None:
rev.kind = svn_opt_revision_number
rev.value.number = revnum
else:
rev.kind = svn_opt_revision_head
set_rev = svn_revnum_t()
svn_client_revprop_set(propname,
svn_string_create(propval, self.iterpool), self.url,
byref(rev), byref(set_rev), force, self.client,
self.iterpool)
try:
return set_rev.value
finally:
self.iterpool.clear()
def set_log_func(self, log_func):
"""Register a callback to get a log message for commit and
commit-like operations. LOG_FUNC should take an array as an argument,
which holds the files to be committed. It should return a list of the
form [LOG, FILE] where LOG is a log message and FILE is the temporary
file, if one was created instead of a log message. If LOG is None,
the operation will be canceled and FILE will be treated as the
temporary file holding the temporary commit message."""
self._log_func = log_func
def _log_func_wrapper(self, log_msg, tmp_file, commit_items, baton, pool):
log_msg[0].raw = NULL
tmp_file[0] = NULL
if self._log_func:
[log, file] = self._log_func(_types.Array(String, commit_items))
if log:
log_msg[0].raw = apr_pstrdup(pool, String(log)).raw
if file:
tmp_file[0] = apr_pstrdup(pool, String(file)).raw
def svnimport(self, path, url=None, nonrecursive=False, no_ignore=True, log_func=None):
if not url:
url = self.url
if log_func:
self.set_log_func(log_func)
pool = Pool()
commit_info = POINTER(svn_commit_info_t)()
svn_client_import2(byref(commit_info), path, url, nonrecursive,
no_ignore, self.client, pool)
commit_info[0].pool = pool
return commit_info[0]
class LocalRepository(object):
"""A client which accesses the repository directly. This class
may allow you to perform some administrative actions which
cannot be performed remotely (e.g. create repositories,
dump repositories, etc.)
Unlike RemoteRepository, the functions in this class do not
accept URIs, and instead only accept local filesystem
paths.
By default, this class does not perform any checks to verify
permissions, assuming that the specified user has full
administrative access to the repository. To teach this class
to enforce an authz policy, you must subclass csvn.auth.User
and implement the allow_access function.
"""
def __init__(self, path, create=False, user=None):
"""Open the repository at PATH. If create is True,
create a new repository.
If specified, user must be a csvn.auth.User instance.
"""
if user is None:
user = User()
self.pool = Pool()
self.iterpool = Pool()
self._as_parameter_ = POINTER(svn_repos_t)()
self.user = user
if create:
svn_repos_create(byref(self._as_parameter_), path,
None, None, None, None, self.pool)
else:
svn_repos_open(byref(self._as_parameter_), path, self.pool)
self.fs = _fs(self)
def __del__(self):
self.close()
def close(self):
"""Close this LocalRepository object, releasing any resources. In
particular, this closes the rep-cache DB."""
self.pool.clear()
def latest_revnum(self):
"""Get the latest revision in the repository"""
return self.fs.latest_revnum()
def check_path(self, path, rev = None, encoded=False):
"""Check whether the given PATH exists in the specified REV. If REV
is not specified, look at the latest revision.
If the path is ...
... absent, then we return svn_node_none.
... a regular file, then we return svn_node_file.
... a directory, then we return svn_node_dir
... unknown, then we return svn_node_unknown
"""
assert(not encoded)
root = self.fs.root(rev=rev, pool=self.iterpool)
try:
return root.check_path(path)
finally:
self.iterpool.clear()
def uuid(self):
"""Return a universally-unique ID for this repository"""
return self.fs.uuid()
def set_rev_prop(self, rev, name, value, author=NULL):
"""Set the NAME property to VALUE in the specified
REV, attribute the change to AUTHOR if provided."""
rev = svn_revnum_t(rev)
svn_repos_fs_change_rev_prop2(self, rev, author, name, value,
svn_repos_authz_func_t(),
None, self.iterpool)
self.iterpool.clear()
def get_rev_prop(self, rev, name):
"""Returns the value of NAME in REV. If NAME does not exist in REV,
returns None."""
rev = svn_revnum_t(rev)
value = POINTER(svn_string_t)()
svn_repos_fs_revision_prop(byref(value), self, rev, name,
svn_repos_authz_func_t(), None,
self.iterpool)
try:
if value:
return _types.SvnStringPtr.from_param(value)
else:
return None
finally:
self.iterpool.clear()
def txn(self):
"""Open up a new transaction, so that you can commit a change
to the repository"""
assert self.user is not None, (
"If you would like to commit changes to the repository, "
"you must supply a user object when you initialize "
"the repository object")
return Txn(self)
# Private. Produces a delta editor for the commit, so that the Txn
# class can commit its changes over the RA layer.
def _get_commit_editor(self, message, commit_callback, commit_baton, pool):
editor = POINTER(svn_delta_editor_t)()
editor_baton = c_void_p()
svn_repos_get_commit_editor4(byref(editor),
byref(editor_baton), self, None, "", "",
self.user.username(), message,
commit_callback, commit_baton, svn_repos_authz_callback_t(),
None, pool)
return (editor, editor_baton)
def _relative_path(self, path):
return path
# Private. Convert a repository-relative copyfrom path into a proper
# copyfrom URI
def _abs_copyfrom_path(self, path):
return path
def load(self, dumpfile, feedbackfile=None,
uuid_action=svn_repos_load_uuid_default, parent_dir="",
use_pre_commit_hook=False, use_post_commit_hook=False,
cancel_func=None):
"""Read and parse dumpfile-formatted DUMPFILE, reconstructing
filesystem revisions. Dumpfile should be an open python file object
or file like object. UUID will be handled according to UUID_ACTION
which defaults to svn_repos_load_uuid_default.
If FEEDBACKFILE is provided (in the form of a python file object or
file like object), feedback will be sent to it.
If PARENT_DIR is provided, everything loaded from the dump will be
reparented to PARENT_DIR.
USE_PRE_COMMIT_HOOK and USE_POST_COMMIT_HOOK are False by default,
if either is set to True that hook will be used.
If CANCEL_FUNC is provided, it will be called at various points to
allow the operation to be cancelled. The cancel baton will be the
LocalRepository object."""
if not cancel_func:
cancel_func = svn_cancel_func_t()
apr_dump = _types.APRFile(dumpfile)
stream_dump = svn_stream_from_aprfile2(apr_dump._as_parameter_,
False, self.iterpool)
if feedbackfile:
apr_feedback = _types.APRFile(feedbackfile)
stream_feedback = svn_stream_from_aprfile2(
apr_feedback._as_parameter_, False,
self.iterpool)
else:
stream_feedback = NULL
svn_repos_load_fs2(self._as_parameter_, stream_dump, stream_feedback,
uuid_action, parent_dir, use_pre_commit_hook,
use_post_commit_hook, cancel_func,
c_void_p(), self.iterpool)
apr_dump.close()
if feedbackfile:
apr_feedback.close()
self.iterpool.clear()
class _fs(object):
"""NOTE: This is a private class. Don't use it outside of
this module. Use the Repos class instead.
This class represents an svn_fs_t object"""
def __init__(self, repos):
self.iterpool = Pool()
self._as_parameter_ = svn_repos_fs(repos)
def latest_revnum(self):
"""See Repos.latest_revnum"""
rev = svn_revnum_t()
svn_fs_youngest_rev(byref(rev), self, self.iterpool)
self.iterpool.clear()
return rev.value
def uuid(self):
"""See Repos.uuid"""
uuid_buffer = String()
svn_fs_get_uuid(self, byref(uuid_buffer), self.iterpool)
uuid_str = str(uuid_buffer)
self.iterpool.clear()
return uuid_str
def root(self, rev = None, txn = None, pool = None,
iterpool = None):
"""Create a new svn_fs_root_t object from txn or rev.
If neither txn nor rev or set, this root object will
point to the latest revision root.
The svn_fs_root object itself will be allocated in pool.
If iterpool is supplied, iterpool will be used for any
temporary allocations. Otherwise, pool will be used for
temporary allocations."""
return _fs_root(self, rev, txn, pool, iterpool)
class _fs_root(object):
"""NOTE: This is a private class. Don't use it outside of
this module. Use the Repos.txn() method instead.
This class represents an svn_fs_root_t object"""
def __init__(self, fs, rev = None, txn = None, pool = None,
iterpool = None):
"""See _fs.root()"""
assert(pool)
self.pool = pool
self.iterpool = iterpool or pool
self.fs = fs
self._as_parameter_ = POINTER(svn_fs_root_t)()
if txn and rev:
raise Exception("You can't specify both a txn and a rev")
if txn:
svn_fs_txn_root(byref(self._as_parameter_), txn, self.pool)
else:
if not rev:
rev = fs.latest_revnum()
svn_fs_revision_root(byref(self._as_parameter_), fs, rev, self.pool)
def check_path(self, path):
"""Check whether the specified path exists in this root.
See Repos.check_path() for details."""
kind = svn_node_kind_t()
svn_fs_check_path(byref(kind), self, path, self.iterpool)
return kind.value
class LogEntry(object):
"""REVISION, AUTHOR, DATE, and MESSAGE are straightforward, and
contain what you expect. DATE is a csvn.types.SvnDate object.
If no information about the paths changed in this revision is
available, CHANGED_PATHS will be None. Otherwise, CHANGED_PATHS
will contain a dictionary which maps every path committed
in REVISION to svn_log_changed_path_t pointers."""
__slots__ = ['changed_paths', 'revision',
'author', 'date', 'message']
class _LogMessageReceiver(CallbackReceiver):
def collect(self, session, start_rev, end_rev, paths, limit,
discover_changed_paths, stop_on_copy):
self.discover_changed_paths = discover_changed_paths
pool = Pool()
baton = c_void_p()
receiver = svn_log_message_receiver_t(self.receive)
svn_ra_get_log(session, paths, start_rev, end_rev,
limit, discover_changed_paths, stop_on_copy, receiver,
baton, pool)
def receive(self, baton, changed_paths, revision, author, date, message, pool):
entry = LogEntry()
# Save information about the log entry
entry.revision = revision
entry.author = str(author)
entry.date = _types.SvnDate(date)
entry.message = str(message)
if self.discover_changed_paths:
entry.changed_paths = _types.Hash(POINTER(svn_log_changed_path_t),
changed_paths, dup = svn_log_changed_path_dup)
else:
entry.changed_paths = None
self.send(entry) | unknown | codeparrot/codeparrot-clean | ||
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.ui;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import org.jspecify.annotations.Nullable;
import org.springframework.core.Conventions;
import org.springframework.util.Assert;
/**
* Implementation of {@link java.util.Map} for use when building model data for use
* with UI tools. Supports chained calls and generation of model attribute names.
*
* <p>This class serves as generic model holder for Servlet MVC but is not tied to it.
* Check out the {@link Model} interface for an interface variant.
*
* @author Rob Harrop
* @author Juergen Hoeller
* @since 2.0
* @see Conventions#getVariableName
* @see org.springframework.web.servlet.ModelAndView
*/
@SuppressWarnings("serial")
public class ModelMap extends LinkedHashMap<String, Object> {
/**
* Construct a new, empty {@code ModelMap}.
*/
public ModelMap() {
}
/**
* Construct a new {@code ModelMap} containing the supplied attribute
* under the supplied name.
* @see #addAttribute(String, Object)
*/
public ModelMap(String attributeName, @Nullable Object attributeValue) {
addAttribute(attributeName, attributeValue);
}
/**
* Construct a new {@code ModelMap} containing the supplied attribute.
* Uses attribute name generation to generate the key for the supplied model
* object.
* @see #addAttribute(Object)
*/
public ModelMap(Object attributeValue) {
addAttribute(attributeValue);
}
/**
* Add the supplied attribute under the supplied name.
* @param attributeName the name of the model attribute (never {@code null})
* @param attributeValue the model attribute value (can be {@code null})
*/
public ModelMap addAttribute(String attributeName, @Nullable Object attributeValue) {
Assert.notNull(attributeName, "Model attribute name must not be null");
put(attributeName, attributeValue);
return this;
}
/**
* Add the supplied attribute to this {@code Map} using a
* {@link org.springframework.core.Conventions#getVariableName generated name}.
* <p><i>Note: Empty {@link Collection Collections} are not added to
* the model when using this method because we cannot correctly determine
* the true convention name. View code should check for {@code null} rather
* than for empty collections as is already done by JSTL tags.</i>
* @param attributeValue the model attribute value (never {@code null})
*/
public ModelMap addAttribute(Object attributeValue) {
Assert.notNull(attributeValue, "Model object must not be null");
if (attributeValue instanceof Collection<?> collection && collection.isEmpty()) {
return this;
}
return addAttribute(Conventions.getVariableName(attributeValue), attributeValue);
}
/**
* Copy all attributes in the supplied {@code Collection} into this
* {@code Map}, using attribute name generation for each element.
* @see #addAttribute(Object)
*/
public ModelMap addAllAttributes(@Nullable Collection<?> attributeValues) {
if (attributeValues != null) {
for (Object attributeValue : attributeValues) {
addAttribute(attributeValue);
}
}
return this;
}
/**
* Copy all attributes in the supplied {@code Map} into this {@code Map}.
* @see #addAttribute(String, Object)
*/
public ModelMap addAllAttributes(@Nullable Map<String, ?> attributes) {
if (attributes != null) {
putAll(attributes);
}
return this;
}
/**
* Copy all attributes in the supplied {@code Map} into this {@code Map},
* with existing objects of the same name taking precedence (i.e. not getting
* replaced).
*/
public ModelMap mergeAttributes(@Nullable Map<String, ?> attributes) {
if (attributes != null) {
attributes.forEach((key, value) -> {
if (!containsKey(key)) {
put(key, value);
}
});
}
return this;
}
/**
* Does this model contain an attribute of the given name?
* @param attributeName the name of the model attribute (never {@code null})
* @return whether this model contains a corresponding attribute
*/
public boolean containsAttribute(String attributeName) {
return containsKey(attributeName);
}
/**
* Return the attribute value for the given name, if any.
* @param attributeName the name of the model attribute (never {@code null})
* @return the corresponding attribute value, or {@code null} if none
* @since 5.2
*/
public @Nullable Object getAttribute(String attributeName) {
return get(attributeName);
}
} | java | github | https://github.com/spring-projects/spring-framework | spring-context/src/main/java/org/springframework/ui/ModelMap.java |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# The gui
#
import gettext
import os
from config import Config
from PyQt5.QtWidgets import QLabel, QLineEdit, QMainWindow, QPushButton, QWidget, QVBoxLayout, QAction
from PyQt5.QtCore import pyqtSlot
localedir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'locales')
translate = gettext.translation('gui', localedir, fallback=True)
_ = translate.gettext
class MainWindow(QMainWindow):
def __init__(self, *args):
QMainWindow.__init__(self, *args)
self.setWindowTitle(_('Pyblion'))
# load config
self.config = Config()
self.config.load()
self.move(self.config.mainwindow_x, self.config.mainwindow_y)
self.resize(self.config.mainwindow_witdh, self.config.mainwindow_height)
# Menu Bar
self.open_file_action = QAction(_('Settings'), self)
self.exit_action = QAction(_('Exit'), self)
self.exit_action.setMenuRole(QAction.QuitRole)
file_menu = self.menuBar().addMenu(_('File'))
file_menu.addAction(self.open_file_action)
file_menu.addSeparator()
file_menu.addAction(self.exit_action)
self.about_action = QAction(_('About'), self)
help_menu = self.menuBar().addMenu(_('Help'))
help_menu.addAction(self.about_action)
# Widgets
self.hello_world_label = QLabel(_('Hello World!'))
self.update_text_button = QPushButton(_('Update'))
self.edit_text = QLineEdit()
# Connects
self.update_text_button.clicked.connect(self.update_text)
self.about_action.triggered.connect(self.show_about_dialog)
# Layout
main_widget = QWidget()
self.setCentralWidget(main_widget)
main_layout = QVBoxLayout()
main_widget.setLayout(main_layout)
main_layout.addWidget(self.hello_world_label)
main_layout.addWidget(self.edit_text)
main_layout.addWidget(self.update_text_button)
@pyqtSlot()
def update_text(self):
self.hello_world_label.setText(self.edit_text.text())
@pyqtSlot()
def show_about_dialog(self):
print('akjsdghkjhgasd')
def closeEvent(self, event):
self.config.mainwindow_witdh = self.width()
self.config.mainwindow_height = self.height()
self.config.mainwindow_x = self.x()
self.config.mainwindow_y = self.y()
self.config.save()
super(MainWindow, self).closeEvent(event) | unknown | codeparrot/codeparrot-clean | ||
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
ExtractorError,
int_or_none,
url_or_none,
urlencode_postdata,
)
class HiDiveIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?hidive\.com/stream/(?P<title>[^/]+)/(?P<key>[^/?#&]+)'
# Using X-Forwarded-For results in 403 HTTP error for HLS fragments,
# so disabling geo bypass completely
_GEO_BYPASS = False
_NETRC_MACHINE = 'hidive'
_LOGIN_URL = 'https://www.hidive.com/account/login'
_TESTS = [{
'url': 'https://www.hidive.com/stream/the-comic-artist-and-his-assistants/s01e001',
'info_dict': {
'id': 'the-comic-artist-and-his-assistants/s01e001',
'ext': 'mp4',
'title': 'the-comic-artist-and-his-assistants/s01e001',
'series': 'the-comic-artist-and-his-assistants',
'season_number': 1,
'episode_number': 1,
},
'params': {
'skip_download': True,
},
'skip': 'Requires Authentication',
}]
def _real_initialize(self):
email, password = self._get_login_info()
if email is None:
return
webpage = self._download_webpage(self._LOGIN_URL, None)
form = self._search_regex(
r'(?s)<form[^>]+action="/account/login"[^>]*>(.+?)</form>',
webpage, 'login form')
data = self._hidden_inputs(form)
data.update({
'Email': email,
'Password': password,
})
self._download_webpage(
self._LOGIN_URL, None, 'Logging in', data=urlencode_postdata(data))
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
title, key = mobj.group('title', 'key')
video_id = '%s/%s' % (title, key)
settings = self._download_json(
'https://www.hidive.com/play/settings', video_id,
data=urlencode_postdata({
'Title': title,
'Key': key,
'PlayerId': 'f4f895ce1ca713ba263b91caeb1daa2d08904783',
}))
restriction = settings.get('restrictionReason')
if restriction == 'RegionRestricted':
self.raise_geo_restricted()
if restriction and restriction != 'None':
raise ExtractorError(
'%s said: %s' % (self.IE_NAME, restriction), expected=True)
formats = []
subtitles = {}
for rendition_id, rendition in settings['renditions'].items():
bitrates = rendition.get('bitrates')
if not isinstance(bitrates, dict):
continue
m3u8_url = url_or_none(bitrates.get('hls'))
if not m3u8_url:
continue
formats.extend(self._extract_m3u8_formats(
m3u8_url, video_id, 'mp4', entry_protocol='m3u8_native',
m3u8_id='%s-hls' % rendition_id, fatal=False))
cc_files = rendition.get('ccFiles')
if not isinstance(cc_files, list):
continue
for cc_file in cc_files:
if not isinstance(cc_file, list) or len(cc_file) < 3:
continue
cc_lang = cc_file[0]
cc_url = url_or_none(cc_file[2])
if not isinstance(cc_lang, compat_str) or not cc_url:
continue
subtitles.setdefault(cc_lang, []).append({
'url': cc_url,
})
self._sort_formats(formats)
season_number = int_or_none(self._search_regex(
r's(\d+)', key, 'season number', default=None))
episode_number = int_or_none(self._search_regex(
r'e(\d+)', key, 'episode number', default=None))
return {
'id': video_id,
'title': video_id,
'subtitles': subtitles,
'formats': formats,
'series': title,
'season_number': season_number,
'episode_number': episode_number,
} | unknown | codeparrot/codeparrot-clean | ||
import lru
class ormcache(object):
""" LRU cache decorator for orm methods,
"""
def __init__(self, skiparg=2, size=8192, multi=None, timeout=None):
self.skiparg = skiparg
self.size = size
self.method = None
self.stat_miss = 0
self.stat_hit = 0
self.stat_err = 0
def __call__(self,m):
self.method = m
def lookup(self2, cr, *args):
r = self.lookup(self2, cr, *args)
return r
lookup.clear_cache = self.clear
return lookup
def stat(self):
return "lookup-stats hit=%s miss=%s err=%s ratio=%.1f" % (self.stat_hit,self.stat_miss,self.stat_err, (100*float(self.stat_hit))/(self.stat_miss+self.stat_hit) )
def lru(self, self2):
try:
ormcache = getattr(self2, '_ormcache')
except AttributeError:
ormcache = self2._ormcache = {}
try:
d = ormcache[self.method]
except KeyError:
d = ormcache[self.method] = lru.LRU(self.size)
return d
def lookup(self, self2, cr, *args):
d = self.lru(self2)
key = args[self.skiparg-2:]
try:
r = d[key]
self.stat_hit += 1
return r
except KeyError:
self.stat_miss += 1
value = d[key] = self.method(self2, cr, *args)
return value
except TypeError:
self.stat_err += 1
return self.method(self2, cr, *args)
def clear(self, self2, *args):
""" Remove *args entry from the cache or all keys if *args is undefined
"""
d = self.lru(self2)
if args:
try:
key = args[self.skiparg-2:]
del d[key]
self2.pool._any_cache_cleared = True
except KeyError:
pass
else:
d.clear()
self2.pool._any_cache_cleared = True
class ormcache_multi(ormcache):
def __init__(self, skiparg=2, size=8192, multi=3):
super(ormcache_multi,self).__init__(skiparg,size)
self.multi = multi - 2
def lookup(self, self2, cr, *args):
d = self.lru(self2)
args = list(args)
multi = self.multi
ids = args[multi]
r = {}
miss = []
for i in ids:
args[multi] = i
key = tuple(args[self.skiparg-2:])
try:
r[i] = d[key]
self.stat_hit += 1
except Exception:
self.stat_miss += 1
miss.append(i)
if miss:
args[multi] = miss
r.update(self.method(self2, cr, *args))
for i in miss:
args[multi] = i
key = tuple(args[self.skiparg-2:])
d[key] = r[i]
return r
class dummy_cache(object):
""" Cache decorator replacement to actually do no caching.
"""
def __init__(self, *l, **kw):
pass
def __call__(self, fn):
fn.clear_cache = self.clear
return fn
def clear(self, *l, **kw):
pass
if __name__ == '__main__':
class A():
@ormcache()
def m(self,a,b):
print "A::m(", self,a,b
return 1
@ormcache_multi(multi=3)
def n(self,cr,uid,ids):
print "m", self,cr,uid,ids
return dict([(i,i) for i in ids])
a=A()
r=a.m(1,2)
r=a.m(1,2)
r=a.n("cr",1,[1,2,3,4])
r=a.n("cr",1,[1,2])
print r
for i in a._ormcache:
print a._ormcache[i].d
a.n.clear_cache(a,1,1)
r=a.n("cr",1,[1,2])
print r
r=a.n("cr",1,[1,2])
# For backward compatibility
cache = ormcache
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | unknown | codeparrot/codeparrot-clean | ||
/* quirc -- QR-code recognition library
* Copyright (C) 2010-2012 Daniel Beer <dlbeer@gmail.com>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include <quirc_internal.h>
const struct quirc_version_info quirc_version_db[QUIRC_MAX_VERSION + 1] = {
{0},
{ /* Version 1 */
.data_bytes = 26,
.apat = {0},
.ecc = {
{.bs = 26, .dw = 16, .ns = 1},
{.bs = 26, .dw = 19, .ns = 1},
{.bs = 26, .dw = 9, .ns = 1},
{.bs = 26, .dw = 13, .ns = 1}
}
},
{ /* Version 2 */
.data_bytes = 44,
.apat = {6, 18, 0},
.ecc = {
{.bs = 44, .dw = 28, .ns = 1},
{.bs = 44, .dw = 34, .ns = 1},
{.bs = 44, .dw = 16, .ns = 1},
{.bs = 44, .dw = 22, .ns = 1}
}
},
{ /* Version 3 */
.data_bytes = 70,
.apat = {6, 22, 0},
.ecc = {
{.bs = 70, .dw = 44, .ns = 1},
{.bs = 70, .dw = 55, .ns = 1},
{.bs = 35, .dw = 13, .ns = 2},
{.bs = 35, .dw = 17, .ns = 2}
}
},
{ /* Version 4 */
.data_bytes = 100,
.apat = {6, 26, 0},
.ecc = {
{.bs = 50, .dw = 32, .ns = 2},
{.bs = 100, .dw = 80, .ns = 1},
{.bs = 25, .dw = 9, .ns = 4},
{.bs = 50, .dw = 24, .ns = 2}
}
},
{ /* Version 5 */
.data_bytes = 134,
.apat = {6, 30, 0},
.ecc = {
{.bs = 67, .dw = 43, .ns = 2},
{.bs = 134, .dw = 108, .ns = 1},
{.bs = 33, .dw = 11, .ns = 2},
{.bs = 33, .dw = 15, .ns = 2}
}
},
{ /* Version 6 */
.data_bytes = 172,
.apat = {6, 34, 0},
.ecc = {
{.bs = 43, .dw = 27, .ns = 4},
{.bs = 86, .dw = 68, .ns = 2},
{.bs = 43, .dw = 15, .ns = 4},
{.bs = 43, .dw = 19, .ns = 4}
}
},
{ /* Version 7 */
.data_bytes = 196,
.apat = {6, 22, 38, 0},
.ecc = {
{.bs = 49, .dw = 31, .ns = 4},
{.bs = 98, .dw = 78, .ns = 2},
{.bs = 39, .dw = 13, .ns = 4},
{.bs = 32, .dw = 14, .ns = 2}
}
},
{ /* Version 8 */
.data_bytes = 242,
.apat = {6, 24, 42, 0},
.ecc = {
{.bs = 60, .dw = 38, .ns = 2},
{.bs = 121, .dw = 97, .ns = 2},
{.bs = 40, .dw = 14, .ns = 4},
{.bs = 40, .dw = 18, .ns = 4}
}
},
{ /* Version 9 */
.data_bytes = 292,
.apat = {6, 26, 46, 0},
.ecc = {
{.bs = 58, .dw = 36, .ns = 3},
{.bs = 146, .dw = 116, .ns = 2},
{.bs = 36, .dw = 12, .ns = 4},
{.bs = 36, .dw = 16, .ns = 4}
}
},
{ /* Version 10 */
.data_bytes = 346,
.apat = {6, 28, 50, 0},
.ecc = {
{.bs = 69, .dw = 43, .ns = 4},
{.bs = 86, .dw = 68, .ns = 2},
{.bs = 43, .dw = 15, .ns = 6},
{.bs = 43, .dw = 19, .ns = 6}
}
},
{ /* Version 11 */
.data_bytes = 404,
.apat = {6, 30, 54, 0},
.ecc = {
{.bs = 80, .dw = 50, .ns = 1},
{.bs = 101, .dw = 81, .ns = 4},
{.bs = 36, .dw = 12, .ns = 3},
{.bs = 50, .dw = 22, .ns = 4}
}
},
{ /* Version 12 */
.data_bytes = 466,
.apat = {6, 32, 58, 0},
.ecc = {
{.bs = 58, .dw = 36, .ns = 6},
{.bs = 116, .dw = 92, .ns = 2},
{.bs = 42, .dw = 14, .ns = 7},
{.bs = 46, .dw = 20, .ns = 4}
}
},
{ /* Version 13 */
.data_bytes = 532,
.apat = {6, 34, 62, 0},
.ecc = {
{.bs = 59, .dw = 37, .ns = 8},
{.bs = 133, .dw = 107, .ns = 4},
{.bs = 33, .dw = 11, .ns = 12},
{.bs = 44, .dw = 20, .ns = 8}
}
},
{ /* Version 14 */
.data_bytes = 581,
.apat = {6, 26, 46, 66, 0},
.ecc = {
{.bs = 64, .dw = 40, .ns = 4},
{.bs = 145, .dw = 115, .ns = 3},
{.bs = 36, .dw = 12, .ns = 11},
{.bs = 36, .dw = 16, .ns = 11}
}
},
{ /* Version 15 */
.data_bytes = 655,
.apat = {6, 26, 48, 70, 0},
.ecc = {
{.bs = 65, .dw = 41, .ns = 5},
{.bs = 109, .dw = 87, .ns = 5},
{.bs = 36, .dw = 12, .ns = 11},
{.bs = 54, .dw = 24, .ns = 5}
}
},
{ /* Version 16 */
.data_bytes = 733,
.apat = {6, 26, 50, 74, 0},
.ecc = {
{.bs = 73, .dw = 45, .ns = 7},
{.bs = 122, .dw = 98, .ns = 5},
{.bs = 45, .dw = 15, .ns = 3},
{.bs = 43, .dw = 19, .ns = 15}
}
},
{ /* Version 17 */
.data_bytes = 815,
.apat = {6, 30, 54, 78, 0},
.ecc = {
{.bs = 74, .dw = 46, .ns = 10},
{.bs = 135, .dw = 107, .ns = 1},
{.bs = 42, .dw = 14, .ns = 2},
{.bs = 50, .dw = 22, .ns = 1}
}
},
{ /* Version 18 */
.data_bytes = 901,
.apat = {6, 30, 56, 82, 0},
.ecc = {
{.bs = 69, .dw = 43, .ns = 9},
{.bs = 150, .dw = 120, .ns = 5},
{.bs = 42, .dw = 14, .ns = 2},
{.bs = 50, .dw = 22, .ns = 17}
}
},
{ /* Version 19 */
.data_bytes = 991,
.apat = {6, 30, 58, 86, 0},
.ecc = {
{.bs = 70, .dw = 44, .ns = 3},
{.bs = 141, .dw = 113, .ns = 3},
{.bs = 39, .dw = 13, .ns = 9},
{.bs = 47, .dw = 21, .ns = 17}
}
},
{ /* Version 20 */
.data_bytes = 1085,
.apat = {6, 34, 62, 90, 0},
.ecc = {
{.bs = 67, .dw = 41, .ns = 3},
{.bs = 135, .dw = 107, .ns = 3},
{.bs = 43, .dw = 15, .ns = 15},
{.bs = 54, .dw = 24, .ns = 15}
}
},
{ /* Version 21 */
.data_bytes = 1156,
.apat = {6, 28, 50, 72, 92, 0},
.ecc = {
{.bs = 68, .dw = 42, .ns = 17},
{.bs = 144, .dw = 116, .ns = 4},
{.bs = 46, .dw = 16, .ns = 19},
{.bs = 50, .dw = 22, .ns = 17}
}
},
{ /* Version 22 */
.data_bytes = 1258,
.apat = {6, 26, 50, 74, 98, 0},
.ecc = {
{.bs = 74, .dw = 46, .ns = 17},
{.bs = 139, .dw = 111, .ns = 2},
{.bs = 37, .dw = 13, .ns = 34},
{.bs = 54, .dw = 24, .ns = 7}
}
},
{ /* Version 23 */
.data_bytes = 1364,
.apat = {6, 30, 54, 78, 102, 0},
.ecc = {
{.bs = 75, .dw = 47, .ns = 4},
{.bs = 151, .dw = 121, .ns = 4},
{.bs = 45, .dw = 15, .ns = 16},
{.bs = 54, .dw = 24, .ns = 11}
}
},
{ /* Version 24 */
.data_bytes = 1474,
.apat = {6, 28, 54, 80, 106, 0},
.ecc = {
{.bs = 73, .dw = 45, .ns = 6},
{.bs = 147, .dw = 117, .ns = 6},
{.bs = 46, .dw = 16, .ns = 30},
{.bs = 54, .dw = 24, .ns = 11}
}
},
{ /* Version 25 */
.data_bytes = 1588,
.apat = {6, 32, 58, 84, 110, 0},
.ecc = {
{.bs = 75, .dw = 47, .ns = 8},
{.bs = 132, .dw = 106, .ns = 8},
{.bs = 45, .dw = 15, .ns = 22},
{.bs = 54, .dw = 24, .ns = 7}
}
},
{ /* Version 26 */
.data_bytes = 1706,
.apat = {6, 30, 58, 86, 114, 0},
.ecc = {
{.bs = 74, .dw = 46, .ns = 19},
{.bs = 142, .dw = 114, .ns = 10},
{.bs = 46, .dw = 16, .ns = 33},
{.bs = 50, .dw = 22, .ns = 28}
}
},
{ /* Version 27 */
.data_bytes = 1828,
.apat = {6, 34, 62, 90, 118, 0},
.ecc = {
{.bs = 73, .dw = 45, .ns = 22},
{.bs = 152, .dw = 122, .ns = 8},
{.bs = 45, .dw = 15, .ns = 12},
{.bs = 53, .dw = 23, .ns = 8}
}
},
{ /* Version 28 */
.data_bytes = 1921,
.apat = {6, 26, 50, 74, 98, 122, 0},
.ecc = {
{.bs = 73, .dw = 45, .ns = 3},
{.bs = 147, .dw = 117, .ns = 3},
{.bs = 45, .dw = 15, .ns = 11},
{.bs = 54, .dw = 24, .ns = 4}
}
},
{ /* Version 29 */
.data_bytes = 2051,
.apat = {6, 30, 54, 78, 102, 126, 0},
.ecc = {
{.bs = 73, .dw = 45, .ns = 21},
{.bs = 146, .dw = 116, .ns = 7},
{.bs = 45, .dw = 15, .ns = 19},
{.bs = 53, .dw = 23, .ns = 1}
}
},
{ /* Version 30 */
.data_bytes = 2185,
.apat = {6, 26, 52, 78, 104, 130, 0},
.ecc = {
{.bs = 75, .dw = 47, .ns = 19},
{.bs = 145, .dw = 115, .ns = 5},
{.bs = 45, .dw = 15, .ns = 23},
{.bs = 54, .dw = 24, .ns = 15}
}
},
{ /* Version 31 */
.data_bytes = 2323,
.apat = {6, 30, 56, 82, 108, 134, 0},
.ecc = {
{.bs = 74, .dw = 46, .ns = 2},
{.bs = 145, .dw = 115, .ns = 13},
{.bs = 45, .dw = 15, .ns = 23},
{.bs = 54, .dw = 24, .ns = 42}
}
},
{ /* Version 32 */
.data_bytes = 2465,
.apat = {6, 34, 60, 86, 112, 138, 0},
.ecc = {
{.bs = 74, .dw = 46, .ns = 10},
{.bs = 145, .dw = 115, .ns = 17},
{.bs = 45, .dw = 15, .ns = 19},
{.bs = 54, .dw = 24, .ns = 10}
}
},
{ /* Version 33 */
.data_bytes = 2611,
.apat = {6, 30, 58, 86, 114, 142, 0},
.ecc = {
{.bs = 74, .dw = 46, .ns = 14},
{.bs = 145, .dw = 115, .ns = 17},
{.bs = 45, .dw = 15, .ns = 11},
{.bs = 54, .dw = 24, .ns = 29}
}
},
{ /* Version 34 */
.data_bytes = 2761,
.apat = {6, 34, 62, 90, 118, 146, 0},
.ecc = {
{.bs = 74, .dw = 46, .ns = 14},
{.bs = 145, .dw = 115, .ns = 13},
{.bs = 46, .dw = 16, .ns = 59},
{.bs = 54, .dw = 24, .ns = 44}
}
},
{ /* Version 35 */
.data_bytes = 2876,
.apat = {6, 30, 54, 78, 102, 126, 150},
.ecc = {
{.bs = 75, .dw = 47, .ns = 12},
{.bs = 151, .dw = 121, .ns = 12},
{.bs = 45, .dw = 15, .ns = 22},
{.bs = 54, .dw = 24, .ns = 39}
}
},
{ /* Version 36 */
.data_bytes = 3034,
.apat = {6, 24, 50, 76, 102, 128, 154},
.ecc = {
{.bs = 75, .dw = 47, .ns = 6},
{.bs = 151, .dw = 121, .ns = 6},
{.bs = 45, .dw = 15, .ns = 2},
{.bs = 54, .dw = 24, .ns = 46}
}
},
{ /* Version 37 */
.data_bytes = 3196,
.apat = {6, 28, 54, 80, 106, 132, 158},
.ecc = {
{.bs = 74, .dw = 46, .ns = 29},
{.bs = 152, .dw = 122, .ns = 17},
{.bs = 45, .dw = 15, .ns = 24},
{.bs = 54, .dw = 24, .ns = 49}
}
},
{ /* Version 38 */
.data_bytes = 3362,
.apat = {6, 32, 58, 84, 110, 136, 162},
.ecc = {
{.bs = 74, .dw = 46, .ns = 13},
{.bs = 152, .dw = 122, .ns = 4},
{.bs = 45, .dw = 15, .ns = 42},
{.bs = 54, .dw = 24, .ns = 48}
}
},
{ /* Version 39 */
.data_bytes = 3532,
.apat = {6, 26, 54, 82, 110, 138, 166},
.ecc = {
{.bs = 75, .dw = 47, .ns = 40},
{.bs = 147, .dw = 117, .ns = 20},
{.bs = 45, .dw = 15, .ns = 10},
{.bs = 54, .dw = 24, .ns = 43}
}
},
{ /* Version 40 */
.data_bytes = 3706,
.apat = {6, 30, 58, 86, 114, 142, 170},
.ecc = {
{.bs = 75, .dw = 47, .ns = 18},
{.bs = 148, .dw = 118, .ns = 19},
{.bs = 45, .dw = 15, .ns = 20},
{.bs = 54, .dw = 24, .ns = 34}
}
}
}; | c | github | https://github.com/opencv/opencv | 3rdparty/quirc/src/version_db.c |
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.dev/license
*/
import {FilterFn, FilterFnGenerator, FilterMatch} from './filter.component';
//
// Types & Constants
//
/** Categories emitted by the directive-forest filter lexer. */
type TokenType =
  | 'opening_bracket'
  | 'closing_bracket'
  | 'chevron_left'
  | 'chevron_right'
  | 'slash'
  | 'space'
  | 'text';

/** A single lexeme: its category, raw text, and start offset in the input string. */
interface Token {
  type: TokenType;
  value: string;
  idx: number;
}

/** Characters that end a `text` run; each becomes a one-character token of its own. */
const TERMINAL_CHAR = ['[', ']', '<', '>', '/', ' '];

/** Terminal character → token type (everything else lexes as 'text'). */
const CHAR_TO_TOKEN: {[key: string]: TokenType} = {
  '[': 'opening_bracket',
  ']': 'closing_bracket',
  '<': 'chevron_left',
  '>': 'chevron_right',
  '/': 'slash',
  ' ': 'space',
};

/** A parsed text fragment plus the offset in the original input where it starts. */
interface ParsedValue {
  value: string;
  idx: number;
}

/**
 * Structured filter query, e.g. `cmp[dir1][dir2]<el>`:
 * bare text is the component, bracketed text the directives,
 * text after `<` the element (see parseDirectiveForestFilter).
 */
export interface ParsedFilter {
  component?: ParsedValue;
  directives: ParsedValue[];
  element?: ParsedValue;
}
//
// Helpers
//
/** Project a lexer token onto the {value, idx} pair exposed in parser output. */
function toParserValue(token: Token): ParsedValue {
  const {value, idx} = token;
  return {value, idx};
}
/**
 * Locates `filter.value` inside `target.value`.
 * Returns the absolute match span (offset by `target.idx`), or null when the
 * substring is absent or either side is missing.
 */
function checkForMatch(filter?: ParsedValue, target?: ParsedValue): FilterMatch | null {
  if (!(filter && target)) {
    return null;
  }
  const offset = target.value.indexOf(filter.value);
  if (offset === -1) {
    return null;
  }
  const matchStart = target.idx + offset;
  return {
    startIdx: matchStart,
    endIdx: matchStart + filter.value.length,
  };
}
//
// Lexer/Tokenizer
//
/**
 * Splits the raw filter string into tokens. Terminal characters
 * ([, ], <, >, /, space) become single-character tokens; maximal runs of any
 * other characters become `text` tokens carrying the index of their first char.
 */
export function tokenizeDirectiveForestFilter(text: string): Token[] {
  const out: Token[] = [];
  let pending = '';
  // Flush the accumulated text run (if any) as one `text` token ending at `end`.
  const flushPending = (end: number) => {
    if (!pending) {
      return;
    }
    out.push({type: 'text', value: pending, idx: end - pending.length});
    pending = '';
  };
  for (let pos = 0; pos < text.length; pos++) {
    const ch = text[pos];
    if (!TERMINAL_CHAR.includes(ch)) {
      pending += ch;
      continue;
    }
    flushPending(pos);
    out.push({type: CHAR_TO_TOKEN[ch], value: ch, idx: pos});
  }
  flushPending(text.length);
  return out;
}
//
// Parser
//
/**
 * Parses lexer tokens into a ParsedFilter: the first bare `text` token becomes
 * the component name, text inside `[` … `]` contributes directive names, and
 * the first `text` token after `<` becomes the element name.
 */
export function parseDirectiveForestFilter(tokens: Token[]): ParsedFilter {
  const filter: ParsedFilter = {
    directives: [],
  };
  if (!tokens.length) {
    return filter;
  }
  let tokenIdx = 0;
  let token: Token | undefined;
  const nextToken = () => tokens[tokenIdx++];
  const hasTokens = () => tokenIdx < tokens.length;
  // `token` holds the most recently consumed token at call time.
  const parseComponent = () => {
    return token ? toParserValue(token) : undefined;
  };
  // Consume up to (and including) the next `]`, collecting every text token seen.
  const parseDirectives = () => {
    const directives = [];
    while (hasTokens()) {
      token = nextToken();
      if (token.type === 'text') {
        directives.push(toParserValue(token));
      }
      if (token.type === 'closing_bracket') {
        break;
      }
    }
    return directives;
  };
  // Return the first text token following `<`, or undefined if none remains.
  const parseElement = () => {
    while (hasTokens()) {
      token = nextToken();
      if (token.type === 'text') {
        return toParserValue(token);
      }
    }
    return;
  };
  while (hasTokens()) {
    token = nextToken();
    switch (token.type) {
      case 'opening_bracket':
        filter.directives = filter.directives.concat(parseDirectives());
        break;
      case 'text':
        // NOTE(review): a later bare text token overwrites an earlier
        // component — presumably last-one-wins by design; confirm.
        filter.component = parseComponent();
        break;
      case 'chevron_left':
        filter.element = parseElement();
        break;
    }
  }
  return filter;
}
//
// `FilterFn` Generator
//
/**
 * Generates a `FilterFn`, that performs token matching, for the directive-forest filter.
 *
 * The filter string is lexed/parsed once up front; the returned FilterFn
 * re-parses each candidate `target` and reports match spans (for highlighting).
 */
export const directiveForestFilterFnGenerator: FilterFnGenerator = (filter: string): FilterFn => {
  const filterTokens = tokenizeDirectiveForestFilter(filter.toLowerCase());
  const parsedFilter = parseDirectiveForestFilter(filterTokens);
  return (target: string) => {
    if (!filter) {
      return [];
    }
    if (!parsedFilter.element && !parsedFilter.component && !parsedFilter.directives.length) {
      // Fallback – standard string search.
      const match = checkForMatch(
        {value: filter.toLowerCase(), idx: 0},
        {value: target.toLowerCase(), idx: 0},
      );
      return match ? [match] : [];
    }
    const matches = [];
    const targetTokens = tokenizeDirectiveForestFilter(target.toLowerCase());
    const parsedTarget = parseDirectiveForestFilter(targetTokens);
    if (parsedFilter.element) {
      const elementMatch = checkForMatch(parsedFilter.element, parsedTarget.element);
      // The element cannot have component and/or directive(s).
      if (elementMatch) {
        return [elementMatch];
      }
      // NOTE(review): on an element miss we fall through to component /
      // directive matching rather than returning [] — confirm intentional.
    }
    if (parsedFilter.component) {
      const componentMatch = checkForMatch(parsedFilter.component, parsedTarget.component);
      if (!componentMatch) {
        // A specified component is mandatory.
        return [];
      }
      matches.push(componentMatch);
    }
    if (parsedFilter.directives.length) {
      let matchesCount = 0;
      for (const targetDir of parsedTarget.directives) {
        for (const filterDir of parsedFilter.directives) {
          const dirMatch = checkForMatch(filterDir, targetDir);
          if (dirMatch) {
            matches.push(dirMatch);
            matchesCount++;
          }
        }
      }
      // Should have full directives match.
      if (matchesCount < parsedFilter.directives.length) {
        return [];
      }
    }
    return matches;
  };
};
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the Shiboken Python Bindings Generator project.
#
# Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
#
# Contact: PySide team <contact@pyside.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# version 2.1 as published by the Free Software Foundation. Please
# review the following information to ensure the GNU Lesser General
# Public License version 2.1 requirements will be met:
# http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
# #
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
'''Test cases for multiple inheritance'''
import sys
import unittest
from sample import *
class SimpleUseCase(ObjectType, Str):
    """Python class deriving from two unrelated C++ types (ObjectType, Str)."""

    def __init__(self, name):
        # Each C++ base is initialized explicitly; no cooperative super() here.
        ObjectType.__init__(self)
        Str.__init__(self, name)
class SimpleUseCaseReverse(Str, ObjectType):
    """Same as SimpleUseCase but with the C++ base-class order reversed."""

    def __init__(self, name):
        ObjectType.__init__(self)
        Str.__init__(self, name)
class SimpleUseCase2(SimpleUseCase):
    """One extra level of Python-side inheritance over the multiple C++ bases."""

    def __init__(self, name):
        SimpleUseCase.__init__(self, name)
class ComplexUseCase(SimpleUseCase2, Point):
    """Mixes the derived Python class with a third C++ base (Point)."""

    def __init__(self, name):
        SimpleUseCase2.__init__(self, name)
        Point.__init__(self)
class ComplexUseCaseReverse(Point, SimpleUseCase2):
    """ComplexUseCase with Point listed first in the MRO."""

    def __init__(self, name):
        SimpleUseCase2.__init__(self, name)
        Point.__init__(self)
class MultipleCppDerivedTest(unittest.TestCase):
    """Instantiation checks for Python classes deriving from multiple C++ types."""

    def testInstanciation(self):
        # The object must behave both as a Str (== "Hi") and as an ObjectType
        # (objectName round-trips through the C++ side).
        s = SimpleUseCase("Hi")
        self.assertEqual(s, "Hi")
        s.setObjectName(s)
        self.assertEqual(s.objectName(), "Hi")

    def testInstanciation2(self):
        s = SimpleUseCase2("Hi")
        self.assertEqual(s, "Hi")
        s.setObjectName(s)
        self.assertEqual(s.objectName(), "Hi")

    def testComplexInstanciation(self):
        # Adds the Point base: x must be settable/readable independently.
        c = ComplexUseCase("Hi")
        self.assertEqual(c, "Hi")
        c.setObjectName(c)
        self.assertEqual(c.objectName(), "Hi")
        c.setX(2);
        self.assertEqual(c.x(), 2)
class MultipleCppDerivedReverseTest(unittest.TestCase):
    """Same checks with the base-class declaration order reversed."""

    def testInstanciation(self):
        s = SimpleUseCaseReverse("Hi")
        self.assertEqual(s, "Hi")
        s.setObjectName(s)
        self.assertEqual(s.objectName(), "Hi")

    def testInstanciation2(self):
        s = SimpleUseCase2("Hi")
        self.assertEqual(s, "Hi")
        s.setObjectName(s)
        self.assertEqual(s.objectName(), "Hi")

    def testComplexInstanciation(self):
        # With Point first in the MRO, equality dispatches to Point.__eq__.
        c = ComplexUseCaseReverse("Hi")
        c.setObjectName(c)
        self.assertEqual(c.objectName(), "Hi")
        c.setX(2);
        self.assertEqual(c, Point(2, 0))


if __name__ == '__main__':
    unittest.main()
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for constant ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
from tensorflow.lite.testing.zip_test_utils import TF_TYPE_INFO
# This function tests various TensorFLow functions that generates Const op,
# including `tf.ones`, `tf.zeros` and random functions.
@register_make_test_function()
def make_constant_tests(options):
  """Make a set of tests to do constant ops.

  Varies dtype, shape, whether the constant is additionally exported as a
  graph output, and whether an unread placeholder input is present.

  Args:
    options: zip-test options object forwarded to `make_zip_of_tests`.
  """
  test_parameters = [{
      "dtype": [tf.float32, tf.int32],
      "input_shape": [[], [1], [2], [1, 1, 1, 1], [2, 2, 2, 2]],
      "constant_is_also_output": [True, False],
      # This is a regression test for a bug where Toco rejects models with
      # unread inputs.
      "has_unread_input": [True, False],
  }]

  def build_graph(parameters):
    """Build a constant graph given `parameters`."""
    # The placeholder ties the constant to a real runtime input via
    # tf.maximum, so the graph isn't folded down to a single Const.
    dummy_input = tf.compat.v1.placeholder(
        dtype=parameters["dtype"],
        name="input1",
        shape=parameters["input_shape"])
    constant = tf.constant(
        create_tensor_data(parameters["dtype"], parameters["input_shape"]))
    outputs = [tf.maximum(dummy_input, constant)]
    if parameters["constant_is_also_output"]:
      outputs.append(constant)
    inputs = [dummy_input]
    if parameters["has_unread_input"]:
      unread_input = tf.compat.v1.placeholder(
          dtype=parameters["dtype"],
          name="unread_input",
          shape=parameters["input_shape"])
      inputs.append(unread_input)
    return inputs, outputs

  def build_inputs(parameters, sess, inputs, outputs):
    # Only the first input is ever read; feed it all zeros.
    dummy_input = np.zeros(
        parameters["input_shape"], dtype=TF_TYPE_INFO[parameters["dtype"]][0])
    return [dummy_input], sess.run(outputs, feed_dict={inputs[0]: dummy_input})

  make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
# Copyright 1999 by Jeffrey Chang. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Index.py
This module provides a way to create indexes to text files.
Classes:
Index Dictionary-like class used to store index information.
_ShelveIndex An Index class based on the shelve module.
_InMemoryIndex An in-memory Index class.
"""
import os
import array
import shelve
try:
import cPickle as pickle # Only available under Python 2
except ImportError:
import pickle # Python 3
class _ShelveIndex(dict):
    """An index file wrapped around shelve.
    """
    # Without a good dbm module installed, this is pretty slow and
    # generates large files.  When generating an index on a FASTA-
    # formatted file with 82000 sequences (37Mb), the
    # index 'dat' file is 42Mb and 'dir' file is 8Mb.

    # On-disk format version; bumped whenever the layout changes.
    __version = 2
    __version_key = '__version'

    def __init__(self, indexname, truncate=None):
        """Open the shelf at *indexname*; with *truncate*, delete and recreate it."""
        dict.__init__(self)
        try:
            if truncate:
                # In python 1.52 and before, dumbdbm (under shelve)
                # doesn't clear the old database.
                files = [indexname + '.dir',
                         indexname + '.dat',
                         indexname + '.bak'
                         ]
                for file in files:
                    if os.path.exists(file):
                        os.unlink(file)
                # Deliberately jump into the except-branch below, which
                # creates a fresh shelf with flag='n'.
                raise Exception("open a new shelf")
            self.data = shelve.open(indexname, flag='r')
        except Exception:  # TODO: Which exception?
            # No database exists.
            self.data = shelve.open(indexname, flag='n')
            self.data[self.__version_key] = self.__version
        else:
            # Check to make sure the database is the correct version.
            version = self.data.get(self.__version_key)
            if version is None:
                raise IOError("Unrecognized index format")
            elif version != self.__version:
                raise IOError("Version %s doesn't match my version %s"
                              % (version, self.__version))

    def __del__(self):
        # Close the shelf only if __init__ got far enough to create it.
        if 'data' in self.__dict__:
            self.data.close()
class _InMemoryIndex(dict):
    """This creates an in-memory index file.

    The whole index lives in memory as a plain dict and is written back to
    ``self._indexname`` on garbage collection, but only if it was modified.
    """
    # File Format:
    #   version
    #   key value
    #   [...]
    # Every key/value is the comma-separated list of the signed byte values
    # of its pickle, so entries never contain whitespace (see _tostr).
    __version = 3
    __version_key = '__version'

    def __init__(self, indexname, truncate=None):
        """Load the index stored at *indexname*, optionally truncating it first."""
        self._indexname = indexname
        dict.__init__(self)
        self.__changed = 0  # the index hasn't changed
        # Remove the database if truncate is true.
        if truncate and os.path.exists(indexname):
            os.unlink(indexname)
            self.__changed = 1
        # Load the database if it exists
        if os.path.exists(indexname):
            with open(indexname) as handle:
                version = self._toobj(handle.readline().rstrip())
                if version != self.__version:
                    raise IOError("Version %s doesn't match my version %s"
                                  % (version, self.__version))
                for line in handle:
                    key, value = line.split()
                    key, value = self._toobj(key), self._toobj(value)
                    self[key] = value
            self.__changed = 0

    def update(self, other):
        """Merge *other* into the index and mark it dirty.

        Bug fix: the parameter used to be named ``dict``, shadowing the
        builtin, so ``dict.update(self, dict)`` actually called
        ``other.update(self, other)`` and raised TypeError instead of
        merging into ``self``.
        """
        self.__changed = 1
        dict.update(self, other)

    def __setitem__(self, key, value):
        self.__changed = 1
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        self.__changed = 1
        dict.__delitem__(self, key)

    def clear(self):
        self.__changed = 1
        dict.clear(self)

    def __del__(self):
        # Persist the index on destruction, but only if it was modified.
        if self.__changed:
            with open(self._indexname, 'w') as handle:
                handle.write("%s\n" % self._tostr(self.__version))
                for key, value in self.items():
                    handle.write("%s %s\n" %
                                 (self._tostr(key), self._tostr(value)))

    def _tostr(self, obj):
        # I need a representation of the object that's saveable to a file
        # that uses whitespace as delimiters.  So the object is pickled and
        # each (signed) byte of the pickle is written out as a decimal
        # integer, joined with commas.  Not compact, but fast enough.
        s = pickle.dumps(obj)
        intlist = array.array('b', s)
        return ','.join(str(i) for i in intlist)

    def _toobj(self, text):
        """Inverse of _tostr: rebuild the pickle bytes and unpickle them.

        Bug fix: the old code joined ``chr(i)`` into a str, which fails on
        Python 3 (pickle.loads requires bytes, and chr() rejects the
        negative values produced by the signed-byte encoding).  Rebuilding
        through the array's raw buffer round-trips on both Python 2 and 3.
        """
        intlist = array.array('b', [int(i) for i in text.split(',')])
        if hasattr(intlist, 'tobytes'):
            data = intlist.tobytes()
        else:  # Python 2: array.array has no tobytes()
            data = intlist.tostring()
        return pickle.loads(data)
Index = _InMemoryIndex | unknown | codeparrot/codeparrot-clean | ||
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: add CourseOverview.cert_html_view_enabled (boolean)."""

    def forwards(self, orm):
        # Adding field 'CourseOverview.cert_html_view_enabled'
        # The default value for the cert_html_view_enabled column is False.
        # However, for courses in the table for which cert_html_view_enabled
        # should be True, this would be invalid. So, we must clear the
        # table before adding the new column (the table is a cache and is
        # repopulated on demand).
        db.clear_table('course_overviews_courseoverview')
        db.add_column('course_overviews_courseoverview', 'cert_html_view_enabled',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'CourseOverview.cert_html_view_enabled'
        db.delete_column('course_overviews_courseoverview', 'cert_html_view_enabled')

    # Frozen ORM description of the model as of this migration (South metadata).
    models = {
        'course_overviews.courseoverview': {
            'Meta': {'object_name': 'CourseOverview'},
            '_location': ('xmodule_django.models.UsageKeyField', [], {'max_length': '255'}),
            '_pre_requisite_courses_json': ('django.db.models.fields.TextField', [], {}),
            'advertised_start': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'cert_html_view_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'cert_name_long': ('django.db.models.fields.TextField', [], {}),
            'cert_name_short': ('django.db.models.fields.TextField', [], {}),
            'certificates_display_behavior': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'certificates_show_before_end': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'course_image_url': ('django.db.models.fields.TextField', [], {}),
            'days_early_for_beta': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
            'display_name': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'display_number_with_default': ('django.db.models.fields.TextField', [], {}),
            'display_org_with_default': ('django.db.models.fields.TextField', [], {}),
            'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'end_of_course_survey_url': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'facebook_url': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'has_any_active_web_certificate': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'primary_key': 'True', 'db_index': 'True'}),
            'lowest_passing_grade': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'}),
            'mobile_available': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'social_sharing_url': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'visible_to_staff_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        }
    }

    complete_apps = ['course_overviews']
-- float4 check
-- NOTE(review): this is a pg_regress script; any textual change here
-- (comments included) must be mirrored in the matching expected/*.out file.
CREATE TABLE float4tmp (a float4);
\copy float4tmp from 'data/float4.data'
SET enable_seqscan=on;
-- Baseline results via sequential scan, one query per btree_gist operator.
SELECT count(*) FROM float4tmp WHERE a < -179.0;
SELECT count(*) FROM float4tmp WHERE a <= -179.0;
SELECT count(*) FROM float4tmp WHERE a = -179.0;
SELECT count(*) FROM float4tmp WHERE a >= -179.0;
SELECT count(*) FROM float4tmp WHERE a > -179.0;
SELECT a, a <-> '-179.0' FROM float4tmp ORDER BY a <-> '-179.0' LIMIT 3;
CREATE INDEX float4idx ON float4tmp USING gist ( a );
SET enable_seqscan=off;
-- Same queries again; with seqscan disabled they must use the GiST index
-- and return identical results.
SELECT count(*) FROM float4tmp WHERE a < -179.0::float4;
SELECT count(*) FROM float4tmp WHERE a <= -179.0::float4;
SELECT count(*) FROM float4tmp WHERE a = -179.0::float4;
SELECT count(*) FROM float4tmp WHERE a >= -179.0::float4;
SELECT count(*) FROM float4tmp WHERE a > -179.0::float4;
-- Verify the distance operator's ordered scan is planned through the index.
EXPLAIN (COSTS OFF)
SELECT a, a <-> '-179.0' FROM float4tmp ORDER BY a <-> '-179.0' LIMIT 3;
SELECT a, a <-> '-179.0' FROM float4tmp ORDER BY a <-> '-179.0' LIMIT 3;
// This file was automatically generated from shared-mutable-state-and-concurrency.md by Knit tool. Do not edit.
package kotlinx.coroutines.guide.exampleSync04
import kotlinx.coroutines.*
import kotlin.system.*
/**
 * Launches 100 coroutines that each invoke [action] 1000 times (100_000
 * invocations total) and prints the wall-clock time taken.
 */
suspend fun massiveRun(action: suspend () -> Unit) {
    val n = 100  // number of coroutines to launch
    val k = 1000 // times an action is repeated by each coroutine
    val time = measureTimeMillis {
        coroutineScope { // scope for coroutines
            repeat(n) {
                launch {
                    repeat(k) { action() }
                }
            }
        }
    }
    println("Completed ${n * k} actions in $time ms")
}
// Thread confinement: every mutation of `counter` is funnelled through this
// single-threaded dispatcher, so no additional synchronization is required.
val counterContext = newSingleThreadContext("CounterContext")
var counter = 0

fun main() = runBlocking {
    withContext(Dispatchers.Default) {
        massiveRun {
            // confine each increment to a single-threaded context
            withContext(counterContext) {
                counter++
            }
        }
    }
    // Always 100000: the confined context serializes the increments.
    println("Counter = $counter")
}
/*
* Copyright (C) 2015 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect.testing.testers;
import static com.google.common.collect.testing.features.CollectionFeature.KNOWN_ORDER;
import static com.google.common.collect.testing.features.CollectionSize.ZERO;
import static com.google.common.collect.testing.features.MapFeature.ALLOWS_NULL_KEYS;
import static com.google.common.collect.testing.features.MapFeature.ALLOWS_NULL_VALUES;
import static java.util.Arrays.asList;
import com.google.common.annotations.GwtCompatible;
import com.google.common.collect.testing.AbstractMapTester;
import com.google.common.collect.testing.Helpers;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.MapFeature;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.junit.Ignore;
/**
* A generic JUnit test which tests {@link Map#forEach}. Can't be invoked directly; please see
* {@link com.google.common.collect.testing.MapTestSuiteBuilder}.
*
* @author Louis Wasserman
*/
@GwtCompatible
@Ignore("test runners must not instantiate and run this directly, only via suites we build")
// @Ignore affects the Android test runner, which respects JUnit 4 annotations on JUnit 3 tests.
@SuppressWarnings("JUnit4ClassUsedInJUnit3")
@IgnoreJRERequirement // We opt into library desugaring for our tests.
public class MapForEachTester<K, V> extends AbstractMapTester<K, V> {
  /** With a documented order, forEach must visit entries in exactly that order. */
  @CollectionFeature.Require(KNOWN_ORDER)
  public void testForEachKnownOrder() {
    List<Entry<K, V>> entries = new ArrayList<>();
    getMap().forEach((k, v) -> entries.add(entry(k, v)));
    assertEquals(getOrderedElements(), entries);
  }

  /** Without a defined order, forEach must still visit every entry exactly once. */
  @CollectionFeature.Require(absent = KNOWN_ORDER)
  public void testForEachUnknownOrder() {
    List<Entry<K, V>> entries = new ArrayList<>();
    getMap().forEach((k, v) -> entries.add(entry(k, v)));
    Helpers.assertEqualIgnoringOrder(getSampleEntries(), entries);
  }

  /** forEach must surface a null key when the map permits null keys. */
  @MapFeature.Require(ALLOWS_NULL_KEYS)
  @CollectionSize.Require(absent = ZERO)
  public void testForEach_nullKeys() {
    initMapWithNullKey();
    List<Entry<K, V>> expectedEntries = asList(createArrayWithNullKey());
    List<Entry<K, V>> entries = new ArrayList<>();
    getMap().forEach((k, v) -> entries.add(entry(k, v)));
    Helpers.assertEqualIgnoringOrder(expectedEntries, entries);
  }

  /** forEach must surface a null value when the map permits null values. */
  @MapFeature.Require(ALLOWS_NULL_VALUES)
  @CollectionSize.Require(absent = ZERO)
  public void testForEach_nullValues() {
    initMapWithNullValue();
    List<Entry<K, V>> expectedEntries = asList(createArrayWithNullValue());
    List<Entry<K, V>> entries = new ArrayList<>();
    getMap().forEach((k, v) -> entries.add(entry(k, v)));
    Helpers.assertEqualIgnoringOrder(expectedEntries, entries);
  }
}
# written in python 3.6.1
#-*- coding: utf-8 -*-
from urllib.request import urlopen
import json
import string
import re
from bs4 import BeautifulSoup
import logging
import time
# Output and log locations for the scrape run.
FILE_PATH = "./boxofficemojo.com/movie_data.txt"
LOG_PATH = "./boxofficemojo.com/scraping.log"
logging.basicConfig(filename=LOG_PATH,level=logging.DEBUG)
# Column order of the pipe-delimited output rows (consumed by save_to_file).
# NOTE(review): "TotalGross" appears twice; save_to_file writes the same
# value into both matching columns — confirm that duplication is intended.
Keys = ["Name", "URL", "Genre","Runtime", "Rating", "MovieRanking"
        , "PercentageofTotalGross", "WidestRelease", "CloseDate", "InRelease", "TotalGross"
        , "Distributor", "Budget", "Domestic_Gross", "Domestic_Percentage"
        , "Foreign_Gross", "Foreign_Percentage", "Worldwide_Gross", "OpeningWeekend"
        , "Countryclicktoviewweekendbreakdown", "Dist", "ReleaseDate"
        , "OpeningWknd", "ofTotal", "TotalGross", "AsOf"]
def add_empty_data(arrData, count):
    """Pad *arrData* in place with *count* single-space placeholder strings.

    Returns the same list so callers can use the result inline.
    (Idiom fix: list.extend with a repeated literal replaces the manual
    append loop; behavior is unchanged.)
    """
    arrData.extend([" "] * count)
    return arrData
def remove_special_chars(dictData):
    """Return a copy of *dictData* whose keys have all non-word characters
    (anything regex ``\\W`` matches) stripped out. Values are untouched; if
    two cleaned keys collide, the later one wins, as with any dict build.
    """
    return {re.sub(r'\W+', '', key): value for key, value in dictData.items()}
def save_to_json(filePath, dictData, countriesData=None):
    """Append *dictData* (merged with optional *countriesData*) to *filePath* as JSON.

    Keys are normalised through remove_special_chars() first.

    Bug fix: the old code called remove_special_chars(countriesData) before
    checking it for None, raising TypeError whenever no per-country data was
    supplied.  Also writes a trailing newline so successive appends produce
    one JSON object per line instead of an unparseable concatenation.
    """
    dictData = remove_special_chars(dictData)
    if countriesData:
        countriesData = remove_special_chars(countriesData)
        merged = dict(dictData)
        merged.update(countriesData)
        dictData = merged
    with open(filePath, "a") as outfile:
        json.dump(dictData, outfile, ensure_ascii=False)
        outfile.write("\n")  # keep the file line-delimited (JSON Lines)
def write_header(filePath):
    """Append the pipe-delimited header row (the Keys columns) to *filePath*.

    Idiom fix: use a with-statement so the file handle is closed even if a
    write fails; output bytes are identical to the original implementation.
    """
    with open(filePath, "ab") as text_file:
        for header in Keys:
            text_file.write((header + u"|").encode('utf-8'))
        text_file.write("\n".encode('utf-8'))
def save_to_file(filePath, dictData, countriesData=None):
    """Append one pipe-delimited row to *filePath*, columns ordered by Keys.

    Keys of *dictData* (and *countriesData*, when given) are normalised via
    remove_special_chars(); values whose cleaned key matches an entry in Keys
    land in that column, all other columns stay a single space.

    Improvements over the original: the two separate ``if countriesData``
    checks are collapsed into one, and the file handle is managed with a
    with-statement so it is closed even if a write fails.
    """
    dictData = remove_special_chars(dictData)
    if countriesData:
        countriesData = remove_special_chars(countriesData)
        merged = dict(dictData)
        merged.update(countriesData)
        dictData = merged
    # One slot per output column; unset columns serialize as a single space.
    arranged = [" "] * 50
    for key, value in dictData.items():
        for i, k in enumerate(Keys):
            if key == k:
                arranged[i] = value
    with open(filePath, "ab") as text_file:
        for data in arranged:
            text_file.write((data + u"|").encode('utf-8'))
        text_file.write("\n".encode('utf-8'))
def get_total_lifetime_grosses(link, arrData):
    """Scrape a movie's main page for lifetime grosses and the domestic summary.

    Results are written into *arrData* (mutated in place and returned).
    """
    url = "http://www.boxofficemojo.com"+ link
    page = urlopen(url)
    soup = BeautifulSoup(page, "lxml")
    # Assume that domestic info is from USA
    arrData['Countryclicktoviewweekendbreakdown']= "USA"
    #print(main_tbl)
    tables = soup.find_all('table', attrs={'border': '0' , 'cellspacing':'0', 'cellpadding':'0' , 'width':'100%'})
    #print( len(tables))
    #td_count = 9
    if len(tables) == 4:
        #print(tables[3]) # Total lifetime grosses
        # NOTE(review): the second argument is a *set*, not a dict —
        # {"class": "mp_box_tab"} was probably intended; confirm against bs4.
        mp_boxes= tables[3].find_all("div", {"class", "mp_box_tab"})
        a= len(mp_boxes)
        for box in mp_boxes:
            if(box.text == "Total Lifetime Grosses"):
                div_content= box.findNext('div')
                trs = div_content.find_all('tr')
                for tr in trs:
                    # Rows look like: label | gross | percentage.
                    tds = tr.find_all('td')
                    if len(tds) == 3:
                        if tds[0].text.strip() == "Domestic:":
                            arrData["Total Gross"] = tds[1].text.strip()
                            arrData["% ofTotal"] = tds[2].text.strip()
                        arrData[tds[0].text.strip()+"_Gross"] = tds[1].text.strip()
                        arrData[tds[0].text.strip()+"_Percentage"] = tds[2].text.strip()
            if(box.text == "Domestic Summary"):
                div_content = box.findNext('div')
                DS_tables = div_content.find_all('table', attrs = { 'border': '0' , 'cellspacing':'0', 'cellpadding':'0'})
                for DS_table in DS_tables:
                    DS_trs = DS_table.find_all('tr')
                    for DS_tr in DS_trs:
                        DS_tr_title = DS_tr.td.text.strip()
                        # The "\xa0" variants handle the non-breaking spaces the site uses.
                        if(DS_tr_title == "Opening\xa0Weekend:") or (DS_tr_title == "Opening Weekend:"):
                            DS_tr_content = DS_tr.td.findNext('td')
                            if DS_tr_content:
                                arrData["Opening Weekend"] = DS_tr_content.text.strip()
                                arrData["OpeningWknd"] = DS_tr_content.text.strip()
                        elif "(#" in DS_tr_title:
                            arrData['Movie Ranking'] = DS_tr_title
                        elif "%\xa0of\xa0Total\xa0Gross" in DS_tr_title or "% of Total Gross" in DS_tr_title:
                            DS_tr_content = DS_tr.td.findNext('td')
                            if DS_tr_content:
                                arrData['Percentage of Total Gross'] = DS_tr_content.text.strip()
                        elif DS_tr_title == "Widest\xa0Release:" or DS_tr_title == "Widest Release:":
                            DS_tr_content = DS_tr.td.findNext('td')
                            if DS_tr_content:
                                arrData['Widest Release'] = DS_tr_content.text.strip() # 14.
                        elif DS_tr_title == "Close\xa0Date:" or DS_tr_title == "Close Date:":
                            DS_tr_content = DS_tr.td.findNext('td')
                            if DS_tr_content:
                                arrData['Close Date'] = DS_tr_content.text.strip() # 15.
                        elif DS_tr_title == "In\xa0Release:" or DS_tr_title == "In Release:":
                            DS_tr_content = DS_tr.td.findNext('td')
                            if DS_tr_content:
                                arrData['In Release'] = DS_tr_content.text.strip() # 15.
            if(box.text == "The Players"):
                # Cast/crew box is currently ignored.
                #print(box.findNext('div'))
                pass
    return arrData
def get_movie_foreign(link, arrData):
    """Scrape the "&page=intl" tab and append one output row per country.

    Each foreign-market table row is merged with *arrData* and written via
    save_to_file().  Any scraping error is logged and swallowed; *arrData*
    is always returned.
    """
    try:
        eachCountry = {}
        ColumnHeaders= []
        url = "http://www.boxofficemojo.com"+ link + "&page=intl"
        page = urlopen(url)
        soup = BeautifulSoup(page, "lxml")
        contents = soup.find('table', attrs={'border': '3' , 'cellspacing':'0', 'cellpadding':'5', 'align':'center', 'style':'margin-top: 5px;'})
        # NOTE(review): len() on a bs4 Tag counts its children — confirm this
        # really distinguishes "has data" pages.
        if len(contents) == 1:
            #print(contents)
            intl_table = contents.tr.td.table
            if intl_table:
                trs = intl_table.find_all("tr")
                if len(trs) == 3:
                    # Only header/filler rows: no per-country data.
                    #print ("no data")
                    temp= 0
                else:
                    for row,tr in enumerate(trs):
                        if row == 0:
                            # First row: collect the column headers; slash-joined
                            # headers (e.g. "A/B") are split into two columns.
                            tds= tr.find_all("td") # get each header's text
                            for td in tds:
                                header= td.text.strip()
                                if "/" in header:
                                    divided_header = header.split('/')
                                    ColumnHeaders.append(divided_header[0])
                                    ColumnHeaders.append(divided_header[1])
                                else:
                                    ColumnHeaders.append(td.text.strip())
                        if(row < 3): # don't save unncessary data
                            continue
                        tds= tr.find_all("td")
                        for column, td in enumerate(tds):
                            # 11. Country, 12.Dist, 13. Release Date, 14.OW, 15.% of Total, 16.Total gross, 17. as of
                            eachCountry[ColumnHeaders[column]] = td.text.strip()
                        save_to_file(FILE_PATH, arrData, eachCountry)
                        #save_to_json(FILE_PATH, arrData, eachCountry)
                        eachCountry.clear()
        return arrData
    except Exception as e:
        # Best-effort scraping: log and keep going with whatever we have.
        logging.exception(e)
        return arrData
def get_movie_detail(movies_list, link, arrData):
    """Scrape a movie page's summary table into *arrData* and return it.

    *link* is recorded in *movies_list* (deduplicated) so callers can track
    which movies were already visited.
    """
    if link not in movies_list:
        movies_list.append(link)
    url = "http://www.boxofficemojo.com"+ link # 1. URL
    page = urlopen(url)
    soup = BeautifulSoup(page, "lxml")
    contents= soup.find('table', attrs={'border': '0' , 'cellspacing':'1', 'cellpadding':'4' , 'bgcolor':'#dcdcdc', 'width':'95%'})
    tabledata = contents.find_all("td")
    name_table = soup.find('table', attrs={'border': '0' , 'cellspacing':'0', 'cellpadding':'0' , 'width':'100%', 'style':'padding-top: 5px;'})
    name = name_table.font.b.getText() # 0. Name
    # 2. Distributor, 3. Release Date, 4. Genre, 5. Runtime, 6. Rating, 7. Budget, 8. TotalGross
    arrData['Name'] = name
    arrData['URL'] = url
    # Six cells: page has no total-gross cell (presumably not yet released) —
    # TODO confirm against live pages.
    if len(tabledata) == 6:
        Distributor = tabledata[0].b.getText()
        ReleaseDate = tabledata[1].b.getText()
        Genre = tabledata[2].b.getText()
        Runtime = tabledata[3].b.getText()
        Rating = tabledata[4].b.getText()
        Budget = tabledata[5].b.getText()
        arrData['Distributor'] = Distributor
        arrData['ReleaseDate'] = ReleaseDate
        arrData['Genre'] = Genre
        arrData['Runtime'] = Runtime
        arrData['Rating'] = Rating
        arrData['Budget'] = Budget
        #arrData.extend([name , url , Distributor, ReleaseDate,Genre ,Runtime , Rating,Budget])
        #add_empty_data(arrData, 1) # match gap for missing column
    # Seven cells: the first cell carries the total gross.
    elif len(tabledata) == 7:
        TotalGross = tabledata[0].b.getText()
        Distributor = tabledata[1].b.getText()
        ReleaseDate = tabledata[2].b.getText()
        Genre = tabledata[3].b.getText()
        Runtime = tabledata[4].b.getText()
        Rating = tabledata[5].b.getText()
        Budget = tabledata[6].b.getText()
        arrData['TotalGross'] = TotalGross
        arrData['Distributor'] = Distributor
        arrData['ReleaseDate'] = ReleaseDate
        arrData['Genre'] = Genre
        arrData['Runtime'] = Runtime
        arrData['Rating'] = Rating
        arrData['Budget'] = Budget
        #arrData.extend([ name , url , Distributor, ReleaseDate,Genre ,Runtime , Rating,Budget ,TotalGross])
    #print (result)
    #print contents2[0]
    return arrData
def get_all_movies():
    """Crawl boxofficemojo's alphabetical movie index and write every movie's
    data to FILE_PATH as pipe-delimited rows.

    Supports resuming: JumpTo / JumpToPage skip ahead to letter 'S', page 8
    (set JumpTo to None to crawl from the beginning).
    """
    # Alphabet loop for how movies are indexed including
    # movies that start with a special character or number
    index = ["NUM"] + list(string.ascii_uppercase)
    # List of movie urls
    movies_list = []
    # dict data
    arrData = {}
    startTime = time.time()
    lapTime= 0.0
    # if you want to jump directly to somewhere (Set None to be not skipped)
    JumpTo = 'S'
    IsJumpTarget = False
    JumpToPage = 8
    write_header(FILE_PATH)
    logging.debug("running...start at : " + str(time.time()))
    # Loop through the pages for each letter
    for letter_idx, letter in enumerate(index):
        if JumpTo:
            indexOfTargetLetter = index.index(JumpTo)
            if letter_idx < indexOfTargetLetter:
                logging.debug("skip this letter")
                IsJumpTarget= False
                continue
            elif letter_idx == indexOfTargetLetter:
                IsJumpTarget= True
        url = ("http://www.boxofficemojo.com/movies/alphabetical.htm?letter=" + letter)
        page1 = urlopen(url)
        soup1 = BeautifulSoup(page1, "lxml")
        # The pagination strip ("alpha-nav-holder") tells us how many pages
        # this letter spans.
        navi = soup1.find('div', attrs={"class" : "alpha-nav-holder"})
        bs= navi.font.find_all('b')
        count_bs= len(bs)
        logging.debug("pages count : " + str(count_bs))
        if letter == "NUM":
            count_bs = 1
        # Loop through the pages within each letter
        for num in range(1, count_bs+1):
            logging.debug("begin to scrap letter : " + letter + ", page : " + str(num))
            if JumpToPage:
                if num < JumpToPage and IsJumpTarget == True: # do not jump this page, if it's not target letter
                    logging.debug("skip this page")
                    continue
            url = ("http://www.boxofficemojo.com/movies/alphabetical.htm?"
                   "letter=" + letter + "&page=" + str(num))
            try:
                page = urlopen(url)
                soup = BeautifulSoup(page, "lxml")
                rows = soup.find(id="body").find("table").find("table").find_all(
                    "table")[1].find_all("tr")
                # skip index row
                if len(rows) > 1:
                    counter = 1
                    for row in rows:
                        trackingStartTime= time.time()
                        # skip index row
                        if counter > 1:
                            link = row.td.font.a['href']
                            # Detail page, foreign-market page, then lifetime
                            # grosses — all merged into the same arrData row.
                            arrData = get_movie_detail(movies_list, link, arrData)
                            arrData = get_movie_foreign(link, arrData)
                            arrData = get_total_lifetime_grosses(link, arrData)
                            save_to_file(FILE_PATH, arrData)
                            arrData.clear()
                            lapTime= time.time() - trackingStartTime
                            logging.debug("each movie's lapTime : " + str(lapTime))
                        counter += 1
            except Exception as e:
                # Page-level failures are logged and the crawl continues.
                logging.exception(e)
    TotalElaspedTime= (time.time() - startTime)
    logging.debug('done.' + str(TotalElaspedTime))

# Kick off the crawl when the module is executed.
get_all_movies()
"""WeChatTicket URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from WeChatTicket import settings
from wechat.views import CustomWeChatView
from WeChatTicket.views import StaticFileView
from WeChatTicket import search
# URL routing table, matched top to bottom; the catch-all StaticFileView
# entry must remain last or it would shadow every route below it.
urlpatterns = [
    # WeChat message gateway endpoint (trailing slash optional).
    url(r'^wechat/?$', CustomWeChatView.as_view()),
    # Django admin site.
    url(r'^admin/', admin.site.urls),
    # REST APIs for the user-facing and admin-facing pages.
    url(r'^api/u/', include('userpage.urls')),
    url(r'^api/a/', include('adminpage.urls')),
    # Search form page and the search query handler.
    url(r'^search_form$',search.search_form),
    url(r'^search/$',search.search),
    # Catch-all: serve static files / the front-end entry point.
    url(r'^', StaticFileView.as_view()),
    #url(r'^media/(?P<path>.*)$','django.views.static.serve',{'document_root':settings.MEDIA_ROOT}),
]
# This file is part of OpenHatch.
# Copyright (C) 2009 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from south.db import db
from django.db import models
from mysite.search.models import *
class Migration:
    """South schema migration for the `search` app.

    Adds the icon-related columns to Project and re-applies the column
    definitions of two Bug fields (foreign key and boolean) so the
    database matches the frozen model signatures below.
    """

    def forwards(self, orm):
        # Adding field 'Project.icon'
        db.add_column('search_project', 'icon', orm['search.project:icon'])
        # Adding field 'Project.date_icon_was_fetched_from_ohloh'
        db.add_column('search_project', 'date_icon_was_fetched_from_ohloh', orm['search.project:date_icon_was_fetched_from_ohloh'])
        # Changing field 'Bug.project'
        # (to signature: django.db.models.fields.related.ForeignKey(to=orm['search.Project']))
        db.alter_column('search_bug', 'project_id', orm['search.bug:project'])
        # Changing field 'Bug.good_for_newcomers'
        # (to signature: django.db.models.fields.BooleanField(default=False, blank=True))
        db.alter_column('search_bug', 'good_for_newcomers', orm['search.bug:good_for_newcomers'])

    def backwards(self, orm):
        # Deleting field 'Project.icon'
        db.delete_column('search_project', 'icon')
        # Deleting field 'Project.date_icon_was_fetched_from_ohloh'
        db.delete_column('search_project', 'date_icon_was_fetched_from_ohloh')
        # Changing field 'Bug.project'
        # (to signature: models.ForeignKey(orm['search.Project']))
        db.alter_column('search_bug', 'project_id', orm['search.bug:project'])
        # Changing field 'Bug.good_for_newcomers'
        # (to signature: models.BooleanField(default=False))
        db.alter_column('search_bug', 'good_for_newcomers', orm['search.bug:good_for_newcomers'])

    # Frozen model definitions: South builds the fake `orm` object passed
    # to forwards()/backwards() from these.  Do not edit by hand; generate
    # a new migration instead.
    models = {
        'search.bug': {
            'canonical_bug_link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'date_reported': ('django.db.models.fields.DateTimeField', [], {}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'good_for_newcomers': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'importance': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'last_polled': ('django.db.models.fields.DateTimeField', [], {}),
            'last_touched': ('django.db.models.fields.DateTimeField', [], {}),
            'people_involved': ('django.db.models.fields.IntegerField', [], {}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'submitter_realname': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'submitter_username': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        'search.project': {
            'date_icon_was_fetched_from_ohloh': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'icon': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
            'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
        }
    }

    complete_apps = ['search']
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# from ssloop
# https://github.com/clowwindy/ssloop
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import time
import socket
import select
import errno
import logging
from collections import defaultdict
from shadowsocks import shell
__all__ = ['EventLoop', 'POLL_NULL', 'POLL_IN', 'POLL_OUT', 'POLL_ERR',
           'POLL_HUP', 'POLL_NVAL', 'EVENT_NAMES']

# Backend-neutral event masks.  The values match the corresponding epoll
# flags, which is why the epoll backend below can pass them through
# unchanged; the kqueue and select backends translate them explicitly.
POLL_NULL = 0x00
POLL_IN = 0x01
POLL_OUT = 0x04
POLL_ERR = 0x08
POLL_HUP = 0x10
POLL_NVAL = 0x20

# Human-readable names for the masks above, for logging/debugging.
EVENT_NAMES = {
    POLL_NULL: 'POLL_NULL',
    POLL_IN: 'POLL_IN',
    POLL_OUT: 'POLL_OUT',
    POLL_ERR: 'POLL_ERR',
    POLL_HUP: 'POLL_HUP',
    POLL_NVAL: 'POLL_NVAL',
}

# we check timeouts every TIMEOUT_PRECISION seconds
TIMEOUT_PRECISION = 2
class KqueueLoop(object):
    """Polling backend built on BSD/macOS kqueue.

    Translates the module-level ``POLL_*`` masks to kqueue read/write
    filters.  kqueue needs one kevent per filter, so the mode registered
    for each fd is remembered in ``_fds`` in order to delete the same
    filters later.
    """

    MAX_EVENTS = 1024

    def __init__(self):
        self._kqueue = select.kqueue()
        self._fds = {}  # fd -> POLL_* mode currently registered

    def _control(self, fd, mode, flags):
        """Apply *flags* (KQ_EV_ADD / KQ_EV_DELETE) to every filter
        selected by *mode* for *fd*."""
        events = []
        if mode & POLL_IN:
            events.append(select.kevent(fd, select.KQ_FILTER_READ, flags))
        if mode & POLL_OUT:
            events.append(select.kevent(fd, select.KQ_FILTER_WRITE, flags))
        for e in events:
            self._kqueue.control([e], 0)

    def poll(self, timeout):
        """Wait up to *timeout* seconds and return (fd, POLL_* mask) pairs.

        ``None`` or a negative timeout means "block indefinitely".
        """
        # kqueue expects None for "wait forever".  Guard against timeout
        # being None before comparing: on Python 3, ``None < 0`` raises
        # TypeError (EventLoop.poll() defaults timeout to None).
        if timeout is not None and timeout < 0:
            timeout = None  # kqueue behaviour
        events = self._kqueue.control(None, KqueueLoop.MAX_EVENTS, timeout)
        results = defaultdict(lambda: POLL_NULL)
        for e in events:
            fd = e.ident
            if e.filter == select.KQ_FILTER_READ:
                results[fd] |= POLL_IN
            elif e.filter == select.KQ_FILTER_WRITE:
                results[fd] |= POLL_OUT
        return results.items()

    def register(self, fd, mode):
        """Start watching *fd* for the event classes in *mode*."""
        self._fds[fd] = mode
        self._control(fd, mode, select.KQ_EV_ADD)

    def unregister(self, fd):
        """Stop watching *fd*, removing exactly the filters added for it."""
        self._control(fd, self._fds[fd], select.KQ_EV_DELETE)
        del self._fds[fd]

    def modify(self, fd, mode):
        """Re-register *fd* with a new event mask."""
        self.unregister(fd)
        self.register(fd, mode)

    def close(self):
        self._kqueue.close()
class SelectLoop(object):
    """Polling backend built on plain ``select.select()`` -- the portable
    fallback when neither epoll nor kqueue is available."""

    def __init__(self):
        # One fd set per event class, mirroring select()'s three arguments.
        self._r_list = set()
        self._w_list = set()
        self._x_list = set()

    def poll(self, timeout):
        """Wait up to *timeout* seconds; return (fd, POLL_* mask) pairs."""
        readable, writable, exceptional = select.select(
            self._r_list, self._w_list, self._x_list, timeout)
        results = defaultdict(lambda: POLL_NULL)
        for ready_fds, event in ((readable, POLL_IN),
                                 (writable, POLL_OUT),
                                 (exceptional, POLL_ERR)):
            for fd in ready_fds:
                results[fd] |= event
        return results.items()

    def register(self, fd, mode):
        """Start watching *fd* for the event classes set in *mode*."""
        for mask, fd_set in ((POLL_IN, self._r_list),
                             (POLL_OUT, self._w_list),
                             (POLL_ERR, self._x_list)):
            if mode & mask:
                fd_set.add(fd)

    def unregister(self, fd):
        """Stop watching *fd* entirely."""
        for fd_set in (self._r_list, self._w_list, self._x_list):
            fd_set.discard(fd)

    def modify(self, fd, mode):
        """Re-register *fd* with a new event mask."""
        self.unregister(fd)
        self.register(fd, mode)

    def close(self):
        # Nothing to release; select() holds no kernel object.
        pass
class EventLoop(object):
    """Facade over the best available polling backend.

    Prefers epoll (Linux), then kqueue (BSD/macOS), then plain select().
    Tracks registered file objects by fd, dispatches readiness events to
    their handlers, and fires periodic callbacks roughly every
    TIMEOUT_PRECISION seconds.
    """

    def __init__(self):
        if hasattr(select, 'epoll'):
            # epoll accepts the POLL_* masks directly (same flag values).
            self._impl = select.epoll()
            model = 'epoll'
        elif hasattr(select, 'kqueue'):
            self._impl = KqueueLoop()
            model = 'kqueue'
        elif hasattr(select, 'select'):
            self._impl = SelectLoop()
            model = 'select'
        else:
            raise Exception('can not find any available functions in select '
                            'package')
        self._fdmap = {}  # (f, handler)
        self._last_time = time.time()
        self._periodic_callbacks = []
        self._stopping = False
        logging.debug('using event model: %s', model)

    def poll(self, timeout=None):
        """Poll once; return (file_object, fd, event_mask) triples."""
        events = self._impl.poll(timeout)
        return [(self._fdmap[fd][0], fd, event) for fd, event in events]

    def add(self, f, mode, handler):
        """Register file object *f* for *mode* events, dispatched to *handler*."""
        fd = f.fileno()
        self._fdmap[fd] = (f, handler)
        self._impl.register(fd, mode)

    def remove(self, f):
        """Unregister file object *f*."""
        fd = f.fileno()
        del self._fdmap[fd]
        self._impl.unregister(fd)

    def removefd(self, fd):
        """Unregister by raw fd."""
        del self._fdmap[fd]
        self._impl.unregister(fd)

    def add_periodic(self, callback):
        """Register *callback* to run about every TIMEOUT_PRECISION seconds."""
        self._periodic_callbacks.append(callback)

    def remove_periodic(self, callback):
        self._periodic_callbacks.remove(callback)

    def modify(self, f, mode):
        """Change the event mask for file object *f*."""
        fd = f.fileno()
        self._impl.modify(fd, mode)

    def stop(self):
        # Takes effect at the top of the next run() iteration.
        self._stopping = True

    def run(self):
        """Main loop: poll, dispatch events to handlers, fire periodic
        callbacks; returns after stop() is called."""
        events = []
        while not self._stopping:
            asap = False
            try:
                events = self.poll(TIMEOUT_PRECISION)
            except (OSError, IOError) as e:
                if errno_from_exception(e) in (errno.EPIPE, errno.EINTR):
                    # EPIPE: Happens when the client closes the connection
                    # EINTR: Happens when received a signal
                    # handles them as soon as possible
                    asap = True
                    logging.debug('poll:%s', e)
                else:
                    logging.error('poll:%s', e)
                    import traceback
                    traceback.print_exc()
                    continue
            handle = False
            for sock, fd, event in events:
                # Re-check the map: a handler may have removed a later fd
                # while processing an earlier event in this batch.
                handler = self._fdmap.get(fd, None)
                if handler is not None:
                    handler = handler[1]
                    try:
                        handle = handler.handle_event(sock, fd, event) or handle
                    except (OSError, IOError) as e:
                        shell.print_exception(e)
            now = time.time()
            if asap or now - self._last_time >= TIMEOUT_PRECISION:
                for callback in self._periodic_callbacks:
                    callback()
                self._last_time = now
            if events and not handle:
                # NOTE(review): appears to throttle the loop when no
                # handler reported progress -- confirm intent.
                time.sleep(0.001)

    def __del__(self):
        self._impl.close()
# from tornado
def errno_from_exception(e):
    """Best-effort extraction of an errno from an exception object.

    Some exceptions carry a proper ``errno`` attribute, others only
    stash the error number as their first positional argument, and an
    exception instantiated with no arguments has neither.  Returns the
    errno when one can be found, otherwise ``None``.
    """
    try:
        # An existing errno attribute wins, even if its value is None.
        return e.errno
    except AttributeError:
        pass
    return e.args[0] if e.args else None
# from tornado
def get_sock_error(sock):
    """Return the pending error on *sock* as a ``socket.error``.

    Reads (and thereby clears) the socket's SO_ERROR option and pairs
    the numeric code with its human-readable description.
    """
    code = sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
    return socket.error(code, os.strerror(code))
// Webpack test fixture: pulls in "./a" for its side effects and exports
// a function returning a fixed marker string used by the test harness.
module.exports = function b() {
    require("./a");
    return "This is c";
};
from __future__ import absolute_import
from django.utils.translation import ugettext_lazy as _
from acls.api import class_permissions
#from agencies.classes import AgencyElement
from agencies.models import Agency
from common.utils import encapsulate
from navigation.api import register_top_menu, register_model_list_columns
from navigation.classes import Link
from .links import (link_circuit_create, link_circuit_delete, link_circuit_edit,
link_circuit_view, link_agency_circuit_list, link_equipment_create,
link_equipment_delete, link_equipment_edit, link_equipment_view, link_agency_equipment_list)
from .models import Circuit, Equipment
from .permissions import (PERMISSION_CIRCUIT_CREATE, PERMISSION_CIRCUIT_DELETE,
PERMISSION_CIRCUIT_EDIT, PERMISSION_CIRCUIT_VIEW, PERMISSION_EQUIPMENT_CREATE,
PERMISSION_EQUIPMENT_DELETE, PERMISSION_EQUIPMENT_EDIT, PERMISSION_EQUIPMENT_VIEW)
#Link.bind_links(['equipment_list'], [link_equipment_list], menu_name='secondary_menu')

# Navigation wiring for equipment: creation link on the list/create views,
# list link on Agency pages, and per-object action links.
Link.bind_links(['agency_equipment_list', 'equipment_create', Equipment], [link_equipment_create], menu_name='secondary_menu')
Link.bind_links([Agency], [link_agency_equipment_list])
Link.bind_links([Equipment], [link_equipment_view, link_equipment_edit, link_equipment_delete])

# Same wiring for circuits.
Link.bind_links(['agency_circuit_list', 'circuit_create', Circuit], [link_circuit_create], menu_name='secondary_menu')
Link.bind_links([Agency], [link_agency_circuit_list])
Link.bind_links([Circuit], [link_circuit_view, link_circuit_edit, link_circuit_delete])

# Columns displayed in the equipment list view.
register_model_list_columns(Equipment, [
    {'name': _(u'name'), 'attribute': 'label'},
])

# Columns displayed in the circuit list view; bandwidth is rendered via
# the model's display helper, wrapped with encapsulate().
register_model_list_columns(Circuit, [
    {'name': _(u'purpose'), 'attribute': 'purpose'},
    {'name': _(u'provider'), 'attribute': 'provider'},
    {'name': _(u'technology'), 'attribute': 'technology'},
    {'name': _(u'bandwidth'), 'attribute': encapsulate(lambda x: x.get_bandwidth_display())},
])

#AgencyElement(link_agency_equipment_list)

# Enable ACL support for these permissions on Agency objects.
class_permissions(Agency, [
    PERMISSION_EQUIPMENT_CREATE, PERMISSION_EQUIPMENT_DELETE,
    PERMISSION_EQUIPMENT_EDIT, PERMISSION_EQUIPMENT_VIEW
    ]
)
class_permissions(Agency, [
    PERMISSION_CIRCUIT_CREATE, PERMISSION_CIRCUIT_DELETE,
    PERMISSION_CIRCUIT_EDIT, PERMISSION_CIRCUIT_VIEW
    ]
)
# -*- coding: utf-8 -*-
###############################################################################
#
# RetrieveUserInfo
# Returns the available resources for a specific user and the URIs for accessing them.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class RetrieveUserInfo(Choreography):

    def __init__(self, temboo_session):
        """
        Create a new instance of the RetrieveUserInfo Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(RetrieveUserInfo, self).__init__(temboo_session, '/Library/RunKeeper/Users/RetrieveUserInfo')

    def new_input_set(self):
        """Return an empty input set for this Choreo."""
        return RetrieveUserInfoInputSet()

    def _make_result_set(self, result, path):
        """Wrap a raw result in the Choreo-specific ResultSet."""
        return RetrieveUserInfoResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        """Build the execution handle for an asynchronous run."""
        return RetrieveUserInfoChoreographyExecution(session, exec_id, path)
class RetrieveUserInfoInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the RetrieveUserInfo
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """

    def set_AccessToken(self, value):
        """
        Set the value of the AccessToken input for this Choreo. ((required, string) The Access Token retrieved after the final step in the OAuth process.)
        """
        # Inputs are stored by name on the underlying InputSet.
        super(RetrieveUserInfoInputSet, self)._set_input('AccessToken', value)
class RetrieveUserInfoResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the RetrieveUserInfo Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, json_string):
        """Deserialize *json_string* (a JSON document) into Python objects."""
        # Parameter renamed from ``str`` to avoid shadowing the builtin type;
        # callers invoke this positionally, so the rename is compatible.
        return json.loads(json_string)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from RunKeeper.)
        """
        return self._output.get('Response', None)
class RetrieveUserInfoChoreographyExecution(ChoreographyExecution):
    """Execution handle for RetrieveUserInfo; builds its typed ResultSet."""

    def _make_result_set(self, response, path):
        return RetrieveUserInfoResultSet(response, path)
"""
WSGI config for DjangoApplication7 project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os

# Default the settings module; a value already set in the environment wins.
# NOTE(review): the module docstring above refers to "DjangoApplication7"
# but this points at "DjangoApplication.settings" -- confirm which package
# actually holds settings.py.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "DjangoApplication.settings")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application

application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron, Nicolas Petit
# Copyright 2015, TODAY Clouder SASU
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License with Attribution
# clause as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License with
# Attribution clause along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo addon manifest for the Clouder Website module.
{
    'name': 'Clouder Website',
    # Odoo series 10.0, module version 10.0.0.
    'version': '10.0.10.0.0',
    'category': 'Clouder',
    # Core deps plus signup support and the main clouder module.
    'depends': ['base', 'auth_signup', 'clouder'],
    'author': 'Yannick Buron (Clouder), Nicolas Petit',
    'license': 'LGPL-3',
    'website': 'https://github.com/clouder-community/clouder',
    'demo': [],
    # Loaded in order: access rules first, then views and templates.
    'data': [
        'security/ir.model.access.csv',
        'clouder_website_view.xml',
        'templates.xml'
    ],
    'installable': True,
    'application': True,
}
# Ansible module to manage CheckPoint Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleExitJson
from ansible.module_utils import basic
from ansible.modules.network.check_point import cp_mgmt_service_udp
# Object as the (mocked) API layer reports it after creation.
OBJECT = {
    "name": "New_UDP_Service_1",
    "port": 5669,
    "keep_connections_open_after_policy_installation": False,
    "session_timeout": 0,
    "match_for_any": True,
    "sync_connections_on_cluster": True,
    "aggressive_aging": {
        "enable": True,
        "timeout": 360,
        "use_default_timeout": False
    },
    "accept_replies": False
}

# Module arguments used to create the object; mirrors OBJECT exactly.
CREATE_PAYLOAD = {
    "name": "New_UDP_Service_1",
    "port": 5669,
    "keep_connections_open_after_policy_installation": False,
    "session_timeout": 0,
    "match_for_any": True,
    "sync_connections_on_cluster": True,
    "aggressive_aging": {
        "enable": True,
        "timeout": 360,
        "use_default_timeout": False
    },
    "accept_replies": False
}

# Module arguments used to update the object in place.
UPDATE_PAYLOAD = {
    "name": "New_UDP_Service_1",
    "color": "blue",
    "port": 5656,
    "aggressive_aging": {
        "default_timeout": 3600
    },
    "accept_replies": True
}

# After an update the module is expected to report exactly the payload.
OBJECT_AFTER_UPDATE = UPDATE_PAYLOAD

# Module arguments that request deletion.
DELETE_PAYLOAD = {
    "name": "New_UDP_Service_1",
    "state": "absent"
}

# Patch target for the shared api_call helper, and the API object name
# under which results are returned.
function_path = 'ansible.modules.network.check_point.cp_mgmt_service_udp.api_call'
api_call_object = 'service-udp'
class TestCheckpointServiceUdp(object):
    """Unit tests for the cp_mgmt_service_udp module.

    The shared ``api_call`` helper and the HTTP connection are both
    mocked, so these tests only verify that the module passes results
    through and reports ``changed`` correctly for each CRUD scenario.
    """

    module = cp_mgmt_service_udp

    @pytest.fixture(autouse=True)
    def module_mock(self, mocker):
        # Replace exit_json/fail_json so module exit raises AnsibleExitJson
        # (via the test utils) instead of terminating the process.
        return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)

    @pytest.fixture
    def connection_mock(self, mocker):
        # Stub out the Check Point HTTP-API connection entirely.
        connection_class_mock = mocker.patch('ansible.module_utils.network.checkpoint.checkpoint.Connection')
        return connection_class_mock.return_value

    def test_create(self, mocker, connection_mock):
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': True, api_call_object: OBJECT}
        result = self._run_module(CREATE_PAYLOAD)
        assert result['changed']
        assert OBJECT.items() == result[api_call_object].items()

    def test_create_idempotent(self, mocker, connection_mock):
        # Creating an object that already exists must report changed=False.
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': False, api_call_object: OBJECT}
        result = self._run_module(CREATE_PAYLOAD)
        assert not result['changed']

    def test_update(self, mocker, connection_mock):
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': True, api_call_object: OBJECT_AFTER_UPDATE}
        result = self._run_module(UPDATE_PAYLOAD)
        assert result['changed']
        assert OBJECT_AFTER_UPDATE.items() == result[api_call_object].items()

    def test_update_idempotent(self, mocker, connection_mock):
        # Re-applying the same update must report changed=False.
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': False, api_call_object: OBJECT_AFTER_UPDATE}
        result = self._run_module(UPDATE_PAYLOAD)
        assert not result['changed']

    def test_delete(self, mocker, connection_mock):
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': True}
        result = self._run_module(DELETE_PAYLOAD)
        assert result['changed']

    def test_delete_idempotent(self, mocker, connection_mock):
        # Deleting an absent object must report changed=False.
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': False}
        result = self._run_module(DELETE_PAYLOAD)
        assert not result['changed']

    def _run_module(self, module_args):
        """Run the module with *module_args* and return its exit payload."""
        set_module_args(module_args)
        with pytest.raises(AnsibleExitJson) as ex:
            self.module.main()
        return ex.value.args[0]
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class WatchListRule:
    """A watch-list rule: a set of definitions to match, plus the
    instructions to carry out when the rule is satisfied."""

    def __init__(self, complex_definition, instructions):
        # A '|'-separated definition means "match any one of these".
        self.definitions_to_match = complex_definition.split('|')
        self._instructions = instructions

    def match(self, matching_definitions):
        """Return True if any of this rule's definitions was matched."""
        return any(definition in matching_definitions
                   for definition in self.definitions_to_match)

    def instructions(self):
        return self._instructions

    def remove_instruction(self, instruction):
        self._instructions.remove(instruction)
/*
* Copyright 2010-2023 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.api.fir
import com.intellij.psi.PsiElement
import com.intellij.psi.search.GlobalSearchScope
import org.jetbrains.kotlin.analysis.low.level.api.fir.sessions.llFirSession
import org.jetbrains.kotlin.analysis.api.projectStructure.KaBuiltinsModule
import org.jetbrains.kotlin.analysis.api.projectStructure.KaModule
import org.jetbrains.kotlin.analysis.api.platform.declarations.createDeclarationProvider
import org.jetbrains.kotlin.fir.containingClassLookupTag
import org.jetbrains.kotlin.fir.declarations.*
import org.jetbrains.kotlin.name.ClassId
import org.jetbrains.kotlin.name.FqName
import org.jetbrains.kotlin.name.Name
import org.jetbrains.kotlin.name.StandardClassIds
import org.jetbrains.kotlin.psi.KtClassOrObject
//todo introduce LibraryModificationTracker based cache?
/**
 * Locates PSI source elements for the synthetic `kotlin.FunctionN` interfaces
 * (and their `invoke` members) by resolving them to the compiled
 * `kotlin.jvm.functions.FunctionN` declarations in the builtins module.
 */
internal object FirSyntheticFunctionInterfaceSourceProvider {
    fun findPsi(fir: FirDeclaration, scope: GlobalSearchScope): PsiElement? {
        return when (fir) {
            is FirNamedFunction -> provideSourceForInvokeFunction(fir, scope)
            is FirClass -> provideSourceForFunctionClass(fir, scope)
            else -> null
        }
    }

    private fun provideSourceForInvokeFunction(function: FirNamedFunction, scope: GlobalSearchScope): PsiElement? {
        val classId = function.containingClassLookupTag()?.classId ?: return null
        val classOrObject = classByClassId(classId, scope, function.llFirSession.ktModule) ?: return null
        // Assumes the resolved class declares exactly one member (invoke);
        // singleOrNull() yields null otherwise.
        return classOrObject.declarations.singleOrNull()
    }

    private fun provideSourceForFunctionClass(klass: FirClass, scope: GlobalSearchScope): PsiElement? {
        return classByClassId(klass.symbol.classId, scope, klass.llFirSession.ktModule)
    }

    private fun classByClassId(classId: ClassId, scope: GlobalSearchScope, ktModule: KaModule): KtClassOrObject? {
        val project = ktModule.project
        // Only FunctionN class ids are supported; anything else has no mapping.
        val correctedClassId = classIdMapping[classId] ?: return null
        require(ktModule is KaBuiltinsModule) {
            "Expected builtin module but found $ktModule"
        }
        // Prefer the decompiled (compiled-file) declaration.
        return project.createDeclarationProvider(scope, ktModule)
            .getAllClassesByClassId(correctedClassId)
            .firstOrNull { it.containingKtFile.isCompiled }
    }

    // kotlin.FunctionN (0..23) -> kotlin.jvm.functions.FunctionN.
    private val classIdMapping = (0..23).associate { i ->
        StandardClassIds.FunctionN(i) to ClassId(FqName("kotlin.jvm.functions"), Name.identifier("Function$i"))
    }
}
// @flow @validatePreserveExistingMemoizationGuarantees @enableUseTypeAnnotations
import {useMemo} from 'react';
import {useFragment} from 'shared-runtime';
// This is a version of error.todo-repro-missing-memoization-lack-of-phi-types
// with explicit type annotations and using enableUseTypeAnnotations to demonstrate
// that type information is sufficient to preserve memoization in this example
// Compiler test fixture: the explicit Array annotations (with
// enableUseTypeAnnotations) are what lets memoization of `map` be preserved.
function Component() {
  const data = useFragment();
  const nodes: Array<any> = data.nodes ?? [];
  const flatMap: Array<any> = nodes.flatMap(node => node.items);
  const filtered: Array<any> = flatMap.filter(item => item != null);
  const map: Array<any> = useMemo(() => filtered.map(), [filtered]);
  const index: Array<any> = filtered.findIndex(x => x === null);
  return (
    <div>
      {map}
      {index}
    </div>
  );
}
# ext/associationproxy.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Contain the ``AssociationProxy`` class.
The ``AssociationProxy`` is a Python property object which provides
transparent proxied access to the endpoint of an association object.
See the example ``examples/association/proxied_association.py``.
"""
import itertools
import operator
import weakref
from .. import exc, orm, util
from ..orm import collections, interfaces
from ..sql import not_, or_
def association_proxy(target_collection, attr, **kw):
    """Return a Python property implementing a view of a target
    attribute which references an attribute on members of the
    target.

    The returned value is an instance of :class:`.AssociationProxy`.

    Implements a Python property representing a relationship as a collection
    of simpler values, or a scalar value.  The proxied property will mimic
    the collection type of the target (list, dict or set), or, in the case of
    a one to one relationship, a simple scalar value.

    :param target_collection: Name of the attribute we'll proxy to.
      This attribute is typically mapped by
      :func:`~sqlalchemy.orm.relationship` to link to a target collection, but
      can also be a many-to-one or non-scalar relationship.

    :param attr: Attribute on the associated instance or instances we'll
      proxy for.

      For example, given a target collection of [obj1, obj2], a list created
      by this proxy property would look like [getattr(obj1, *attr*),
      getattr(obj2, *attr*)]

      If the relationship is one-to-one or otherwise uselist=False, then
      simply: getattr(obj, *attr*)

    :param creator: optional.

      When new items are added to this proxied collection, new instances of
      the class collected by the target collection will be created.  For list
      and set collections, the target class constructor will be called with
      the 'value' for the new instance.  For dict types, two arguments are
      passed: key and value.

      If you want to construct instances differently, supply a *creator*
      function that takes arguments as above and returns instances.

      For scalar relationships, creator() will be called if the target is None.
      If the target is present, set operations are proxied to setattr() on the
      associated object.

      If you have an associated object with multiple attributes, you may set
      up multiple association proxies mapping to different attributes.  See
      the unit tests for examples, and for examples of how creator() functions
      can be used to construct the scalar relationship on-demand in this
      situation.

    :param \*\*kw: Passes along any other keyword arguments to
      :class:`.AssociationProxy`.

    """
    # Thin convenience wrapper; all behavior lives on the descriptor.
    return AssociationProxy(target_collection, attr, **kw)
ASSOCIATION_PROXY = util.symbol('ASSOCIATION_PROXY')
"""Symbol indicating an :class:`_InspectionAttr` that's
of type :class:`.AssociationProxy`.
Is assigned to the :attr:`._InspectionAttr.extension_type`
attibute.
"""
class AssociationProxy(interfaces._InspectionAttr):
    """A descriptor that presents a read/write view of an object attribute."""

    # Not a mapped column/relationship attribute; inspection identifies it
    # through the ASSOCIATION_PROXY extension type instead.
    is_attribute = False
    extension_type = ASSOCIATION_PROXY
    def __init__(self, target_collection, attr, creator=None,
                 getset_factory=None, proxy_factory=None,
                 proxy_bulk_set=None):
        """Construct a new :class:`.AssociationProxy`.

        The :func:`.association_proxy` function is provided as the usual
        entrypoint here, though :class:`.AssociationProxy` can be instantiated
        and/or subclassed directly.

        :param target_collection: Name of the collection we'll proxy to,
          usually created with :func:`.relationship`.

        :param attr: Attribute on the collected instances we'll proxy
          for.  For example, given a target collection of [obj1, obj2], a
          list created by this proxy property would look like
          [getattr(obj1, attr), getattr(obj2, attr)]

        :param creator: Optional. When new items are added to this proxied
          collection, new instances of the class collected by the target
          collection will be created.  For list and set collections, the
          target class constructor will be called with the 'value' for the
          new instance.  For dict types, two arguments are passed:
          key and value.

          If you want to construct instances differently, supply a 'creator'
          function that takes arguments as above and returns instances.

        :param getset_factory: Optional.  Proxied attribute access is
          automatically handled by routines that get and set values based on
          the `attr` argument for this proxy.

          If you would like to customize this behavior, you may supply a
          `getset_factory` callable that produces a tuple of `getter` and
          `setter` functions.  The factory is called with two arguments, the
          abstract type of the underlying collection and this proxy instance.

        :param proxy_factory: Optional.  The type of collection to emulate is
          determined by sniffing the target collection.  If your collection
          type can't be determined by duck typing or you'd like to use a
          different collection implementation, you may supply a factory
          function to produce those collections.  Only applicable to
          non-scalar relationships.

        :param proxy_bulk_set: Optional, use with proxy_factory.  See
          the _set() method for details.

        """
        self.target_collection = target_collection
        self.value_attr = attr
        self.creator = creator
        self.getset_factory = getset_factory
        self.proxy_factory = proxy_factory
        self.proxy_bulk_set = proxy_bulk_set

        # Filled in later, when the descriptor learns which class owns it.
        self.owning_class = None
        # Per-proxy unique key (includes id(self)) used to stash proxy
        # state on instances without colliding with other proxies.
        self.key = '_%s_%s_%s' % (
            type(self).__name__, target_collection, id(self))
        self.collection_class = None
@property
def remote_attr(self):
"""The 'remote' :class:`.MapperProperty` referenced by this
:class:`.AssociationProxy`.
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.attr`
:attr:`.AssociationProxy.local_attr`
"""
return getattr(self.target_class, self.value_attr)
@property
def local_attr(self):
"""The 'local' :class:`.MapperProperty` referenced by this
:class:`.AssociationProxy`.
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.attr`
:attr:`.AssociationProxy.remote_attr`
"""
return getattr(self.owning_class, self.target_collection)
@property
def attr(self):
"""Return a tuple of ``(local_attr, remote_attr)``.
This attribute is convenient when specifying a join
using :meth:`.Query.join` across two relationships::
sess.query(Parent).join(*Parent.proxied.attr)
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.local_attr`
:attr:`.AssociationProxy.remote_attr`
"""
return (self.local_attr, self.remote_attr)
def _get_property(self):
return (orm.class_mapper(self.owning_class).
get_property(self.target_collection))
@util.memoized_property
def target_class(self):
"""The intermediary class handled by this :class:`.AssociationProxy`.
Intercepted append/set/assignment events will result
in the generation of new instances of this class.
"""
return self._get_property().mapper.class_
@util.memoized_property
def scalar(self):
"""Return ``True`` if this :class:`.AssociationProxy` proxies a scalar
relationship on the local side."""
scalar = not self._get_property().uselist
if scalar:
self._initialize_scalar_accessors()
return scalar
@util.memoized_property
def _value_is_scalar(self):
return not self._get_property().\
mapper.get_property(self.value_attr).uselist
@util.memoized_property
def _target_is_object(self):
return getattr(self.target_class, self.value_attr).impl.uses_objects
def __get__(self, obj, class_):
if self.owning_class is None:
self.owning_class = class_ and class_ or type(obj)
if obj is None:
return self
if self.scalar:
target = getattr(obj, self.target_collection)
return self._scalar_get(target)
else:
try:
# If the owning instance is reborn (orm session resurrect,
# etc.), refresh the proxy cache.
creator_id, proxy = getattr(obj, self.key)
if id(obj) == creator_id:
return proxy
except AttributeError:
pass
proxy = self._new(_lazy_collection(obj, self.target_collection))
setattr(obj, self.key, (id(obj), proxy))
return proxy
def __set__(self, obj, values):
if self.owning_class is None:
self.owning_class = type(obj)
if self.scalar:
creator = self.creator and self.creator or self.target_class
target = getattr(obj, self.target_collection)
if target is None:
setattr(obj, self.target_collection, creator(values))
else:
self._scalar_set(target, values)
else:
proxy = self.__get__(obj, None)
if proxy is not values:
proxy.clear()
self._set(proxy, values)
def __delete__(self, obj):
if self.owning_class is None:
self.owning_class = type(obj)
delattr(obj, self.key)
def _initialize_scalar_accessors(self):
if self.getset_factory:
get, set = self.getset_factory(None, self)
else:
get, set = self._default_getset(None)
self._scalar_get, self._scalar_set = get, set
def _default_getset(self, collection_class):
attr = self.value_attr
_getter = operator.attrgetter(attr)
getter = lambda target: _getter(target) if target is not None else None
if collection_class is dict:
setter = lambda o, k, v: setattr(o, attr, v)
else:
setter = lambda o, v: setattr(o, attr, v)
return getter, setter
def _new(self, lazy_collection):
creator = self.creator and self.creator or self.target_class
self.collection_class = util.duck_type_collection(lazy_collection())
if self.proxy_factory:
return self.proxy_factory(
lazy_collection, creator, self.value_attr, self)
if self.getset_factory:
getter, setter = self.getset_factory(self.collection_class, self)
else:
getter, setter = self._default_getset(self.collection_class)
if self.collection_class is list:
return _AssociationList(
lazy_collection, creator, getter, setter, self)
elif self.collection_class is dict:
return _AssociationDict(
lazy_collection, creator, getter, setter, self)
elif self.collection_class is set:
return _AssociationSet(
lazy_collection, creator, getter, setter, self)
else:
raise exc.ArgumentError(
'could not guess which interface to use for '
'collection_class "%s" backing "%s"; specify a '
'proxy_factory and proxy_bulk_set manually' %
(self.collection_class.__name__, self.target_collection))
def _inflate(self, proxy):
creator = self.creator and self.creator or self.target_class
if self.getset_factory:
getter, setter = self.getset_factory(self.collection_class, self)
else:
getter, setter = self._default_getset(self.collection_class)
proxy.creator = creator
proxy.getter = getter
proxy.setter = setter
def _set(self, proxy, values):
if self.proxy_bulk_set:
self.proxy_bulk_set(proxy, values)
elif self.collection_class is list:
proxy.extend(values)
elif self.collection_class is dict:
proxy.update(values)
elif self.collection_class is set:
proxy.update(values)
else:
raise exc.ArgumentError(
'no proxy_bulk_set supplied for custom '
'collection_class implementation')
@property
def _comparator(self):
return self._get_property().comparator
def any(self, criterion=None, **kwargs):
"""Produce a proxied 'any' expression using EXISTS.
This expression will be a composed product
using the :meth:`.RelationshipProperty.Comparator.any`
and/or :meth:`.RelationshipProperty.Comparator.has`
operators of the underlying proxied attributes.
"""
if self._value_is_scalar:
value_expr = getattr(
self.target_class, self.value_attr).has(criterion, **kwargs)
else:
value_expr = getattr(
self.target_class, self.value_attr).any(criterion, **kwargs)
# check _value_is_scalar here, otherwise
# we're scalar->scalar - call .any() so that
# the "can't call any() on a scalar" msg is raised.
if self.scalar and not self._value_is_scalar:
return self._comparator.has(
value_expr
)
else:
return self._comparator.any(
value_expr
)
def has(self, criterion=None, **kwargs):
"""Produce a proxied 'has' expression using EXISTS.
This expression will be a composed product
using the :meth:`.RelationshipProperty.Comparator.any`
and/or :meth:`.RelationshipProperty.Comparator.has`
operators of the underlying proxied attributes.
"""
if self._target_is_object:
return self._comparator.has(
getattr(self.target_class, self.value_attr).\
has(criterion, **kwargs)
)
else:
if criterion is not None or kwargs:
raise exc.ArgumentError(
"Non-empty has() not allowed for "
"column-targeted association proxy; use ==")
return self._comparator.has()
def contains(self, obj):
"""Produce a proxied 'contains' expression using EXISTS.
This expression will be a composed product
using the :meth:`.RelationshipProperty.Comparator.any`
, :meth:`.RelationshipProperty.Comparator.has`,
and/or :meth:`.RelationshipProperty.Comparator.contains`
operators of the underlying proxied attributes.
"""
if self.scalar and not self._value_is_scalar:
return self._comparator.has(
getattr(self.target_class, self.value_attr).contains(obj)
)
else:
return self._comparator.any(**{self.value_attr: obj})
def __eq__(self, obj):
# note the has() here will fail for collections; eq_()
# is only allowed with a scalar.
if obj is None:
return or_(
self._comparator.has(**{self.value_attr: obj}),
self._comparator == None
)
else:
return self._comparator.has(**{self.value_attr: obj})
def __ne__(self, obj):
# note the has() here will fail for collections; eq_()
# is only allowed with a scalar.
return self._comparator.has(
getattr(self.target_class, self.value_attr) != obj)
class _lazy_collection(object):
def __init__(self, obj, target):
self.ref = weakref.ref(obj)
self.target = target
def __call__(self):
obj = self.ref()
if obj is None:
raise exc.InvalidRequestError(
"stale association proxy, parent object has gone out of "
"scope")
return getattr(obj, self.target)
def __getstate__(self):
return {'obj': self.ref(), 'target': self.target}
def __setstate__(self, state):
self.ref = weakref.ref(state['obj'])
self.target = state['target']
class _AssociationCollection(object):
def __init__(self, lazy_collection, creator, getter, setter, parent):
"""Constructs an _AssociationCollection.
This will always be a subclass of either _AssociationList,
_AssociationSet, or _AssociationDict.
lazy_collection
A callable returning a list-based collection of entities (usually an
object attribute managed by a SQLAlchemy relationship())
creator
A function that creates new target entities. Given one parameter:
value. This assertion is assumed::
obj = creator(somevalue)
assert getter(obj) == somevalue
getter
A function. Given an associated object, return the 'value'.
setter
A function. Given an associated object and a value, store that
value on the object.
"""
self.lazy_collection = lazy_collection
self.creator = creator
self.getter = getter
self.setter = setter
self.parent = parent
col = property(lambda self: self.lazy_collection())
def __len__(self):
return len(self.col)
def __bool__(self):
return bool(self.col)
__nonzero__ = __bool__
def __getstate__(self):
return {'parent': self.parent, 'lazy_collection': self.lazy_collection}
def __setstate__(self, state):
self.parent = state['parent']
self.lazy_collection = state['lazy_collection']
self.parent._inflate(self)
class _AssociationList(_AssociationCollection):
    """Generic, converting, list-to-list proxy."""

    def _create(self, value):
        # Build a new intermediary object for ``value`` via the creator.
        return self.creator(value)

    def _get(self, object):
        return self.getter(object)

    def _set(self, object, value):
        return self.setter(object, value)

    def __getitem__(self, index):
        return self._get(self.col[index])

    def __setitem__(self, index, value):
        if not isinstance(index, slice):
            self._set(self.col[index], value)
        else:
            # Normalize the slice bounds.
            if index.stop is None:
                stop = len(self)
            elif index.stop < 0:
                stop = len(self) + index.stop
            else:
                stop = index.stop
            step = index.step or 1
            start = index.start or 0
            rng = list(range(start, stop, step))
            if step == 1:
                # Contiguous slice: remove the old span, then insert the
                # replacement values one at a time.
                for i in rng:
                    del self[start]
                i = start
                for item in value:
                    self.insert(i, item)
                    i += 1
            else:
                # Extended slice: lengths must match, mirroring built-in
                # list semantics.
                if len(value) != len(rng):
                    raise ValueError(
                        "attempt to assign sequence of size %s to "
                        "extended slice of size %s" % (len(value),
                                                       len(rng)))
                for i, item in zip(rng, value):
                    self._set(self.col[i], item)

    def __delitem__(self, index):
        del self.col[index]

    def __contains__(self, value):
        for member in self.col:
            # testlib.pragma exempt:__eq__
            if self._get(member) == value:
                return True
        return False

    def __getslice__(self, start, end):
        # Python 2 only.
        return [self._get(member) for member in self.col[start:end]]

    def __setslice__(self, start, end, values):
        # Python 2 only.
        members = [self._create(v) for v in values]
        self.col[start:end] = members

    def __delslice__(self, start, end):
        # Python 2 only.
        del self.col[start:end]

    def __iter__(self):
        """Iterate over proxied values.

        For the actual domain objects, iterate over .col instead or
        just use the underlying collection directly from its property
        on the parent.
        """
        for member in self.col:
            yield self._get(member)
        # Bug fix: the explicit ``raise StopIteration`` that used to
        # follow this loop is removed.  Under PEP 479 (Python 3.7+),
        # raising StopIteration inside a generator is converted to
        # RuntimeError; the generator simply ends naturally instead.

    def append(self, value):
        item = self._create(value)
        self.col.append(item)

    def count(self, value):
        return sum([1 for _ in
                    util.itertools_filter(lambda v: v == value, iter(self))])

    def extend(self, values):
        for v in values:
            self.append(v)

    def insert(self, index, value):
        self.col[index:index] = [self._create(value)]

    def pop(self, index=-1):
        return self.getter(self.col.pop(index))

    def remove(self, value):
        for i, val in enumerate(self):
            if val == value:
                del self.col[i]
                return
        raise ValueError("value not in list")

    def reverse(self):
        """Not supported, use reversed(mylist)"""
        raise NotImplementedError

    def sort(self):
        """Not supported, use sorted(mylist)"""
        raise NotImplementedError

    def clear(self):
        del self.col[0:len(self.col)]

    def __eq__(self, other):
        return list(self) == other

    def __ne__(self, other):
        return list(self) != other

    def __lt__(self, other):
        return list(self) < other

    def __le__(self, other):
        return list(self) <= other

    def __gt__(self, other):
        return list(self) > other

    def __ge__(self, other):
        return list(self) >= other

    def __cmp__(self, other):
        # Python 2 only; ``cmp`` does not exist on Python 3.
        return cmp(list(self), other)

    def __add__(self, iterable):
        try:
            other = list(iterable)
        except TypeError:
            return NotImplemented
        return list(self) + other

    def __radd__(self, iterable):
        try:
            other = list(iterable)
        except TypeError:
            return NotImplemented
        return other + list(self)

    def __mul__(self, n):
        if not isinstance(n, int):
            return NotImplemented
        return list(self) * n

    __rmul__ = __mul__

    def __iadd__(self, iterable):
        self.extend(iterable)
        return self

    def __imul__(self, n):
        # unlike a regular list *=, proxied __imul__ will generate unique
        # backing objects for each copy. *= on proxied lists is a bit of
        # a stretch anyhow, and this interpretation of the __imul__ contract
        # is more plausibly useful than copying the backing objects.
        if not isinstance(n, int):
            return NotImplemented
        if n == 0:
            self.clear()
        elif n > 1:
            self.extend(list(self) * (n - 1))
        return self

    def copy(self):
        return list(self)

    def __repr__(self):
        return repr(list(self))

    def __hash__(self):
        raise TypeError("%s objects are unhashable" % type(self).__name__)

    # Copy docstrings from the built-in list for any undocumented,
    # same-named methods defined above.
    for func_name, func in list(locals().items()):
        if (util.callable(func) and func.__name__ == func_name and
                not func.__doc__ and hasattr(list, func_name)):
            func.__doc__ = getattr(list, func_name).__doc__
    del func_name, func
# Sentinel used by _AssociationDict.pop() to detect an omitted default.
_NotProvided = util.symbol('_NotProvided')
class _AssociationDict(_AssociationCollection):
    """Generic, converting, dict-to-dict proxy."""

    def _create(self, key, value):
        # Build a new intermediary object for ``key``/``value``.
        return self.creator(key, value)

    def _get(self, object):
        return self.getter(object)

    def _set(self, object, key, value):
        return self.setter(object, key, value)

    def __getitem__(self, key):
        return self._get(self.col[key])

    def __setitem__(self, key, value):
        if key in self.col:
            self._set(self.col[key], key, value)
        else:
            self.col[key] = self._create(key, value)

    def __delitem__(self, key):
        del self.col[key]

    def __contains__(self, key):
        # testlib.pragma exempt:__hash__
        return key in self.col

    def has_key(self, key):
        # Python 2 dict API; equivalent to ``key in self``.
        # testlib.pragma exempt:__hash__
        return key in self.col

    def __iter__(self):
        return iter(self.col.keys())

    def clear(self):
        self.col.clear()

    def __eq__(self, other):
        return dict(self) == other

    def __ne__(self, other):
        return dict(self) != other

    def __lt__(self, other):
        # Ordering comparisons mirror plain-dict behavior of the running
        # interpreter (TypeError on Python 3).
        return dict(self) < other

    def __le__(self, other):
        return dict(self) <= other

    def __gt__(self, other):
        return dict(self) > other

    def __ge__(self, other):
        return dict(self) >= other

    def __cmp__(self, other):
        # Python 2 only; ``cmp`` does not exist on Python 3.
        return cmp(dict(self), other)

    def __repr__(self):
        return repr(dict(self.items()))

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def setdefault(self, key, default=None):
        if key not in self.col:
            self.col[key] = self._create(key, default)
            return default
        else:
            return self[key]

    def keys(self):
        return self.col.keys()

    if util.py2k:
        def iteritems(self):
            return ((key, self._get(self.col[key])) for key in self.col)

        def itervalues(self):
            return (self._get(self.col[key]) for key in self.col)

        def iterkeys(self):
            return self.col.iterkeys()

        def values(self):
            return [self._get(member) for member in self.col.values()]

        def items(self):
            return [(k, self._get(self.col[k])) for k in self]
    else:
        def items(self):
            return ((key, self._get(self.col[key])) for key in self.col)

        def values(self):
            return (self._get(self.col[key]) for key in self.col)

    def pop(self, key, default=_NotProvided):
        # NOTE(review): when the key is absent and a default is supplied,
        # the default itself is passed through self._get() -- confirm this
        # is the intended behavior for custom getters.
        if default is _NotProvided:
            member = self.col.pop(key)
        else:
            member = self.col.pop(key, default)
        return self._get(member)

    def popitem(self):
        item = self.col.popitem()
        return (item[0], self._get(item[1]))

    def update(self, *a, **kw):
        if len(a) > 1:
            raise TypeError('update expected at most 1 arguments, got %i' %
                            len(a))
        elif len(a) == 1:
            seq_or_map = a[0]
            # discern dict from sequence - took the advice from
            # http://www.voidspace.org.uk/python/articles/duck_typing.shtml
            # still not perfect :(
            if hasattr(seq_or_map, 'keys'):
                for item in seq_or_map:
                    self[item] = seq_or_map[item]
            else:
                try:
                    for k, v in seq_or_map:
                        self[k] = v
                except ValueError:
                    raise ValueError(
                        "dictionary update sequence "
                        "requires 2-element tuples")
        # Bug fix: iterate kw.items().  Iterating ``kw`` directly yields
        # only the keys, so ``for key, value in kw`` raised ValueError for
        # any keyword argument usage.
        for key, value in kw.items():
            self[key] = value

    def copy(self):
        return dict(self.items())

    def __hash__(self):
        raise TypeError("%s objects are unhashable" % type(self).__name__)

    # Copy docstrings from the built-in dict for any undocumented,
    # same-named methods defined above.
    for func_name, func in list(locals().items()):
        if (util.callable(func) and func.__name__ == func_name and
                not func.__doc__ and hasattr(dict, func_name)):
            func.__doc__ = getattr(dict, func_name).__doc__
    del func_name, func
class _AssociationSet(_AssociationCollection):
    """Generic, converting, set-to-set proxy."""

    def _create(self, value):
        return self.creator(value)

    def _get(self, object):
        return self.getter(object)

    def _set(self, object, value):
        # NOTE(review): this two-argument form is shadowed by the
        # zero-argument ``_set`` defined further down in the class body;
        # it is retained for parity with the other collection proxies.
        return self.setter(object, value)

    def __len__(self):
        return len(self.col)

    def __bool__(self):
        if self.col:
            return True
        else:
            return False

    __nonzero__ = __bool__

    def __contains__(self, value):
        for member in self.col:
            # testlib.pragma exempt:__eq__
            if self._get(member) == value:
                return True
        return False

    def __iter__(self):
        """Iterate over proxied values.

        For the actual domain objects, iterate over .col instead or just use
        the underlying collection directly from its property on the parent.
        """
        for member in self.col:
            yield self._get(member)
        # Bug fix: the explicit ``raise StopIteration`` that used to
        # follow this loop is removed.  Under PEP 479 (Python 3.7+),
        # raising StopIteration inside a generator is converted to
        # RuntimeError; the generator simply ends naturally instead.

    def add(self, value):
        if value not in self:
            self.col.add(self._create(value))

    # for discard and remove, choosing a more expensive check strategy rather
    # than call self.creator()
    def discard(self, value):
        for member in self.col:
            if self._get(member) == value:
                self.col.discard(member)
                break

    def remove(self, value):
        for member in self.col:
            if self._get(member) == value:
                self.col.discard(member)
                return
        raise KeyError(value)

    def pop(self):
        if not self.col:
            raise KeyError('pop from an empty set')
        member = self.col.pop()
        return self._get(member)

    def update(self, other):
        for value in other:
            self.add(value)

    def __ior__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        for value in other:
            self.add(value)
        return self

    def _set(self):
        # Shadows the two-argument ``_set`` above (see note there);
        # returns a plain set of the proxied values.
        return set(iter(self))

    def union(self, other):
        return set(self).union(other)

    __or__ = union

    def difference(self, other):
        return set(self).difference(other)

    __sub__ = difference

    def difference_update(self, other):
        for value in other:
            self.discard(value)

    def __isub__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        for value in other:
            self.discard(value)
        return self

    def intersection(self, other):
        return set(self).intersection(other)

    __and__ = intersection

    def intersection_update(self, other):
        want, have = self.intersection(other), set(self)
        remove, add = have - want, want - have
        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)

    def __iand__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        want, have = self.intersection(other), set(self)
        remove, add = have - want, want - have
        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)
        return self

    def symmetric_difference(self, other):
        return set(self).symmetric_difference(other)

    __xor__ = symmetric_difference

    def symmetric_difference_update(self, other):
        want, have = self.symmetric_difference(other), set(self)
        remove, add = have - want, want - have
        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)

    def __ixor__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        want, have = self.symmetric_difference(other), set(self)
        remove, add = have - want, want - have
        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)
        return self

    def issubset(self, other):
        return set(self).issubset(other)

    def issuperset(self, other):
        return set(self).issuperset(other)

    def clear(self):
        self.col.clear()

    def copy(self):
        return set(self)

    def __eq__(self, other):
        return set(self) == other

    def __ne__(self, other):
        return set(self) != other

    def __lt__(self, other):
        return set(self) < other

    def __le__(self, other):
        return set(self) <= other

    def __gt__(self, other):
        return set(self) > other

    def __ge__(self, other):
        return set(self) >= other

    def __repr__(self):
        return repr(set(self))

    def __hash__(self):
        raise TypeError("%s objects are unhashable" % type(self).__name__)

    # Copy docstrings from the built-in set for any undocumented,
    # same-named methods defined above.
    for func_name, func in list(locals().items()):
        if (util.callable(func) and func.__name__ == func_name and
                not func.__doc__ and hasattr(set, func_name)):
            func.__doc__ = getattr(set, func_name).__doc__
    del func_name, func
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_LIB_IO_RECORD_WRITER_H_
#define TENSORFLOW_CORE_LIB_IO_RECORD_WRITER_H_
#include "tensorflow/core/lib/hash/crc32c.h"
#include "tensorflow/core/platform/coding.h"
#include "tensorflow/core/platform/status.h"
#include "tensorflow/core/platform/stringpiece.h"
#if !defined(IS_SLIM_BUILD)
#include "tensorflow/core/lib/io/zlib_compression_options.h"
#include "tensorflow/core/lib/io/zlib_outputbuffer.h"
#endif // IS_SLIM_BUILD
#include "xla/tsl/lib/io/record_writer.h"
#include "tensorflow/core/platform/cord.h"
#include "tensorflow/core/platform/macros.h"
#include "tensorflow/core/platform/types.h"
namespace tensorflow {
namespace io {
// Forwarding aliases only: the implementation lives in tsl
// (xla/tsl/lib/io/record_writer.h); this header re-exports it under
// the historical tensorflow::io names.
// NOLINTBEGIN(misc-unused-using-decls)
using tsl::io::RecordWriter;
using tsl::io::RecordWriterOptions;
// NOLINTEND(misc-unused-using-decls)
}  // namespace io
}  // namespace tensorflow
#endif // TENSORFLOW_CORE_LIB_IO_RECORD_WRITER_H_ | c | github | https://github.com/tensorflow/tensorflow | tensorflow/core/lib/io/record_writer.h |
from django.shortcuts import get_object_or_404
from django.utils.html import strip_tags
from django_socketio import events
from chat.models import ChatRoom
@events.on_message(channel="^room-")
def message(request, socket, context, message):
    """
    Event handler for a room receiving a message. First validates a
    joining user's name and sends them the list of users.
    """
    room = get_object_or_404(ChatRoom, id=message["room"])
    if message["action"] == "start":
        # New user joining: strip any markup from the submitted name and
        # reject it if the name is already in use in this room.
        name = strip_tags(message["name"])
        user, created = room.users.get_or_create(name=name)
        if not created:
            socket.send({"action": "in-use"})
        else:
            # Remember the user on the socket context for later messages
            # and for cleanup in the finish() handler.
            context["user"] = user
            users = [u.name for u in room.users.exclude(id=user.id)]
            socket.send({"action": "started", "users": users})
            # Record the socket session id so the user row can be tied
            # to this connection.
            user.session = socket.session.session_id
            user.save()
            joined = {"action": "join", "name": user.name, "id": user.id}
            socket.send_and_broadcast_channel(joined)
    else:
        try:
            user = context["user"]
        except KeyError:
            # Message arrived before a successful "start"; ignore it.
            return
        if message["action"] == "message":
            # Sanitize and broadcast the chat message to the whole room.
            message["message"] = strip_tags(message["message"])
            message["name"] = user.name
            socket.send_and_broadcast_channel(message)
@events.on_finish(channel="^room-")
def finish(request, socket, context):
    """
    Event handler for a socket session ending in a room. Broadcast
    the user leaving and delete them from the DB.
    """
    try:
        user = context["user"]
    except KeyError:
        # Session ended before the user completed a "start"; nothing to do.
        return
    left = {"action": "leave", "name": user.name, "id": user.id}
    socket.broadcast_channel(left)
    # Room membership rows exist only for the lifetime of the connection.
    user.delete()
#!/usr/bin/env python
'''Add an Item to Pocket'''
__author__ = 'Felipe Borges'
import sys
sys.path.append("..")
import getopt
import pocket
USAGE = '''Usage: save_to_pocket [options] url
This script adds an Item to Pocket.
Options:
-h --help: print this help
--consumer_key : the Pocket API consumer key
--access_token : the user's Pocket Access Token
'''
def print_usage_and_exit():
    """Print the usage text and terminate with exit status 2."""
    print USAGE
    sys.exit(2)
def main():
try:
shortflags = 'h'
longflags = ['help', 'consumer_key=', 'access_token=']
opts, args = getopt.gnu_getopt(sys.argv[1:], shortflags, longflags)
except getopt.GetoptError:
print_usage_and_exit()
consumer_key = None
access_token = None
for o, a in opts:
if o in ('-h', '--help'):
print_usage_and_exit()
if o in ('--consumer_key'):
consumer_key = a
if o in ('--access_token'):
access_token = a
url = ' '.join(args)
if not url or not consumer_key or not access_token:
print_usage_and_exit()
api = pocket.Api(consumer_key = consumer_key, access_token = access_token)
try:
item = api.add(url)
print 'Item \'%s\' added successfuly!' % item.normal_url
except e:
print e
sys.exit(2)
# Script entry point.
if __name__ == "__main__":
    main()
# Copyright (c) 2016 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from builtins import object
from threadloop import ThreadLoop
import tornado
import tornado.httpclient
from tornado.httputil import url_concat
from .TUDPTransport import TUDPTransport
from concurrent.futures import Future
from thrift.transport.TTransport import TBufferedTransport
class LocalAgentHTTP(object):
    """Thin asynchronous HTTP client for the local jaeger-agent's
    sampling endpoint."""

    def __init__(self, host, port):
        self.agent_http_host = host
        self.agent_http_port = int(port)

    def request_sampling_strategy(self, service_name, timeout):
        """Fetch the sampling strategy for ``service_name``; returns a
        tornado Future with the HTTP response."""
        base_url = 'http://%s:%d/sampling' % (
            self.agent_http_host, self.agent_http_port)
        # Properly url encode the params
        target_url = url_concat(base_url, [('service', service_name)])
        client = tornado.httpclient.AsyncHTTPClient(
            defaults=dict(request_timeout=timeout))
        return client.fetch(target_url)
class LocalAgentSender(TBufferedTransport):
    """
    LocalAgentSender implements everything necessary to communicate with
    local jaeger-agent. This class is designed to work in tornado and
    non-tornado environments. If in tornado, pass in the ioloop, if not
    then LocalAgentSender will create one for itself.

    NOTE: LocalAgentSender derives from TBufferedTransport. This will buffer
    up all written data until flush() is called. Flush gets called at the
    end of the batch span submission call.
    """
    def __init__(self, host, sampling_port, reporting_port, io_loop=None):
        # IOLoop
        self._thread_loop = None
        self.io_loop = io_loop or self._create_new_thread_loop()
        # http sampling
        self.local_agent_http = LocalAgentHTTP(host, sampling_port)
        # udp reporting - this will only get written to after our flush() call.
        # We are buffering things up because we are a TBufferedTransport.
        udp = TUDPTransport(host, reporting_port)
        TBufferedTransport.__init__(self, udp)
    def _create_new_thread_loop(self):
        """
        Create a daemonized thread that will run Tornado IOLoop.
        :return: the IOLoop backed by the new thread.
        """
        self._thread_loop = ThreadLoop()
        if not self._thread_loop.is_ready():
            self._thread_loop.start()
        return self._thread_loop._io_loop
    def readFrame(self):
        """Empty read frame that is never ready"""
        # Returns a Future that never resolves: reading back from the
        # agent is not supported over this transport.
        return Future()
    # Pass-through for the http
    def request_sampling_strategy(self, service_name, timeout):
        # Delegate to the HTTP sampling client.
        return self.local_agent_http.request_sampling_strategy(
            service_name, timeout)
#!/usr/bin/env bash
# Copyright 2020 The Cockroach Authors.
#
# Use of this software is governed by the CockroachDB Software License
# included in the /LICENSE file.
# This script makes it easy to make custom builds.
#
# It creates a tag for a SHA that triggers a build in the Make and Publish
# Build TeamCity build config. Once the build is complete, binaries and a
# docker image are available. For details on how to validate everything is
# correct and how to use the binaries/docker image, see:
# https://cockroachlabs.atlassian.net/wiki/spaces/ENG/pages/846299518/One-Off+Builds+A+How+To
#
# How to use this script:
#
# 1) To tag the checked out SHA (the script is not available for releases
# v20.1.5, v19.2.10 and older; use option 2 for those releases) run it
# with no arguments from the root of the repo.
#
# 2) To tag a non-checked out SHA including any SHAs on releases (or branches)
# older than v20.1.5 and v19.2.10, run it from the root of the repo with
# the SHA that you want to tag as the single argument.
#
# ./scripts/tag-custom-build.sh "$SHA"
#
# Use the --jj flag to get the current SHA from jj instead of git:
#
# ./scripts/tag-custom-build.sh --jj
#
# Note the Tag Name and Build ID (printed at the end of the script output).
#
# Verify the SHA on the GitHub page for the tag (it should open automatically
# in your browser) is the one you tagged. (If the page didn't open in your
# browser, the tag should be somewhere in this list, not necessarily at the top:
# https://github.com/cockroachdb/cockroach/tags .)
#
# Use the tag name to find the build in the Make and Publish Build build config
# in TeamCity.
#
# Use the Build ID when referencing the binaries and docker image with others.
set -euo pipefail
use_jj=false
# Parse command line options
while getopts ":j-:" opt; do
  case $opt in
    j)
      use_jj=true
      ;;
    -)
      # GNU-style long option: getopts reports '-' as the option and
      # leaves the remainder (e.g. "jj") in OPTARG.
      case "${OPTARG}" in
        jj)
          use_jj=true
          ;;
        *)
          echo "Invalid option: --${OPTARG}" >&2
          exit 1
          ;;
      esac
      ;;
    \?)
      echo "Invalid option: -$OPTARG" >&2
      exit 1
      ;;
  esac
done
# Shift past the processed options
shift $((OPTIND-1))
# Get SHA from positional parameter if provided
SHA="${1-}"
if [ -z "$SHA" ] ; then
  if [ "$use_jj" = true ] ; then
    SHA="$(jj log -r@ -n1 --template commit_id --no-graph)"
  else
    SHA="$(git rev-parse HEAD)"
  fi
fi
# Ensure all the latest tags are downloaded locally
git fetch -t
ID="$(git describe --tags --match=v[0-9]* "$SHA")"
TAG="custombuild-$ID"
git push git@github.com:cockroachdb/cockroach.git "$SHA:refs/tags/$TAG"
TAG_URL="https://github.com/cockroachdb/cockroach/releases/tag/${TAG}"
TEAMCITY_URL="https://teamcity.cockroachdb.com/buildConfiguration/Internal_Cockroach_Release_Customized_MakeAndPublishCustomizedBuild?mode=builds&branch=${TAG}"
# Open the tag page and the TeamCity build page in a browser when an
# opener is available (macOS "open" or Linux "xdg-open").
if [ "$(command -v open)" ] ; then
  open "$TEAMCITY_URL"
  open "$TAG_URL"
elif [ "$(command -v xdg-open)" ] ; then
  xdg-open "$TEAMCITY_URL"
  xdg-open "$TAG_URL"
fi
cat << EOF
See the one-off builds wiki page for steps for the rest of the process:
https://cockroachlabs.atlassian.net/wiki/spaces/ENG/pages/846299518/One-Off+Builds+A+How+To
Here is the tag in GitHub:
$TAG_URL
Here is where the build run should show up in TeamCity for the tag:
$TEAMCITY_URL
Tag name: $TAG
Build ID: $ID
The binaries will be available at:
https://storage.googleapis.com/cockroach-customized-builds-artifacts-prod/cockroach-$ID.linux-amd64.tgz
https://storage.googleapis.com/cockroach-customized-builds-artifacts-prod/cockroach-$ID.linux-amd64-fips.tgz
https://storage.googleapis.com/cockroach-customized-builds-artifacts-prod/cockroach-$ID.linux-arm64.tgz
https://storage.googleapis.com/cockroach-customized-builds-artifacts-prod/cockroach-$ID.linux-s390x.tgz
https://storage.googleapis.com/cockroach-customized-builds-artifacts-prod/cockroach-$ID.darwin-11.0-arm64.tgz
https://storage.googleapis.com/cockroach-customized-builds-artifacts-prod/cockroach-$ID.darwin-10.9-amd64.tgz
https://storage.googleapis.com/cockroach-customized-builds-artifacts-prod/cockroach-$ID.windows-6.2-amd64.zip
Pull the docker image by:
docker pull us-docker.pkg.dev/cockroach-cloud-images/cockroachdb-customized/cockroach-customized:$ID
docker pull us-docker.pkg.dev/cockroach-cloud-images/cockroachdb-customized/cockroach-customized:$ID-fips
EOF
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Database setup and migration commands."""
from nova import utils
IMPL = utils.LazyPluggable(
'db_backend',
config_group='baremetal',
sqlalchemy='nova.virt.baremetal.db.sqlalchemy.migration')
INIT_VERSION = 0
def db_sync(version=None):
"""Migrate the database to `version` or the most recent version."""
return IMPL.db_sync(version=version)
def db_version():
"""Display the current database version."""
return IMPL.db_version() | unknown | codeparrot/codeparrot-clean | ||
// mksyscall.pl -l32 -arm -tags linux,mipsle syscall_linux.go syscall_linux_mipsx.go
// Code generated by the command above; DO NOT EDIT.
//go:build linux && mipsle
package syscall
import "unsafe"
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func faccessat(dirfd int, path string, mode uint32) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_FACCESSAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func faccessat2(dirfd int, path string, mode uint32, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(_SYS_faccessat2, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func fchmodat(dirfd int, path string, mode uint32) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_FCHMODAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func fchmodat2(dirfd int, path string, mode uint32, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(_SYS_fchmodat2, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func linkat(olddirfd int, oldpath string, newdirfd int, newpath string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_LINKAT, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func openat(dirfd int, path string, flags int, mode uint32) (fd int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
r0, _, e1 := Syscall6(SYS_OPENAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags), uintptr(mode), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func pipe2(p *[2]_C_int, flags int) (err error) {
_, _, e1 := RawSyscall(SYS_PIPE2, uintptr(unsafe.Pointer(p)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func readlinkat(dirfd int, path string, buf []byte) (n int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 unsafe.Pointer
if len(buf) > 0 {
_p1 = unsafe.Pointer(&buf[0])
} else {
_p1 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_READLINKAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(buf)), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func symlinkat(oldpath string, newdirfd int, newpath string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_SYMLINKAT, uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func unlinkat(dirfd int, path string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UNLINKAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func utimensat(dirfd int, path string, times *[2]Timespec, flag int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_UTIMENSAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)), uintptr(flag), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getcwd(buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_GETCWD, uintptr(_p0), uintptr(len(buf)), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func wait4(pid int, wstatus *_C_int, options int, rusage *Rusage) (wpid int, err error) {
r0, _, e1 := Syscall6(SYS_WAIT4, uintptr(pid), uintptr(unsafe.Pointer(wstatus)), uintptr(options), uintptr(unsafe.Pointer(rusage)), 0, 0)
wpid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func ptrace(request int, pid int, addr uintptr, data uintptr) (err error) {
_, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func ptracePtr(request int, pid int, addr uintptr, data unsafe.Pointer) (err error) {
_, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func reboot(magic1 uint, magic2 uint, cmd int, arg string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(arg)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_REBOOT, uintptr(magic1), uintptr(magic2), uintptr(cmd), uintptr(unsafe.Pointer(_p0)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func mount(source string, target string, fstype string, flags uintptr, data *byte) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(source)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(target)
if err != nil {
return
}
var _p2 *byte
_p2, err = BytePtrFromString(fstype)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_MOUNT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(unsafe.Pointer(_p2)), uintptr(flags), uintptr(unsafe.Pointer(data)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Acct(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_ACCT, uintptr(unsafe.Pointer(_p0)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Adjtimex(buf *Timex) (state int, err error) {
r0, _, e1 := Syscall(SYS_ADJTIMEX, uintptr(unsafe.Pointer(buf)), 0, 0)
state = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Chdir(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Chroot(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_CHROOT, uintptr(unsafe.Pointer(_p0)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Close(fd int) (err error) {
_, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup(oldfd int) (fd int, err error) {
r0, _, e1 := Syscall(SYS_DUP, uintptr(oldfd), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup3(oldfd int, newfd int, flags int) (err error) {
_, _, e1 := Syscall(SYS_DUP3, uintptr(oldfd), uintptr(newfd), uintptr(flags))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func EpollCreate1(flag int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_EPOLL_CREATE1, uintptr(flag), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func EpollCtl(epfd int, op int, fd int, event *EpollEvent) (err error) {
_, _, e1 := RawSyscall6(SYS_EPOLL_CTL, uintptr(epfd), uintptr(op), uintptr(fd), uintptr(unsafe.Pointer(event)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fallocate(fd int, mode uint32, off int64, len int64) (err error) {
_, _, e1 := Syscall6(SYS_FALLOCATE, uintptr(fd), uintptr(mode), uintptr(off), uintptr(off>>32), uintptr(len), uintptr(len>>32))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchdir(fd int) (err error) {
_, _, e1 := Syscall(SYS_FCHDIR, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchmod(fd int, mode uint32) (err error) {
_, _, e1 := Syscall(SYS_FCHMOD, uintptr(fd), uintptr(mode), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchownat(dirfd int, path string, uid int, gid int, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FCHOWNAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func fcntl(fd int, cmd int, arg int) (val int, err error) {
r0, _, e1 := Syscall(SYS_FCNTL, uintptr(fd), uintptr(cmd), uintptr(arg))
val = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fdatasync(fd int) (err error) {
_, _, e1 := Syscall(SYS_FDATASYNC, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Flock(fd int, how int) (err error) {
_, _, e1 := Syscall(SYS_FLOCK, uintptr(fd), uintptr(how), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fsync(fd int) (err error) {
_, _, e1 := Syscall(SYS_FSYNC, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getdents(fd int, buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_GETDENTS64, uintptr(fd), uintptr(_p0), uintptr(len(buf)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpgid(pid int) (pgid int, err error) {
r0, _, e1 := RawSyscall(SYS_GETPGID, uintptr(pid), 0, 0)
pgid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpid() (pid int) {
r0, _ := rawSyscallNoError(SYS_GETPID, 0, 0, 0)
pid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getppid() (ppid int) {
r0, _ := rawSyscallNoError(SYS_GETPPID, 0, 0, 0)
ppid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpriority(which int, who int) (prio int, err error) {
r0, _, e1 := Syscall(SYS_GETPRIORITY, uintptr(which), uintptr(who), 0)
prio = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getrusage(who int, rusage *Rusage) (err error) {
_, _, e1 := RawSyscall(SYS_GETRUSAGE, uintptr(who), uintptr(unsafe.Pointer(rusage)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Gettid() (tid int) {
r0, _ := rawSyscallNoError(SYS_GETTID, 0, 0, 0)
tid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getxattr(path string, attr string, dest []byte) (sz int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(attr)
if err != nil {
return
}
var _p2 unsafe.Pointer
if len(dest) > 0 {
_p2 = unsafe.Pointer(&dest[0])
} else {
_p2 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_GETXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(dest)), 0, 0)
sz = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyAddWatch(fd int, pathname string, mask uint32) (watchdesc int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(pathname)
if err != nil {
return
}
r0, _, e1 := Syscall(SYS_INOTIFY_ADD_WATCH, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(mask))
watchdesc = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyInit1(flags int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_INOTIFY_INIT1, uintptr(flags), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyRmWatch(fd int, watchdesc uint32) (success int, err error) {
r0, _, e1 := RawSyscall(SYS_INOTIFY_RM_WATCH, uintptr(fd), uintptr(watchdesc), 0)
success = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Kill(pid int, sig Signal) (err error) {
_, _, e1 := RawSyscall(SYS_KILL, uintptr(pid), uintptr(sig), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Klogctl(typ int, buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_SYSLOG, uintptr(typ), uintptr(_p0), uintptr(len(buf)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Listxattr(path string, dest []byte) (sz int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 unsafe.Pointer
if len(dest) > 0 {
_p1 = unsafe.Pointer(&dest[0])
} else {
_p1 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_LISTXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(dest)))
sz = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mkdirat(dirfd int, path string, mode uint32) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_MKDIRAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mknodat(dirfd int, path string, mode uint32, dev int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_MKNODAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Nanosleep(time *Timespec, leftover *Timespec) (err error) {
_, _, e1 := Syscall(SYS_NANOSLEEP, uintptr(unsafe.Pointer(time)), uintptr(unsafe.Pointer(leftover)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func PivotRoot(newroot string, putold string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(newroot)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(putold)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_PIVOT_ROOT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func prlimit1(pid int, resource int, newlimit *Rlimit, old *Rlimit) (err error) {
_, _, e1 := RawSyscall6(SYS_PRLIMIT64, uintptr(pid), uintptr(resource), uintptr(unsafe.Pointer(newlimit)), uintptr(unsafe.Pointer(old)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func read(fd int, p []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(_p0), uintptr(len(p)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Removexattr(path string, attr string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(attr)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_REMOVEXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setdomainname(p []byte) (err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_SETDOMAINNAME, uintptr(_p0), uintptr(len(p)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Sethostname(p []byte) (err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_SETHOSTNAME, uintptr(_p0), uintptr(len(p)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setpgid(pid int, pgid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETPGID, uintptr(pid), uintptr(pgid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setsid() (pid int, err error) {
r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0)
pid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Settimeofday(tv *Timeval) (err error) {
_, _, e1 := RawSyscall(SYS_SETTIMEOFDAY, uintptr(unsafe.Pointer(tv)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setpriority(which int, who int, prio int) (err error) {
_, _, e1 := Syscall(SYS_SETPRIORITY, uintptr(which), uintptr(who), uintptr(prio))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setxattr(path string, attr string, data []byte, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(attr)
if err != nil {
return
}
var _p2 unsafe.Pointer
if len(data) > 0 {
_p2 = unsafe.Pointer(&data[0])
} else {
_p2 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall6(SYS_SETXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(data)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Sync() {
Syscall(SYS_SYNC, 0, 0, 0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Sysinfo(info *Sysinfo_t) (err error) {
_, _, e1 := RawSyscall(SYS_SYSINFO, uintptr(unsafe.Pointer(info)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Tee(rfd int, wfd int, len int, flags int) (n int64, err error) {
r0, r1, e1 := Syscall6(SYS_TEE, uintptr(rfd), uintptr(wfd), uintptr(len), uintptr(flags), 0, 0)
n = int64(int64(r1)<<32 | int64(r0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Tgkill(tgid int, tid int, sig Signal) (err error) {
_, _, e1 := RawSyscall(SYS_TGKILL, uintptr(tgid), uintptr(tid), uintptr(sig))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Times(tms *Tms) (ticks uintptr, err error) {
r0, _, e1 := RawSyscall(SYS_TIMES, uintptr(unsafe.Pointer(tms)), 0, 0)
ticks = uintptr(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Umask(mask int) (oldmask int) {
r0, _ := rawSyscallNoError(SYS_UMASK, uintptr(mask), 0, 0)
oldmask = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Uname(buf *Utsname) (err error) {
_, _, e1 := RawSyscall(SYS_UNAME, uintptr(unsafe.Pointer(buf)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Unmount(target string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(target)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UMOUNT2, uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Unshare(flags int) (err error) {
_, _, e1 := Syscall(SYS_UNSHARE, uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func write(fd int, p []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func exitThread(code int) (err error) {
_, _, e1 := Syscall(SYS_EXIT, uintptr(code), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func readlen(fd int, p *byte, np int) (n int, err error) {
r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(unsafe.Pointer(p)), uintptr(np))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func munmap(addr uintptr, length uintptr) (err error) {
_, _, e1 := Syscall(SYS_MUNMAP, uintptr(addr), uintptr(length), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Madvise(b []byte, advice int) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MADVISE, uintptr(_p0), uintptr(len(b)), uintptr(advice))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mprotect(b []byte, prot int) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MPROTECT, uintptr(_p0), uintptr(len(b)), uintptr(prot))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mlock(b []byte) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MLOCK, uintptr(_p0), uintptr(len(b)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Munlock(b []byte) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MUNLOCK, uintptr(_p0), uintptr(len(b)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mlockall(flags int) (err error) {
_, _, e1 := Syscall(SYS_MLOCKALL, uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Munlockall() (err error) {
_, _, e1 := Syscall(SYS_MUNLOCKALL, 0, 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup2(oldfd int, newfd int) (err error) {
_, _, e1 := Syscall(SYS_DUP2, uintptr(oldfd), uintptr(newfd), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchown(fd int, uid int, gid int) (err error) {
_, _, e1 := Syscall(SYS_FCHOWN, uintptr(fd), uintptr(uid), uintptr(gid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func fstatat(dirfd int, path string, stat *Stat_t, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FSTATAT64, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Ftruncate(fd int, length int64) (err error) {
_, _, e1 := Syscall6(SYS_FTRUNCATE64, uintptr(fd), 0, uintptr(length), uintptr(length>>32), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getegid() (egid int) {
r0, _ := rawSyscallNoError(SYS_GETEGID, 0, 0, 0)
egid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Geteuid() (euid int) {
r0, _ := rawSyscallNoError(SYS_GETEUID, 0, 0, 0)
euid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getgid() (gid int) {
r0, _ := rawSyscallNoError(SYS_GETGID, 0, 0, 0)
gid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getuid() (uid int) {
r0, _ := rawSyscallNoError(SYS_GETUID, 0, 0, 0)
uid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Lchown(path string, uid int, gid int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_LCHOWN, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Listen(s int, n int) (err error) {
_, _, e1 := Syscall(SYS_LISTEN, uintptr(s), uintptr(n), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Pause() (err error) {
_, _, e1 := Syscall(SYS_PAUSE, 0, 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func pread(fd int, p []byte, offset int64) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_PREAD64, uintptr(fd), uintptr(_p0), uintptr(len(p)), 0, uintptr(offset), uintptr(offset>>32))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func pwrite(fd int, p []byte, offset int64) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_PWRITE64, uintptr(fd), uintptr(_p0), uintptr(len(p)), 0, uintptr(offset), uintptr(offset>>32))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_RENAMEAT, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error) {
r0, _, e1 := Syscall6(SYS__NEWSELECT, uintptr(nfd), uintptr(unsafe.Pointer(r)), uintptr(unsafe.Pointer(w)), uintptr(unsafe.Pointer(e)), uintptr(unsafe.Pointer(timeout)), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) {
r0, _, e1 := Syscall6(SYS_SENDFILE64, uintptr(outfd), uintptr(infd), uintptr(unsafe.Pointer(offset)), uintptr(count), 0, 0)
written = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setfsgid(gid int) (err error) {
_, _, e1 := Syscall(SYS_SETFSGID, uintptr(gid), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setfsuid(uid int) (err error) {
_, _, e1 := Syscall(SYS_SETFSUID, uintptr(uid), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Shutdown(fd int, how int) (err error) {
_, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Splice(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n int, err error) {
r0, _, e1 := Syscall6(SYS_SPLICE, uintptr(rfd), uintptr(unsafe.Pointer(roff)), uintptr(wfd), uintptr(unsafe.Pointer(woff)), uintptr(len), uintptr(flags))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func SyncFileRange(fd int, off int64, n int64, flags int) (err error) {
_, _, e1 := Syscall9(SYS_SYNC_FILE_RANGE, uintptr(fd), 0, uintptr(off), uintptr(off>>32), uintptr(n), uintptr(n>>32), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Truncate(path string, length int64) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_TRUNCATE64, uintptr(unsafe.Pointer(_p0)), 0, uintptr(length), uintptr(length>>32), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Ustat(dev int, ubuf *Ustat_t) (err error) {
_, _, e1 := Syscall(SYS_USTAT, uintptr(dev), uintptr(unsafe.Pointer(ubuf)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) {
r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// bind wraps bind(2): assigns the address at addr (addrlen bytes) to
// socket s.
func bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) {
	_, _, e1 := Syscall(SYS_BIND, uintptr(s), uintptr(addr), uintptr(addrlen))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// connect wraps connect(2): connects socket s to the address at addr
// (addrlen bytes).
func connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) {
	_, _, e1 := Syscall(SYS_CONNECT, uintptr(s), uintptr(addr), uintptr(addrlen))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// getgroups wraps getgroups(2): fills list (capacity n) with the caller's
// supplementary group IDs and returns the count. RawSyscall is used — the
// call does not block, so no scheduler notification is needed.
func getgroups(n int, list *_Gid_t) (nn int, err error) {
	r0, _, e1 := RawSyscall(SYS_GETGROUPS, uintptr(n), uintptr(unsafe.Pointer(list)), 0)
	nn = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// getsockopt wraps getsockopt(2): reads the value of option name at the
// given level on socket s into val; vallen carries the buffer size in and
// the written size out.
func getsockopt(s int, level int, name int, val unsafe.Pointer, vallen *_Socklen) (err error) {
	_, _, e1 := Syscall6(SYS_GETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(unsafe.Pointer(vallen)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// setsockopt wraps setsockopt(2): sets option name at the given level on
// socket s from the vallen bytes at val.
func setsockopt(s int, level int, name int, val unsafe.Pointer, vallen uintptr) (err error) {
	_, _, e1 := Syscall6(SYS_SETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(vallen), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// socket wraps socket(2): creates an endpoint of the given domain, type
// and protocol, returning its file descriptor.
func socket(domain int, typ int, proto int) (fd int, err error) {
	r0, _, e1 := RawSyscall(SYS_SOCKET, uintptr(domain), uintptr(typ), uintptr(proto))
	fd = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// socketpair wraps socketpair(2): creates a connected pair of sockets,
// writing the two descriptors into *fd.
func socketpair(domain int, typ int, proto int, fd *[2]int32) (err error) {
	_, _, e1 := RawSyscall6(SYS_SOCKETPAIR, uintptr(domain), uintptr(typ), uintptr(proto), uintptr(unsafe.Pointer(fd)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// getpeername wraps getpeername(2): writes the address of the peer
// connected to socket fd into rsa/addrlen.
func getpeername(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) {
	_, _, e1 := RawSyscall(SYS_GETPEERNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// getsockname wraps getsockname(2): writes the local address bound to
// socket fd into rsa/addrlen.
func getsockname(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) {
	_, _, e1 := RawSyscall(SYS_GETSOCKNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// recvfrom wraps recvfrom(2): receives up to len(p) bytes on socket fd into
// p, recording the sender's address in from/fromlen. Returns the byte count.
func recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error) {
	var _p0 unsafe.Pointer
	if len(p) > 0 {
		_p0 = unsafe.Pointer(&p[0])
	} else {
		// Empty slice: pass a valid dummy pointer (&_zero) so the kernel
		// never sees a nil buffer address.
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall6(SYS_RECVFROM, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(flags), uintptr(unsafe.Pointer(from)), uintptr(unsafe.Pointer(fromlen)))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// sendto wraps sendto(2): sends the contents of buf on socket s to the
// address at to (addrlen bytes).
func sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error) {
	var _p0 unsafe.Pointer
	if len(buf) > 0 {
		_p0 = unsafe.Pointer(&buf[0])
	} else {
		// Empty slice: substitute a valid dummy pointer for the kernel.
		_p0 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall6(SYS_SENDTO, uintptr(s), uintptr(_p0), uintptr(len(buf)), uintptr(flags), uintptr(to), uintptr(addrlen))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// recvmsg wraps recvmsg(2): receives a message on socket s as described by
// msg (scatter buffers, control data, source address). Returns byte count.
func recvmsg(s int, msg *Msghdr, flags int) (n int, err error) {
	r0, _, e1 := Syscall(SYS_RECVMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// sendmsg wraps sendmsg(2): sends the message described by msg (gather
// buffers, control data, destination) on socket s. Returns bytes sent.
func sendmsg(s int, msg *Msghdr, flags int) (n int, err error) {
	r0, _, e1 := Syscall(SYS_SENDMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// InotifyInit wraps inotify_init(2): creates a new inotify instance and
// returns its file descriptor.
func InotifyInit() (fd int, err error) {
	r0, _, e1 := RawSyscall(SYS_INOTIFY_INIT, 0, 0, 0)
	fd = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Ioperm wraps ioperm(2): sets I/O port access permission for num ports
// starting at from; on enables, off disables.
func Ioperm(from int, num int, on int) (err error) {
	_, _, e1 := Syscall(SYS_IOPERM, uintptr(from), uintptr(num), uintptr(on))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Iopl wraps iopl(2): changes the calling process's I/O privilege level.
func Iopl(level int) (err error) {
	_, _, e1 := Syscall(SYS_IOPL, uintptr(level), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// futimesat wraps futimesat(2): sets access/modification times of path
// (resolved relative to directory fd dirfd) to times[0]/times[1].
func futimesat(dirfd int, path string, times *[2]Timeval) (err error) {
	// NUL-terminate the path; errors out on embedded NUL bytes.
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_FUTIMESAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Gettimeofday wraps gettimeofday(2): stores the current wall-clock time
// in *tv (timezone argument passed as NULL).
func Gettimeofday(tv *Timeval) (err error) {
	_, _, e1 := RawSyscall(SYS_GETTIMEOFDAY, uintptr(unsafe.Pointer(tv)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Time wraps time(2): returns the current time in seconds since the epoch,
// also storing it in *t when t is non-nil.
func Time(t *Time_t) (tt Time_t, err error) {
	r0, _, e1 := RawSyscall(SYS_TIME, uintptr(unsafe.Pointer(t)), 0, 0)
	tt = Time_t(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Utime wraps utime(2): sets the access and modification times of path
// from *buf.
func Utime(path string, buf *Utimbuf) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_UTIME, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(buf)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// utimes wraps utimes(2): sets access (times[0]) and modification
// (times[1]) times of path with microsecond resolution.
func utimes(path string, times *[2]Timeval) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_UTIMES, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Lstat wraps lstat64(2): like Stat, but if path is a symbolic link it
// stats the link itself rather than its target.
func Lstat(path string, stat *Stat_t) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_LSTAT64, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Fstat wraps fstat64(2): fills *stat with metadata of the open file fd.
func Fstat(fd int, stat *Stat_t) (err error) {
	_, _, e1 := Syscall(SYS_FSTAT64, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Stat wraps stat64(2): fills *stat with metadata of the file at path,
// following symbolic links.
func Stat(path string, stat *Stat_t) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_STAT64, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// EpollWait wraps epoll_wait(2): waits up to msec milliseconds for events
// on epoll instance epfd, writing at most len(events) of them into events.
// Returns the number of events delivered.
func EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) {
	var _p0 unsafe.Pointer
	if len(events) > 0 {
		_p0 = unsafe.Pointer(&events[0])
	} else {
		// Empty slice: pass a valid dummy pointer instead of nil.
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall6(SYS_EPOLL_WAIT, uintptr(epfd), uintptr(_p0), uintptr(len(events)), uintptr(msec), 0, 0)
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func mmap2(addr uintptr, length uintptr, prot int, flags int, fd int, pageOffset uintptr) (xaddr uintptr, err error) {
r0, _, e1 := Syscall6(SYS_MMAP2, uintptr(addr), uintptr(length), uintptr(prot), uintptr(flags), uintptr(fd), uintptr(pageOffset))
xaddr = uintptr(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
} | go | github | https://github.com/golang/go | src/syscall/zsyscall_linux_mipsle.go |
# encoding: utf-8
from south.db import db
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
    """Initial South schema migration for the ``permissions`` app.

    Creates four tables — Permission, PermissionHolder, Role and
    RoleMember — plus a unique constraint on Permission(namespace, name).
    PermissionHolder and RoleMember are generic-relation join tables keyed
    by a ContentType foreign key plus an integer object id.

    NOTE(review): the original text had its indentation stripped and the
    final line carried fused dataset-scrape metadata; both repaired here
    with the schema definitions preserved verbatim.
    """

    def forwards(self, orm):
        """Apply the migration: create the four tables and the constraint."""
        # Adding model 'Permission'
        db.create_table('permissions_permission', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('namespace', self.gf('django.db.models.fields.CharField')(max_length=64)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=64)),
            ('label', self.gf('django.db.models.fields.CharField')(max_length=96)),
        ))
        db.send_create_signal('permissions', ['Permission'])

        # Adding unique constraint on 'Permission', fields ['namespace', 'name']
        db.create_unique('permissions_permission', ['namespace', 'name'])

        # Adding model 'PermissionHolder' (generic link: permission -> any holder object)
        db.create_table('permissions_permissionholder', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('permission', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['permissions.Permission'])),
            ('holder_type', self.gf('django.db.models.fields.related.ForeignKey')(related_name='permission_holder', to=orm['contenttypes.ContentType'])),
            ('holder_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
        ))
        db.send_create_signal('permissions', ['PermissionHolder'])

        # Adding model 'Role'
        db.create_table('permissions_role', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=64)),
            ('label', self.gf('django.db.models.fields.CharField')(unique=True, max_length=64)),
        ))
        db.send_create_signal('permissions', ['Role'])

        # Adding model 'RoleMember' (generic link: role -> any member object)
        db.create_table('permissions_rolemember', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('role', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['permissions.Role'])),
            ('member_type', self.gf('django.db.models.fields.related.ForeignKey')(related_name='role_member', to=orm['contenttypes.ContentType'])),
            ('member_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
        ))
        db.send_create_signal('permissions', ['RoleMember'])

    def backwards(self, orm):
        """Revert the migration: drop the constraint first, then all tables."""
        # Removing unique constraint on 'Permission', fields ['namespace', 'name']
        db.delete_unique('permissions_permission', ['namespace', 'name'])

        # Deleting model 'Permission'
        db.delete_table('permissions_permission')

        # Deleting model 'PermissionHolder'
        db.delete_table('permissions_permissionholder')

        # Deleting model 'Role'
        db.delete_table('permissions_role')

        # Deleting model 'RoleMember'
        db.delete_table('permissions_rolemember')

    # Frozen ORM snapshot used by South to build the `orm` object above.
    models = {
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'permissions.permission': {
            'Meta': {'ordering': "('namespace', 'label')", 'unique_together': "(('namespace', 'name'),)", 'object_name': 'Permission'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '96'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'namespace': ('django.db.models.fields.CharField', [], {'max_length': '64'})
        },
        'permissions.permissionholder': {
            'Meta': {'object_name': 'PermissionHolder'},
            'holder_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'holder_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'permission_holder'", 'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['permissions.Permission']"})
        },
        'permissions.role': {
            'Meta': {'ordering': "('label',)", 'object_name': 'Role'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'label': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'})
        },
        'permissions.rolemember': {
            'Meta': {'object_name': 'RoleMember'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'member_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'member_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'role_member'", 'to': "orm['contenttypes.ContentType']"}),
            'role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['permissions.Role']"})
        }
    }

    complete_apps = ['permissions']
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.