code stringlengths 1 25.8M | language stringclasses 18 values | source stringclasses 4 values | repo stringclasses 78 values | path stringlengths 0 268 |
|---|---|---|---|---|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.conf import settings
from django.test import TestCase, override_settings
from unittest import skip
from zerver.lib.avatar import avatar_url
from zerver.lib.bugdown import url_filename
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_runner import slow
from zerver.lib.upload import sanitize_name, S3UploadBackend, \
upload_message_image, delete_message_image, LocalUploadBackend
import zerver.lib.upload
from zerver.models import Attachment, Recipient, get_user_profile_by_email, \
get_old_unclaimed_attachments, Message, UserProfile
from zerver.lib.actions import do_delete_old_unclaimed_attachments
import ujson
from six.moves import urllib
from six import text_type
from PIL import Image
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from six.moves import StringIO as _StringIO
import mock
import os
import io
import shutil
import re
import datetime
import requests
import base64
from datetime import timedelta
from django.utils import timezone
from moto import mock_s3
TEST_AVATAR_DIR = os.path.join(os.path.dirname(__file__), 'images')
from typing import Any, Callable, TypeVar
def destroy_uploads():
    # type: () -> None
    """Delete the local uploads directory between tests, if it exists."""
    upload_dir = settings.LOCAL_UPLOADS_DIR
    if os.path.exists(upload_dir):
        shutil.rmtree(upload_dir)
class StringIO(_StringIO):
    # Give instances a writable ``name`` attribute so they can stand in for
    # uploaded files in Django's test client.
    name = ''  # https://github.com/python/typeshed/issues/598
class FileUploadTest(ZulipTestCase):
    """End-to-end tests for the file upload endpoints (/json/upload_file and
    /api/v1/user_uploads) against the local-disk storage backend."""

    def test_rest_endpoint(self):
        # type: () -> None
        """
        Tests the /api/v1/user_uploads api endpoint. Here a single file is uploaded
        and downloaded using a username and api_key
        """
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"

        # Upload file via API
        auth_headers = self.api_auth('hamlet@zulip.com')
        result = self.client_post('/api/v1/user_uploads', {'file': fp}, **auth_headers)
        json = ujson.loads(result.content)
        self.assertIn("uri", json)
        uri = json["uri"]
        base = '/user_uploads/'
        self.assertEquals(base, uri[:len(base)])

        # Download file via API (auth headers, not a web session)
        self.client_post('/accounts/logout/')
        response = self.client_get(uri, **auth_headers)
        data = b"".join(response.streaming_content)
        self.assertEquals(b"zulip!", data)

        # Files uploaded through the API should be accessible via the web client
        self.login("hamlet@zulip.com")
        response = self.client_get(uri)
        data = b"".join(response.streaming_content)
        self.assertEquals(b"zulip!", data)

    def test_file_too_big_failure(self):
        # type: () -> None
        """
        Attempting to upload big files should fail.
        """
        self.login("hamlet@zulip.com")
        fp = StringIO("bah!")
        fp.name = "a.txt"

        # Use MAX_FILE_UPLOAD_SIZE of 0, because the next increment
        # would be 1MB.
        with self.settings(MAX_FILE_UPLOAD_SIZE=0):
            result = self.client_post("/json/upload_file", {'f1': fp})
        self.assert_json_error(result, 'File Upload is larger than allowed limit')

    def test_multiple_upload_failure(self):
        # type: () -> None
        """
        Attempting to upload two files should fail.
        """
        self.login("hamlet@zulip.com")
        fp = StringIO("bah!")
        fp.name = "a.txt"
        fp2 = StringIO("pshaw!")
        fp2.name = "b.txt"
        result = self.client_post("/json/upload_file", {'f1': fp, 'f2': fp2})
        self.assert_json_error(result, "You may only upload one file at a time")

    def test_no_file_upload_failure(self):
        # type: () -> None
        """
        Calling this endpoint with no files should fail.
        """
        self.login("hamlet@zulip.com")
        result = self.client_post("/json/upload_file")
        self.assert_json_error(result, "You must specify a file to upload")

    def test_download_non_existent_file(self):
        # type: () -> None
        """Requesting a path with no matching upload should 404."""
        self.login("hamlet@zulip.com")
        response = self.client_get('/user_uploads/unk/nonexistent_file')
        self.assertEquals(response.status_code, 404)
        self.assertIn('File not found', str(response.content))

    def test_serve_s3_error_handling(self):
        # type: () -> None
        """Error paths when serving uploads from the S3 backend: missing file
        and a file belonging to a realm other than the requester's."""
        self.login("hamlet@zulip.com")
        # LOCAL_UPLOADS_DIR=None switches the serving code to the S3 path.
        use_s3 = lambda: self.settings(LOCAL_UPLOADS_DIR=None)
        getting_realm_id = lambda realm_id: mock.patch(
            'zerver.views.upload.get_realm_for_filename',
            return_value=realm_id
        )

        # nonexistent_file
        with use_s3(), getting_realm_id(None):
            response = self.client_get('/user_uploads/unk/nonexistent_file')
        self.assertEquals(response.status_code, 404)
        self.assertIn('File not found', str(response.content))

        # invalid realm of 999999 (for non-zulip.com)
        user = get_user_profile_by_email('hamlet@zulip.com')
        user.realm.domain = 'example.com'
        user.realm.save()
        with use_s3(), getting_realm_id(999999):
            response = self.client_get('/user_uploads/unk/whatever')
        self.assertEquals(response.status_code, 403)

    # This test will go through the code path for uploading files onto LOCAL storage
    # when zulip is in DEVELOPMENT mode.
    def test_file_upload_authed(self):
        # type: () -> None
        """
        A call to /json/upload_file should return a uri and actually create an
        entry in the database. This entry will be marked unclaimed till a message
        refers it.
        """
        self.login("hamlet@zulip.com")
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"

        result = self.client_post("/json/upload_file", {'file': fp})
        self.assert_json_success(result)
        json = ujson.loads(result.content)
        self.assertIn("uri", json)
        uri = json["uri"]
        base = '/user_uploads/'
        self.assertEquals(base, uri[:len(base)])

        # In the future, local file requests will follow the same style as S3
        # requests; they will be first authenticated and redirected
        response = self.client_get(uri)
        data = b"".join(response.streaming_content)
        self.assertEquals(b"zulip!", data)

        # check if DB has attachment marked as unclaimed
        entry = Attachment.objects.get(file_name='zulip.txt')
        self.assertEquals(entry.is_claimed(), False)

        # Referencing the upload in a sent message should claim it.
        self.subscribe_to_stream("hamlet@zulip.com", "Denmark")
        body = "First message ...[zulip.txt](http://localhost:9991" + uri + ")"
        self.send_message("hamlet@zulip.com", "Denmark", Recipient.STREAM, body, "test")
        self.assertIn('title="zulip.txt"', self.get_last_message().rendered_content)

    def test_delete_old_unclaimed_attachments(self):
        # type: () -> None
        """Unclaimed attachments past the retention window should be purged
        from both the database and storage; claimed ones must survive."""
        # Upload some files and make them older than a week
        self.login("hamlet@zulip.com")
        d1 = StringIO("zulip!")
        d1.name = "dummy_1.txt"
        result = self.client_post("/json/upload_file", {'file': d1})
        json = ujson.loads(result.content)
        uri = json["uri"]
        d1_path_id = re.sub('/user_uploads/', '', uri)

        d2 = StringIO("zulip!")
        d2.name = "dummy_2.txt"
        result = self.client_post("/json/upload_file", {'file': d2})
        json = ujson.loads(result.content)
        uri = json["uri"]
        d2_path_id = re.sub('/user_uploads/', '', uri)

        # Backdate both attachments past the deletion threshold.
        two_week_ago = timezone.now() - datetime.timedelta(weeks=2)
        d1_attachment = Attachment.objects.get(path_id = d1_path_id)
        d1_attachment.create_time = two_week_ago
        d1_attachment.save()
        self.assertEqual(str(d1_attachment), u'<Attachment: dummy_1.txt>')
        d2_attachment = Attachment.objects.get(path_id = d2_path_id)
        d2_attachment.create_time = two_week_ago
        d2_attachment.save()

        # Send message referring only dummy_1
        self.subscribe_to_stream("hamlet@zulip.com", "Denmark")
        body = "Some files here ...[zulip.txt](http://localhost:9991/user_uploads/" + d1_path_id + ")"
        self.send_message("hamlet@zulip.com", "Denmark", Recipient.STREAM, body, "test")

        # dummy_2 should not exist in database or the uploads folder
        do_delete_old_unclaimed_attachments(2)
        self.assertTrue(not Attachment.objects.filter(path_id = d2_path_id).exists())
        self.assertTrue(not delete_message_image(d2_path_id))

    def test_multiple_claim_attachments(self):
        # type: () -> None
        """
        This test tries to claim the same attachment twice. The messages field in
        the Attachment model should have both the messages in its entry.
        """
        self.login("hamlet@zulip.com")
        d1 = StringIO("zulip!")
        d1.name = "dummy_1.txt"
        result = self.client_post("/json/upload_file", {'file': d1})
        json = ujson.loads(result.content)
        uri = json["uri"]
        d1_path_id = re.sub('/user_uploads/', '', uri)

        self.subscribe_to_stream("hamlet@zulip.com", "Denmark")
        body = "First message ...[zulip.txt](http://localhost:9991/user_uploads/" + d1_path_id + ")"
        self.send_message("hamlet@zulip.com", "Denmark", Recipient.STREAM, body, "test")
        body = "Second message ...[zulip.txt](http://localhost:9991/user_uploads/" + d1_path_id + ")"
        self.send_message("hamlet@zulip.com", "Denmark", Recipient.STREAM, body, "test")

        self.assertEquals(Attachment.objects.get(path_id=d1_path_id).messages.count(), 2)

    def test_check_attachment_reference_update(self):
        # type: () -> None
        """Editing a message should update which attachments reference it:
        dropped links are unclaimed, new links are claimed."""
        f1 = StringIO("file1")
        f1.name = "file1.txt"
        f2 = StringIO("file2")
        f2.name = "file2.txt"
        f3 = StringIO("file3")
        f3.name = "file3.txt"

        self.login("hamlet@zulip.com")

        result = self.client_post("/json/upload_file", {'file': f1})
        json = ujson.loads(result.content)
        uri = json["uri"]
        f1_path_id = re.sub('/user_uploads/', '', uri)

        result = self.client_post("/json/upload_file", {'file': f2})
        json = ujson.loads(result.content)
        uri = json["uri"]
        f2_path_id = re.sub('/user_uploads/', '', uri)

        self.subscribe_to_stream("hamlet@zulip.com", "test")
        body = ("[f1.txt](http://localhost:9991/user_uploads/" + f1_path_id + ")"
                "[f2.txt](http://localhost:9991/user_uploads/" + f2_path_id + ")")
        msg_id = self.send_message("hamlet@zulip.com", "test", Recipient.STREAM, body, "test")

        result = self.client_post("/json/upload_file", {'file': f3})
        json = ujson.loads(result.content)
        uri = json["uri"]
        f3_path_id = re.sub('/user_uploads/', '', uri)

        # Edit the message: f1 is removed, f3 is added, f2 stays.
        new_body = ("[f3.txt](http://localhost:9991/user_uploads/" + f3_path_id + ")"
                    "[f2.txt](http://localhost:9991/user_uploads/" + f2_path_id + ")")
        result = self.client_post("/json/update_message", {
            'message_id': msg_id,
            'content': new_body
        })
        self.assert_json_success(result)

        message = Message.objects.get(id=msg_id)
        f1_attachment = Attachment.objects.get(path_id=f1_path_id)
        f2_attachment = Attachment.objects.get(path_id=f2_path_id)
        f3_attachment = Attachment.objects.get(path_id=f3_path_id)

        self.assertTrue(message not in f1_attachment.messages.all())
        self.assertTrue(message in f2_attachment.messages.all())
        self.assertTrue(message in f3_attachment.messages.all())

        # Delete all the attachments from the message
        new_body = "(deleted)"
        result = self.client_post("/json/update_message", {
            'message_id': msg_id,
            'content': new_body
        })
        self.assert_json_success(result)

        message = Message.objects.get(id=msg_id)
        f1_attachment = Attachment.objects.get(path_id=f1_path_id)
        f2_attachment = Attachment.objects.get(path_id=f2_path_id)
        f3_attachment = Attachment.objects.get(path_id=f3_path_id)
        self.assertTrue(message not in f1_attachment.messages.all())
        self.assertTrue(message not in f2_attachment.messages.all())
        self.assertTrue(message not in f3_attachment.messages.all())

    def test_file_name(self):
        # type: () -> None
        """
        Unicode filenames should be processed correctly.
        """
        self.login("hamlet@zulip.com")
        for expected in ["Здравейте.txt", "test"]:
            fp = StringIO("bah!")
            fp.name = urllib.parse.quote(expected)

            result = self.client_post("/json/upload_file", {'f1': fp})
            content = ujson.loads(result.content)
            assert sanitize_name(expected) in content['uri']

    def tearDown(self):
        # type: () -> None
        # Clean the uploads directory so tests don't leak files into each other.
        destroy_uploads()
class AvatarTest(ZulipTestCase):
    """Tests for avatar upload (/json/set_avatar) and serving (/avatar/...)."""

    def test_multiple_upload_failure(self):
        # type: () -> None
        """
        Attempting to upload two files should fail.
        """
        self.login("hamlet@zulip.com")
        fp1 = open(os.path.join(TEST_AVATAR_DIR, 'img.png'), 'rb')
        fp2 = open(os.path.join(TEST_AVATAR_DIR, 'img.png'), 'rb')

        result = self.client_post("/json/set_avatar", {'f1': fp1, 'f2': fp2})
        self.assert_json_error(result, "You must upload exactly one avatar.")

    def test_no_file_upload_failure(self):
        # type: () -> None
        """
        Calling this endpoint with no files should fail.
        """
        self.login("hamlet@zulip.com")

        result = self.client_post("/json/set_avatar")
        self.assert_json_error(result, "You must upload exactly one avatar.")

    # Fixture images paired with the expected resized output (None where the
    # resized result is platform-dependent and can't be compared exactly).
    correct_files = [
        ('img.png', 'png_resized.png'),
        ('img.jpg', None),  # jpeg resizing is platform-dependent
        ('img.gif', 'gif_resized.png'),
        ('img.tif', 'tif_resized.png')
    ]
    corrupt_files = ['text.txt', 'corrupt.png', 'corrupt.gif']

    def test_get_gravatar_avatar(self):
        # type: () -> None
        """Users with AVATAR_FROM_GRAVATAR should be redirected to gravatar
        (or a local fallback when gravatar is disabled); query parameters
        must be preserved on the redirect."""
        self.login("hamlet@zulip.com")
        cordelia = get_user_profile_by_email('cordelia@zulip.com')

        cordelia.avatar_source = UserProfile.AVATAR_FROM_GRAVATAR
        cordelia.save()
        with self.settings(ENABLE_GRAVATAR=True):
            response = self.client_get("/avatar/cordelia@zulip.com?foo=bar")
            redirect_url = response['Location']
            self.assertEqual(redirect_url, avatar_url(cordelia) + '&foo=bar')

        with self.settings(ENABLE_GRAVATAR=False):
            response = self.client_get("/avatar/cordelia@zulip.com?foo=bar")
            redirect_url = response['Location']
            self.assertTrue(redirect_url.endswith(avatar_url(cordelia) + '&foo=bar'))

    def test_get_user_avatar(self):
        # type: () -> None
        """Users with an uploaded avatar should be redirected to its URL."""
        self.login("hamlet@zulip.com")
        cordelia = get_user_profile_by_email('cordelia@zulip.com')

        cordelia.avatar_source = UserProfile.AVATAR_FROM_USER
        cordelia.save()
        response = self.client_get("/avatar/cordelia@zulip.com?foo=bar")
        redirect_url = response['Location']
        self.assertTrue(redirect_url.endswith(avatar_url(cordelia) + '&foo=bar'))

    def test_non_valid_user_avatar(self):
        # type: () -> None
        # It's debatable whether we should generate avatars for non-users,
        # but this test just validates the current code's behavior.
        self.login("hamlet@zulip.com")

        response = self.client_get("/avatar/nonexistent_user@zulip.com?foo=bar")
        redirect_url = response['Location']
        actual_url = 'https://secure.gravatar.com/avatar/444258b521f152129eb0c162996e572d?d=identicon&foo=bar'
        self.assertEqual(redirect_url, actual_url)

    def test_valid_avatars(self):
        # type: () -> None
        """
        A call to /json/set_avatar with a valid file should return a url and actually create an avatar.
        """
        for fname, rfname in self.correct_files:
            # TODO: use self.subTest once we're exclusively on python 3 by uncommenting the line below.
            # with self.subTest(fname=fname):
            self.login("hamlet@zulip.com")
            fp = open(os.path.join(TEST_AVATAR_DIR, fname), 'rb')

            result = self.client_post("/json/set_avatar", {'file': fp})
            self.assert_json_success(result)
            json = ujson.loads(result.content)
            self.assertIn("avatar_url", json)
            url = json["avatar_url"]
            base = '/user_avatars/'
            self.assertEquals(base, url[:len(base)])
            if rfname is not None:
                response = self.client_get(url)
                data = b"".join(response.streaming_content)
                # Served avatars are resized to 100x100.
                self.assertEquals(Image.open(io.BytesIO(data)).size, (100, 100))

            # Verify that the medium-size avatar was created
            user_profile = get_user_profile_by_email('hamlet@zulip.com')
            medium_avatar_url = avatar_url(user_profile, medium=True)
            medium_avatar_disk_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars",
                                                   medium_avatar_url.split("/")[-1].split("?")[0])
            self.assertTrue(os.path.exists(medium_avatar_disk_path))

            # Confirm that ensure_medium_avatar_url works to recreate
            # medium size avatars from the original if needed
            os.remove(medium_avatar_disk_path)
            self.assertFalse(os.path.exists(medium_avatar_disk_path))
            zerver.lib.upload.upload_backend.ensure_medium_avatar_image(user_profile.email)
            self.assertTrue(os.path.exists(medium_avatar_disk_path))

    def test_invalid_avatars(self):
        # type: () -> None
        """
        A call to /json/set_avatar with an invalid file should fail.
        """
        for fname in self.corrupt_files:
            # with self.subTest(fname=fname):
            self.login("hamlet@zulip.com")
            fp = open(os.path.join(TEST_AVATAR_DIR, fname), 'rb')

            result = self.client_post("/json/set_avatar", {'file': fp})
            self.assert_json_error(result, "Could not decode avatar image; did you upload an image file?")

    def tearDown(self):
        # type: () -> None
        # Clean the uploads directory so tests don't leak files into each other.
        destroy_uploads()
class LocalStorageTest(ZulipTestCase):
    """Tests that exercise the local-disk upload backend directly
    (upload_message_image / delete_message_image)."""

    def test_file_upload_local(self):
        # type: () -> None
        """Uploading via the backend should write the file under
        LOCAL_UPLOADS_DIR/files/<path_id> and return a /user_uploads/ URI."""
        sender_email = "hamlet@zulip.com"
        user_profile = get_user_profile_by_email(sender_email)
        uri = upload_message_image(u'dummy.txt', u'text/plain', b'zulip!', user_profile)

        base = '/user_uploads/'
        self.assertEquals(base, uri[:len(base)])
        path_id = re.sub('/user_uploads/', '', uri)
        file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, 'files', path_id)
        self.assertTrue(os.path.isfile(file_path))

    def test_delete_message_image_local(self):
        # type: () -> None
        """delete_message_image should succeed for an existing upload."""
        self.login("hamlet@zulip.com")
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"
        result = self.client_post("/json/upload_file", {'file': fp})

        json = ujson.loads(result.content)
        uri = json["uri"]
        path_id = re.sub('/user_uploads/', '', uri)
        self.assertTrue(delete_message_image(path_id))

    def tearDown(self):
        # type: () -> None
        # Clean the uploads directory so tests don't leak files into each other.
        destroy_uploads()
FuncT = TypeVar('FuncT', bound=Callable[..., None])

def use_s3_backend(method):
    # type: (FuncT) -> FuncT
    """Decorator: run the wrapped test against a moto-mocked S3 upload
    backend (LOCAL_UPLOADS_DIR disabled), restoring the local backend
    afterwards even if the test raises."""
    @mock_s3
    @override_settings(LOCAL_UPLOADS_DIR=None)
    def new_method(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        zerver.lib.upload.upload_backend = S3UploadBackend()
        try:
            return method(*args, **kwargs)
        finally:
            # Always restore the default backend so later tests are unaffected.
            zerver.lib.upload.upload_backend = LocalUploadBackend()
    return new_method
class S3Test(ZulipTestCase):
    """Tests for the S3 upload backend, running against moto's mocked S3."""

    @use_s3_backend
    def test_file_upload_s3(self):
        # type: () -> None
        """Uploading via the backend should store the object in the auth
        uploads bucket under the returned path_id."""
        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
        bucket = conn.create_bucket(settings.S3_AUTH_UPLOADS_BUCKET)

        sender_email = "hamlet@zulip.com"
        user_profile = get_user_profile_by_email(sender_email)
        uri = upload_message_image(u'dummy.txt', u'text/plain', b'zulip!', user_profile)

        base = '/user_uploads/'
        self.assertEquals(base, uri[:len(base)])
        path_id = re.sub('/user_uploads/', '', uri)
        self.assertEquals(b"zulip!", bucket.get_key(path_id).get_contents_as_string())

        self.subscribe_to_stream("hamlet@zulip.com", "Denmark")
        body = "First message ...[zulip.txt](http://localhost:9991" + uri + ")"
        self.send_message("hamlet@zulip.com", "Denmark", Recipient.STREAM, body, "test")
        self.assertIn('title="dummy.txt"', self.get_last_message().rendered_content)

    @use_s3_backend
    def test_message_image_delete_s3(self):
        # type: () -> None
        """delete_message_image should succeed for an existing S3 upload."""
        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
        conn.create_bucket(settings.S3_AUTH_UPLOADS_BUCKET)

        sender_email = "hamlet@zulip.com"
        user_profile = get_user_profile_by_email(sender_email)
        uri = upload_message_image(u'dummy.txt', u'text/plain', b'zulip!', user_profile)

        path_id = re.sub('/user_uploads/', '', uri)
        self.assertTrue(delete_message_image(path_id))

    @use_s3_backend
    def test_file_upload_authed(self):
        # type: () -> None
        """
        A call to /json/upload_file should return a uri and actually create an object.
        """
        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
        conn.create_bucket(settings.S3_AUTH_UPLOADS_BUCKET)

        self.login("hamlet@zulip.com")
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"

        result = self.client_post("/json/upload_file", {'file': fp})
        self.assert_json_success(result)
        json = ujson.loads(result.content)
        self.assertIn("uri", json)
        uri = json["uri"]
        base = '/user_uploads/'
        self.assertEquals(base, uri[:len(base)])

        # S3 requests are authenticated and then redirected to the object URL.
        response = self.client_get(uri)
        redirect_url = response['Location']

        self.assertEquals(b"zulip!", urllib.request.urlopen(redirect_url).read().strip())  # type: ignore # six.moves.urllib.request.urlopen is not defined in typeshed

        self.subscribe_to_stream("hamlet@zulip.com", "Denmark")
        body = "First message ...[zulip.txt](http://localhost:9991" + uri + ")"
        self.send_message("hamlet@zulip.com", "Denmark", Recipient.STREAM, body, "test")
        self.assertIn('title="zulip.txt"', self.get_last_message().rendered_content)
class UploadTitleTests(TestCase):
    """Tests for url_filename(), which derives a display title from a URL."""

    def test_upload_titles(self):
        # type: () -> None
        """User-upload URLs yield the trailing filename; any other URL is
        returned unchanged."""
        cases = [
            ("http://localhost:9991/user_uploads/1/LUeQZUG5jxkagzVzp1Ox_amr/dummy.txt", "dummy.txt"),
            ("http://localhost:9991/user_uploads/1/94/SzGYe0RFT-tEcOhQ6n-ZblFZ/zulip.txt", "zulip.txt"),
            ("https://zulip.com/user_uploads/4142/LUeQZUG5jxkagzVzp1Ox_amr/pasted_image.png", "pasted_image.png"),
            ("https://zulipchat.com/integrations", "https://zulipchat.com/integrations"),
            ("https://example.com", "https://example.com"),
        ]
        for url, expected in cases:
            self.assertEqual(url_filename(url), expected)
class SanitizeNameTests(TestCase):
    """Tests for sanitize_name(), which makes uploaded filenames URL-safe
    while keeping readable Unicode characters and file extensions."""

    def test_file_name(self):
        # type: () -> None
        self.assertEquals(sanitize_name(u'test.txt'), u'test.txt')
        self.assertEquals(sanitize_name(u'.hidden'), u'.hidden')
        self.assertEquals(sanitize_name(u'.hidden.txt'), u'.hidden.txt')
        self.assertEquals(sanitize_name(u'tarball.tar.gz'), u'tarball.tar.gz')
        self.assertEquals(sanitize_name(u'.hidden_tarball.tar.gz'), u'.hidden_tarball.tar.gz')
        self.assertEquals(sanitize_name(u'Testing{}*&*#().ta&&%$##&&r.gz'), u'Testing.tar.gz')
        self.assertEquals(sanitize_name(u'*testingfile?*.txt'), u'testingfile.txt')
        self.assertEquals(sanitize_name(u'snowman☃.txt'), u'snowman.txt')
        self.assertEquals(sanitize_name(u'테스트.txt'), u'테스트.txt')
        self.assertEquals(sanitize_name(u'~/."\`\?*"u0`000ssh/test.t**{}ar.gz'), u'.u0000sshtest.tar.gz')
import Foundation
import NIOHTTP1
// Comments on these properties are copied from the mozilla doc URL shown below.
extension HTTPHeaders {
/// Represents the HTTP `Cache-Control` header.
/// - See Also:
/// [Cache-Control docs](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control)
public struct CacheControl: Sendable {
/// The max-stale option can be present with no value, or be present with a number of seconds. By using
/// a struct you can check the nullability of the `maxStale` variable as well as then check the nullability
/// of the `seconds` to differentiate.
public struct MaxStale: Sendable {
/// The upper limit of staleness the client will accept.
public var seconds: Int?
}
/// Indicates that once a resource becomes stale, caches must not use their stale copy without
/// successful [validation](https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching#Cache_validation) on the origin server.
public var mustRevalidate: Bool
/// Caches must check with the origin server for
/// [validation](https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching#Cache_validation) before using the cached copy.
public var noCache: Bool
/// The cache **should not store anything** about the client request or server response.
public var noStore: Bool
/// No transformations or conversions should be made to the resource. The Content-Encoding, Content-Range, Content-Type headers must not be modified
/// by a proxy. A non-transparent proxy or browser feature such as
/// [Google's Light Mode](https://support.google.com/webmasters/answer/6211428?hl=en) might, for example, convert between image
/// formats in order to save cache space or to reduce the amount of traffic on a slow link. The `no-transform` directive disallows this.
public var noTransform: Bool
/// The response may be cached by any cache, even if the response is normally non-cacheable
public var isPublic: Bool
/// The response is for a single user and **must not** be stored by a shared cache. A private cache (like the user's browser cache) may store the response.
public var isPrivate: Bool
/// Like `must-revalidate`, but only for shared caches (e.g., proxies). Ignored by private caches.
public var proxyRevalidate: Bool
/// Indicates to not retrieve new data. This being the case, the server wishes the client to obtain a response only once and then cache. From this moment the
/// client should keep releasing a cached copy and avoid contacting the origin-server to see if a newer copy exists.
public var onlyIfCached: Bool
/// Indicates that the response body **will not change** over time.
///
/// The resource, if *unexpired*, is unchanged on the server and therefore the client should
/// not send a conditional revalidation for it (e.g. `If-None-Match` or `If-Modified-Since`) to check for updates, even when the user explicitly refreshes
/// the page. Clients that aren't aware of this extension must ignore them as per the HTTP specification. In Firefox, immutable is only honored on https:// transactions.
/// For more information, see also this [blog post](https://bitsup.blogspot.de/2016/05/cache-control-immutable.html).
public var immutable: Bool
/// The maximum amount of time a resource is considered fresh. Unlike the`Expires` header, this directive is relative to the time of the request.
public var maxAge: Int?
/// Overrides max-age or the Expires header, but only for shared caches (e.g., proxies). Ignored by private caches.
public var sMaxAge: Int?
/// Indicates the client will accept a stale response. An optional value in seconds indicates the upper limit of staleness the client will accept.
public var maxStale: MaxStale?
/// Indicates the client wants a response that will still be fresh for at least the specified number of seconds.
public var minFresh: Int?
/// Indicates the client will accept a stale response, while asynchronously checking in the background for a fresh one. The value indicates how long the client will accept a stale response.
public var staleWhileRevalidate: Int?
/// Indicates the client will accept a stale response if the check for a fresh one fails. The value indicates how many *seconds* long the client will accept the stale response after the initial expiration.
public var staleIfError: Int?
/// Creates a new `CacheControl`.
public init(
mustRevalidated: Bool = false,
noCache: Bool = false,
noStore: Bool = false,
noTransform: Bool = false,
isPublic: Bool = false,
isPrivate: Bool = false,
proxyRevalidate: Bool = false,
onlyIfCached: Bool = false,
immutable: Bool = false,
maxAge: Int? = nil,
sMaxAge: Int? = nil,
maxStale: MaxStale? = nil,
minFresh: Int? = nil,
staleWhileRevalidate: Int? = nil,
staleIfError: Int? = nil
) {
self.mustRevalidate = mustRevalidated
self.noCache = noCache
self.noStore = noStore
self.noTransform = noTransform
self.isPublic = isPublic
self.isPrivate = isPrivate
self.proxyRevalidate = proxyRevalidate
self.onlyIfCached = onlyIfCached
self.immutable = immutable
self.maxAge = maxAge
self.sMaxAge = sMaxAge
self.maxStale = maxStale
self.minFresh = minFresh
self.staleWhileRevalidate = staleWhileRevalidate
self.staleIfError = staleIfError
}
public static func parse(_ value: String) -> CacheControl? {
var set = CharacterSet.whitespacesAndNewlines
set.insert(",")
var foundSomething = false
var cache = CacheControl()
value
.replacingOccurrences(of: " ", with: "")
.replacingOccurrences(of: "\t", with: "")
.lowercased()
.split(separator: ",")
.forEach {
let str = String($0)
if let keyPath = Self.exactMatch[str] {
cache[keyPath: keyPath] = true
foundSomething = true
return
}
if value == "max-stale" {
cache.maxStale = .init()
foundSomething = true
return
}
let parts = str.components(separatedBy: "=")
guard parts.count == 2, let seconds = Int(parts[1]), seconds >= 0 else {
return
}
if parts[0] == "max-stale" {
cache.maxStale = .init(seconds: seconds)
foundSomething = true
return
}
guard let keyPath = Self.prefix[parts[0]] else {
return
}
cache[keyPath: keyPath] = seconds
foundSomething = true
}
return foundSomething ? cache : nil
}
/// Generates the header string for this instance.
public func serialize() -> String {
var options = Self.exactMatch
.filter { self[keyPath: $0.value] == true }
.map { $0.key }
var optionsWithSeconds = Self.prefix
.filter { self[keyPath: $0.value] != nil }
.map { "\($0.key)=\(self[keyPath: $0.value]!)" }
if let maxStale = self.maxStale {
if let seconds = maxStale.seconds {
optionsWithSeconds.append("max-stale=\(seconds)")
} else {
options.append("max-stale")
}
}
return (options + optionsWithSeconds).joined(separator: ", ")
}
private static let exactMatch: [String: WritableKeyPath<Self, Bool>] = [
"immutable": \.immutable,
"must-revalidate": \.mustRevalidate,
"no-cache": \.noCache,
"no-store": \.noStore,
"no-transform": \.noTransform,
"public": \.isPublic,
"private": \.isPrivate,
"proxy-revalidate": \.proxyRevalidate,
"only-if-cached": \.onlyIfCached
]
private static let prefix: [String: WritableKeyPath<Self, Int?>] = [
"max-age": \.maxAge,
"s-maxage": \.sMaxAge,
"min-fresh": \.minFresh,
"stale-while-revalidate": \.staleWhileRevalidate,
"stale-if-error": \.staleIfError
]
}
/// Gets the value of the `Cache-Control` header, if present.
public var cacheControl: CacheControl? {
get { self.first(name: .cacheControl).flatMap(CacheControl.parse) }
set {
if let new = newValue?.serialize() {
self.replaceOrAdd(name: .cacheControl, value: new)
} else {
self.remove(name: .expires)
}
}
}
}
extension WritableKeyPath: @retroactive @unchecked Sendable {} | swift | github | https://github.com/vapor/vapor | Sources/Vapor/HTTP/Headers/HTTPHeaderCacheControl.swift |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
WebJournal input parameters washing related functions
"""
import time
import re
from invenio.webjournal_config import \
InvenioWebJournalIssueNumberBadlyFormedError, \
InvenioWebJournalNoArticleNumberError, \
InvenioWebJournalArchiveDateWronglyFormedError, \
InvenioWebJournalNoPopupRecordError, \
InvenioWebJournalNoCategoryError
from invenio.webjournal_utils import \
get_current_issue, \
guess_journal_name, \
get_journal_categories, \
get_journal_nb_issues_per_year
from invenio.config import CFG_SITE_LANG
# Precompiled pattern for issue numbers of the form "ww/YYYY" (1-3 digit
# issue number, 4-digit year).  Use a raw string so the regex backslashes
# are not treated as string escapes.
issue_number_pattern = re.compile(r"^\d{1,3}/\d{4}$")
def wash_journal_language(ln):
    """
    Wash the language parameter: return it unchanged when non-empty,
    otherwise fall back to the site-wide CFG_SITE_LANG default.
    """
    return CFG_SITE_LANG if ln == "" else ln
def wash_journal_name(ln, journal_name, guess=True):
    """
    Wash the journal name parameter.

    When 'guess' is True (or the name is empty), delegate to
    guess_journal_name() so capitalization mistakes can be fixed;
    otherwise return the given name untouched.
    """
    if not guess and journal_name:
        return journal_name
    return guess_journal_name(ln, journal_name)
def wash_issue_number(ln, journal_name, issue_number):
    """
    Wash an issue number to fit the pattern ww/YYYY, e.g. 50/2007.

    A short form w/YYYY is zero-padded to the journal's issue-number width
    (e.g. 2/2007 -> 02/2007).  An empty value falls back to the current
    issue; anything else raises
    InvenioWebJournalIssueNumberBadlyFormedError.
    """
    if issue_number == "":
        return get_current_issue(ln, journal_name)

    if issue_number_pattern.match(issue_number) is None:
        raise InvenioWebJournalIssueNumberBadlyFormedError(ln,
                                                           issue_number)

    number, year = issue_number.split('/')
    # Pad the issue number to the width of the journal's yearly issue count.
    width = len(str(get_journal_nb_issues_per_year(journal_name)))
    return ("%0" + str(width) + "i/%s") % (int(number), year)
def wash_category(ln, category, journal_name, issue):
    """
    Wash a category name: return it when it is one of the journal's
    categories, fall back to the first category when none was given,
    and raise InvenioWebJournalNoCategoryError otherwise.

    NOTE(review): the ``issue`` parameter is accepted but never used --
    categories are looked up with ``issue=None`` (i.e. across all
    issues).  Confirm whether this is intentional.
    """
    categories = get_journal_categories(journal_name, issue=None)
    if category in categories:
        return category
    elif category == "" and len(categories) > 0:
        # No category requested: default to the journal's first one.
        return categories[0]
    else:
        raise InvenioWebJournalNoCategoryError(ln,
                                               category,
                                               categories)
def wash_article_number(ln, number, journal_name):
    """
    Wash an article number. First checks if it is non-empty, then if it is
    convertable to int. If all passes, returns the number (as the original
    string), else raises InvenioWebJournalNoArticleNumberError.
    """
    if number == "":
        raise InvenioWebJournalNoArticleNumberError(ln, journal_name)
    try:
        int(number)
    except (TypeError, ValueError):
        # Only catch conversion failures: a bare ``except`` would also
        # swallow unrelated errors such as KeyboardInterrupt.
        raise InvenioWebJournalNoArticleNumberError(ln, journal_name)
    return number
def wash_popup_record(ln, record, journal_name):
    """
    Wash a popup record id: must be non-empty and convertable to int.
    Returns the record id (as the original string) on success, otherwise
    raises InvenioWebJournalNoPopupRecordError.
    """
    if record == "":
        raise InvenioWebJournalNoPopupRecordError(ln, journal_name,
                                                  "no recid")
    try:
        int(record)
    except (TypeError, ValueError):
        # Only catch conversion failures: a bare ``except`` would also
        # swallow unrelated errors such as KeyboardInterrupt.
        raise InvenioWebJournalNoPopupRecordError(ln, journal_name,
                                                  record)
    return record
def wash_archive_date(ln, journal_name, archive_date):
    """
    Washes an archive date to the form dd/mm/yyyy or empty.

    @param ln: interface language, forwarded to the raised error
    @param journal_name: unused here, kept for interface symmetry
    @param archive_date: date string to validate ("" is allowed)
    @return: the unchanged date string (or "")
    """
    if archive_date == "":
        return ""
    try:
        time.strptime(archive_date, "%d/%m/%Y")
    except (ValueError, TypeError):
        # Narrowed from a bare ``except``: strptime signals a malformed
        # date with ValueError (TypeError for non-string input).
        raise InvenioWebJournalArchiveDateWronglyFormedError(ln,
                                                             archive_date)
    return archive_date
import {
codeFixAll,
createCodeFixAction,
registerCodeFix,
} from "../_namespaces/ts.codefix.js";
import {
AsExpression,
Diagnostics,
factory,
findAncestor,
getTokenAtPosition,
isAsExpression,
isInJSFile,
isTypeAssertionExpression,
SourceFile,
SyntaxKind,
textChanges,
TypeAssertion,
} from "../_namespaces/ts.js";
// Code fix for TS2352 ("neither type sufficiently overlaps with the other"):
// routes the assertion through an intermediate `unknown` conversion.
const fixId = "addConvertToUnknownForNonOverlappingTypes";
const errorCodes = [Diagnostics.Conversion_of_type_0_to_type_1_may_be_a_mistake_because_neither_type_sufficiently_overlaps_with_the_other_If_this_was_intentional_convert_the_expression_to_unknown_first.code];
registerCodeFix({
    errorCodes,
    // Single-location fix at the diagnostic's position.
    getCodeActions: function getCodeActionsToAddConvertToUnknownForNonOverlappingTypes(context) {
        const assertion = getAssertion(context.sourceFile, context.span.start);
        if (assertion === undefined) return undefined;
        const changes = textChanges.ChangeTracker.with(context, t => makeChange(t, context.sourceFile, assertion));
        return [createCodeFixAction(fixId, changes, Diagnostics.Add_unknown_conversion_for_non_overlapping_types, fixId, Diagnostics.Add_unknown_to_all_conversions_of_non_overlapping_types)];
    },
    fixIds: [fixId],
    // "Fix all" variant: apply the same change at every matching diagnostic.
    getAllCodeActions: context =>
        codeFixAll(context, errorCodes, (changes, diag) => {
            const assertion = getAssertion(diag.file, diag.start);
            if (assertion) {
                makeChange(changes, diag.file, assertion);
            }
        }),
});
/**
 * Replaces the asserted expression `expr` with `expr as unknown` (for
 * as-expressions) or `<unknown>expr` (for angle-bracket assertions), so the
 * outer assertion then converts from `unknown` and always type-checks.
 */
function makeChange(changeTracker: textChanges.ChangeTracker, sourceFile: SourceFile, assertion: AsExpression | TypeAssertion) {
    const replacement = isAsExpression(assertion)
        ? factory.createAsExpression(assertion.expression, factory.createKeywordTypeNode(SyntaxKind.UnknownKeyword))
        : factory.createTypeAssertion(factory.createKeywordTypeNode(SyntaxKind.UnknownKeyword), assertion.expression);
    changeTracker.replaceNode(sourceFile, assertion.expression, replacement);
}
/** Finds the innermost as-expression / type assertion containing `pos`; JS files have no assertions to fix. */
function getAssertion(sourceFile: SourceFile, pos: number): AsExpression | TypeAssertion | undefined {
    if (isInJSFile(sourceFile)) return undefined;
    return findAncestor(getTokenAtPosition(sourceFile, pos), (n): n is AsExpression | TypeAssertion => isAsExpression(n) || isTypeAssertionExpression(n));
}
from datetime import (
date,
time,
timedelta,
)
import pickle
import numpy as np
import pytest
from pandas._libs.missing import NA
from pandas.core.dtypes.common import is_scalar
import pandas as pd
import pandas._testing as tm
def test_singleton():
    # pd.NA is a singleton: calling the type constructor hands back the
    # same object every time.
    assert NA is NA
    another = type(NA)()
    assert another is NA
def test_repr():
    # Both repr() and str() render the sentinel as "<NA>".
    for rendered in (repr(NA), str(NA)):
        assert rendered == "<NA>"
def test_format():
    # GH-34740: NA supports format()/f-strings and tolerates unknown specs.
    for spec, expected in (("", "<NA>"), (">10", "      <NA>"), ("xxx", "<NA>")):
        assert format(NA, spec) == expected
    assert f"{NA}" == "<NA>"
    assert f"{NA:>10}" == "      <NA>"
    assert f"{NA:xxx}" == "<NA>"
def test_truthiness():
    # NA has no truth value: bool() and ``not`` must raise rather than guess.
    msg = "boolean value of NA is ambiguous"
    with pytest.raises(TypeError, match=msg):
        bool(NA)
    with pytest.raises(TypeError, match=msg):
        not NA
def test_hashable():
    # NA hashes deterministically and works as a dict key.
    assert hash(NA) == hash(NA)
    mapping = {NA: "test"}
    assert mapping[NA] == "test"
@pytest.mark.parametrize(
    "other", [NA, 1, 1.0, "a", b"a", np.int64(1), np.nan], ids=repr
)
def test_arithmetic_ops(all_arithmetic_functions, other):
    # NA propagates through every arithmetic op, regardless of the operand.
    op = all_arithmetic_functions
    if op.__name__ in ("pow", "rpow", "rmod") and isinstance(other, (str, bytes)):
        pytest.skip(reason=f"{op.__name__} with NA and {other} not defined.")
    if op.__name__ in ("divmod", "rdivmod"):
        # BUGFIX: the previous ``op(NA, other) is (NA, NA)`` compared
        # identity against a freshly built tuple, which can never hold
        # (and is a SyntaxWarning on modern CPython). Check element-wise.
        result = op(NA, other)
        assert isinstance(result, tuple) and len(result) == 2
        assert result[0] is NA
        assert result[1] is NA
    else:
        if op.__name__ == "rpow":
            # avoid special case
            other += 1
        assert op(NA, other) is NA
@pytest.mark.parametrize(
    "other",
    [
        NA,
        1,
        1.0,
        "a",
        b"a",
        np.int64(1),
        np.nan,
        np.bool_(True),
        time(0),
        date(1, 2, 3),
        timedelta(1),
        pd.NaT,
    ],
)
def test_comparison_ops(comparison_op, other):
    # Comparisons against NA propagate NA in both operand orders.
    assert comparison_op(NA, other) is NA
    assert comparison_op(other, NA) is NA
@pytest.mark.parametrize(
    "value",
    [
        0,
        0.0,
        -0,
        -0.0,
        False,
        np.bool_(False),
        np.int_(0),
        np.float64(0),
        np.int_(-0),
        np.float64(-0),
    ],
)
@pytest.mark.parametrize("asarray", [True, False])
def test_pow_special(value, asarray):
    # NA ** 0 == 1 by convention, both as a scalar and inside object arrays.
    if asarray:
        value = np.array([value])
    result = NA**value
    if asarray:
        result = result[0]
    else:
        # this assertion isn't possible for ndarray.
        assert isinstance(result, type(value))
    assert result == 1
@pytest.mark.parametrize(
    "value", [1, 1.0, True, np.bool_(True), np.int_(1), np.float64(1)]
)
@pytest.mark.parametrize("asarray", [True, False])
def test_rpow_special(value, asarray):
    # 1 ** NA == 1 by convention (base one is unambiguous).
    if asarray:
        value = np.array([value])
    result = value**NA
    if asarray:
        result = result[0]
    elif not isinstance(value, (np.float64, np.bool_, np.int_)):
        # this assertion isn't possible with asarray=True
        assert isinstance(result, type(value))
    assert result == value
@pytest.mark.parametrize("value", [-1, -1.0, np.int_(-1), np.float64(-1)])
@pytest.mark.parametrize("asarray", [True, False])
def test_rpow_minus_one(value, asarray):
    # (-1) ** NA stays missing: the sign depends on the unknown exponent.
    if asarray:
        value = np.array([value])
    result = value**NA
    if asarray:
        result = result[0]
    assert pd.isna(result)
def test_unary_ops():
    # Unary +, -, abs() and ~ all hand back the NA singleton unchanged.
    for transformed in (+NA, -NA, abs(NA), ~NA):
        assert transformed is NA
def test_logical_and():
    # Kleene three-valued AND: False dominates, True defers to NA.
    assert NA & True is NA
    assert True & NA is NA
    assert NA & False is False
    assert False & NA is False
    assert NA & NA is NA
    # GH#58427
    assert NA & np.bool_(True) is NA
    assert np.bool_(True) & NA is NA
    assert NA & np.bool_(False) is False
    assert np.bool_(False) & NA is False
    msg = "unsupported operand type"
    with pytest.raises(TypeError, match=msg):
        NA & 5
def test_logical_or():
    # Kleene OR: True dominates, False defers to NA.
    assert NA | True is True
    assert True | NA is True
    assert NA | False is NA
    assert False | NA is NA
    assert NA | NA is NA
    # GH#58427
    assert NA | np.bool_(True) is True
    assert np.bool_(True) | NA is True
    assert NA | np.bool_(False) is NA
    assert np.bool_(False) | NA is NA
    msg = "unsupported operand type"
    with pytest.raises(TypeError, match=msg):
        NA | 5
def test_logical_xor():
    # XOR against an unknown operand is always unknown.
    assert NA ^ True is NA
    assert True ^ NA is NA
    assert NA ^ False is NA
    assert False ^ NA is NA
    assert NA ^ NA is NA
    # GH#58427
    assert NA ^ np.bool_(True) is NA
    assert np.bool_(True) ^ NA is NA
    assert NA ^ np.bool_(False) is NA
    assert np.bool_(False) ^ NA is NA
    msg = "unsupported operand type"
    with pytest.raises(TypeError, match=msg):
        NA ^ 5
def test_logical_not():
    # Negating an unknown truth value stays unknown.
    assert ~NA is NA
@pytest.mark.parametrize("shape", [(3,), (3, 3), (1, 2, 3)])
def test_arithmetic_ndarray(shape, all_arithmetic_functions):
    # Arithmetic between NA and an ndarray yields an object array full of NA,
    # preserving the input shape.
    op = all_arithmetic_functions
    a = np.zeros(shape)
    if op.__name__ == "pow":
        a += 5
    result = op(NA, a)
    expected = np.full(a.shape, NA, dtype=object)
    tm.assert_numpy_array_equal(result, expected)
def test_is_scalar():
    # NA counts as a scalar for pandas' dispatch machinery.
    assert is_scalar(NA) is True
def test_isna():
    # isna/notna recognize the NA singleton.
    assert pd.isna(NA) is True
    assert pd.notna(NA) is False
def test_series_isna():
    # NA inside an object Series is reported as missing.
    ser = pd.Series([1, NA], dtype=object)
    mask = pd.Series([False, True])
    tm.assert_series_equal(ser.isna(), mask)
def test_ufunc():
    # NA propagates through unary, binary and multi-output ufuncs.
    assert np.log(NA) is NA
    assert np.add(NA, 1) is NA
    for multi_out in (np.divmod(NA, 1), np.frexp(NA)):
        assert multi_out[0] is NA and multi_out[1] is NA
def test_ufunc_raises():
    # Only the plain ufunc __call__ is supported on NA; methods like .at fail.
    msg = "ufunc method 'at'"
    with pytest.raises(ValueError, match=msg):
        np.log.at(NA, 0)
def test_binary_input_not_dunder():
    # Binary ufuncs with no dunder counterpart still propagate NA element-wise.
    values = np.array([1, 2, 3])
    expected = np.array([NA, NA, NA], dtype=object)
    for args in ((values, NA), (NA, values)):
        tm.assert_numpy_array_equal(np.logaddexp(*args), expected)
    # all NA, multiple inputs
    assert np.logaddexp(NA, NA) is NA
    result = np.modf(NA, NA)
    assert len(result) == 2
    assert all(x is NA for x in result)
def test_divmod_ufunc():
    # binary in, binary out: np.divmod has two outputs and NA must
    # propagate into both, in either operand order.
    arr = np.array([1, 2, 3])
    expected = np.array([NA, NA, NA], dtype=object)
    for result in (np.divmod(arr, NA), np.divmod(NA, arr)):
        assert isinstance(result, tuple)
        for out in result:
            # The original asserted each output twice (copy-paste
            # duplication); once is sufficient.
            tm.assert_numpy_array_equal(out, expected)
def test_integer_hash_collision_dict():
    # GH 30013: NA's hash may collide with an int, but the keys stay distinct.
    mapping = {NA: "foo", hash(NA): "bar"}
    assert mapping[NA] == "foo"
    assert mapping[hash(NA)] == "bar"
def test_integer_hash_collision_set():
    # GH 30013: same guarantee for set membership.
    collided = {NA, hash(NA)}
    assert len(collided) == 2
    assert NA in collided
    assert hash(NA) in collided
def test_pickle_roundtrip():
    # https://github.com/pandas-dev/pandas/issues/31847
    # Unpickling must hand back the very same singleton object.
    revived = pickle.loads(pickle.dumps(NA))
    assert revived is NA
def test_pickle_roundtrip_pandas(temp_file):
    # Round-trip through pandas' pickle helper preserves the singleton.
    result = tm.round_trip_pickle(NA, temp_file)
    assert result is NA
@pytest.mark.parametrize(
    "values, dtype", [([1, 2, NA], "Int64"), (["A", "B", NA], "string")]
)
@pytest.mark.parametrize("as_frame", [True, False])
def test_pickle_roundtrip_containers(as_frame, values, dtype, temp_file):
    # NA survives pickling inside masked-backed Series and DataFrames.
    s = pd.Series(pd.array(values, dtype=dtype))
    if as_frame:
        s = s.to_frame(name="A")
    result = tm.round_trip_pickle(s, temp_file)
    tm.assert_equal(result, s)
#
# Copyright (c) 2008--2010 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
# Database types we support for out variables
#
# Data types
class DatabaseDataType:
    """Base class for database out-variable data types.

    Holds a value plus a size hint; subclasses define ``type_name``,
    which is what ``str()`` reports.
    """
    type_name = None
    def __init__(self, value=None, size=None):
        # Any falsy size (None, 0) collapses to a minimum of 1.
        self.size = size or 1
        self.set_value(value)
    def set_value(self, value):
        self.value = value
    def get_value(self):
        return self.value
    def __str__(self):
        return self.type_name
class NUMBER(DatabaseDataType):
    """Numeric database type."""
    type_name = "NUMBER"
class STRING(DatabaseDataType):
    """String database type; a falsy size defaults to a 4000-char buffer."""
    type_name = "STRING"
    def __init__(self, value=None, size=None):
        # Equivalent to the base initializer followed by overwriting
        # self.size with 4000 whenever no usable size was given.
        DatabaseDataType.__init__(self, value=value, size=size or 4000)
class BINARY(DatabaseDataType):
    """Binary database type."""
    type_name = "BINARY"
class LONG_BINARY(DatabaseDataType):
    """Long binary database type."""
    type_name = "LONG_BINARY"
# XXX More data types to be added as we find need for them | unknown | codeparrot/codeparrot-clean | ||
########################################################################
#
# File Name: ParsedStep.py
#
#
"""
A Parsed token that represents a step on the result tree.
WWW: http://4suite.org/XPATH e-mail: support@4suite.org
Copyright (c) 2000-2001 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.org/COPYRIGHT for license and copyright information
"""
from xml.dom import Node
from xml.xpath import Util
from xml.xpath import NamespaceNode
import sys
class ParsedStep:
    """One XPath location step: axis :: node-test [predicates]."""
    def __init__(self, axis, nodeTest, predicates=None):
        self._axis = axis
        self._nodeTest = nodeTest
        self._predicates = predicates
        return
    def evaluate(self, context):
        """
        Select a set of nodes from the axis, then filter through the node
        test and the predicates.
        """
        # The axis reports whether its results are in reverse document
        # order; the predicate filter needs that to number positions.
        (node_set, reverse) = self._axis.select(context, self._nodeTest.match)
        if self._predicates and len(node_set):
            node_set = self._predicates.filter(node_set, context, reverse)
        return node_set
    select = evaluate
    def pprint(self, indent=''):
        print indent + str(self)
        self._axis.pprint(indent + ' ')
        self._nodeTest.pprint(indent + ' ')
        self._predicates and self._predicates.pprint(indent + ' ')
    def __str__(self):
        return '<Step at %x: %s>' % (id(self), repr(self))
    def __repr__(self):
        result = repr(self._axis) + '::' + repr(self._nodeTest)
        if self._predicates:
            result = result + repr(self._predicates)
        return result
class ParsedAbbreviatedStep:
    """Abbreviated step: '..' (parent) when ``parent`` is true, else '.'
    (the context node itself)."""
    def __init__(self, parent):
        self.parent = parent
    def evaluate(self, context):
        if self.parent:
            # Attribute nodes hang off ownerElement rather than parentNode.
            if context.node.nodeType == Node.ATTRIBUTE_NODE:
                return [context.node.ownerElement]
            return context.node.parentNode and [context.node.parentNode] or []
        return [context.node]
    select = evaluate
    def pprint(self, indent=''):
        print indent + str(self)
    def __str__(self):
        return '<AbbreviatedStep at %x: %s>' % (id(self), repr(self))
    def __repr__(self):
        return self.parent and '..' or '.'
# From the XPath 2.0 Working Draft
# Used by XPointer
class ParsedNodeSetFunction:
def __init__(self, function, predicates=None):
self._function = function
self._predicates = predicates
return
def evaluate(self, context):
"""
Select a set of nodes from the node-set function then filter
through the predicates.
"""
node_set = self._function.evaluate(context)
if type(node_set) != type([]):
raise SyntaxError('%s does not evaluate to a node-set' %
repr(self._function))
if self._predicates and len(node_set):
node_set = self._predicates.filter(node_set, context, reverse)
return node_set
select = evaluate
def pprint(self, indent=''):
print indent + str(self)
self._function.pprint(indent + ' ')
self._predicates and self._predicates.pprint(indent + ' ')
def __str__(self):
return '<Step at %x: %s>' % (id(self), repr(self))
def __repr__(self):
result = repr(self._function)
if self._predicates:
result = result + repr(self._predicates)
return result | unknown | codeparrot/codeparrot-clean | ||
# -*- coding: utf-8 -*-
"""
***************************************************************************
doGrid.py
---------------------
Date : June 2010
Copyright : (C) 2010 by Giuseppe Sucameli
Email : brush dot tyler at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Giuseppe Sucameli'
__date__ = 'June 2010'
__copyright__ = '(C) 2010, Giuseppe Sucameli'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
from ui_widgetGrid import Ui_GdalToolsWidget as Ui_Widget
from widgetPluginBase import GdalToolsBasePluginWidget as BasePluginWidget
import GdalTools_utils as Utils
class GdalToolsDialog(QWidget, Ui_Widget, BasePluginWidget):
    def __init__(self, iface):
        """Set up the gdal_grid tool widget: algorithm lookup tables, UI
        defaults, and change-tracking for every parameter widget."""
        QWidget.__init__(self)
        self.iface = iface
        self.canvas = self.iface.mapCanvas()
        # Index positions here must match the algorithm combo box entries.
        self.algorithm = ('invdist', 'average', 'nearest', 'datametrics')
        self.datametrics = ('minimum', 'maximum', 'range')
        self.setupUi(self)
        BasePluginWidget.__init__(self, self.iface, "gdal_grid")
        self.outSelector.setType( self.outSelector.FILE )
        self.extentSelector.setCanvas(self.canvas)
        #self.extentSelector.stop()
        # set the default QSpinBoxes and QProgressBar value
        self.widthSpin.setValue(3000)
        self.heightSpin.setValue(3000)
        self.invdistPowerSpin.setValue(2.0)
        self.outputFormat = Utils.fillRasterOutputFormat()
        self.lastEncoding = Utils.getLastUsedEncoding()
        # Register every (widget, signal[, enabler]) triple so the command
        # line preview refreshes whenever a parameter changes.
        self.setParamsStatus(
            [
                (self.inSelector, SIGNAL("filenameChanged()")),
                (self.outSelector, SIGNAL("filenameChanged()")),
                (self.zfieldCombo, SIGNAL("currentIndexChanged(int)"), self.zfieldCheck),
                (self.algorithmCombo, SIGNAL("currentIndexChanged(int)"), self.algorithmCheck),
                (self.stackedWidget, None, self.algorithmCheck),
                ([self.invdistPowerSpin, self.invdistSmothingSpin, self.invdistRadius1Spin, self.invdistRadius2Spin, self.invdistAngleSpin, self.invdistNoDataSpin], SIGNAL("valueChanged(double)")),
                ([self.invdistMaxPointsSpin, self.invdistMinPointsSpin], SIGNAL("valueChanged(int)")),
                ([self.averageRadius1Spin, self.averageRadius2Spin, self.averageAngleSpin, self.averageNoDataSpin], SIGNAL("valueChanged(double)")),
                (self.averageMinPointsSpin, SIGNAL("valueChanged(int)")),
                ([self.nearestRadius1Spin, self.nearestRadius2Spin, self.nearestAngleSpin, self.nearestNoDataSpin], SIGNAL("valueChanged(double)")),
                (self.datametricsCombo, SIGNAL("currentIndexChanged(int)")),
                ([self.datametricsRadius1Spin, self.datametricsRadius2Spin, self.datametricsAngleSpin, self.datametricsNoDataSpin], SIGNAL("valueChanged(double)")),
                (self.datametricsMinPointsSpin, SIGNAL("valueChanged(int)")),
                (self.extentSelector, [SIGNAL("selectionStarted()"), SIGNAL("newExtentDefined()")], self.extentGroup),
                ( [self.widthSpin, self.heightSpin], SIGNAL( "valueChanged(int)" ), self.resizeGroupBox )
            ]
        )
        self.connect(self.inSelector, SIGNAL("selectClicked()"), self.fillInputFileEdit)
        self.connect(self.outSelector, SIGNAL("selectClicked()"), self.fillOutputFileEdit)
        self.connect(self.inSelector, SIGNAL("layerChanged()"), self.fillFieldsCombo)
        self.connect(self.extentGroup, SIGNAL("toggled(bool)"), self.onExtentCheckedChanged)
    def onClosing(self):
        # Stop the rubber-band extent picker before the widget goes away.
        self.extentSelector.stop()
        BasePluginWidget.onClosing(self)
    def onExtentCheckedChanged(self, enabled):
        self.extentSelector.start() if enabled else self.extentSelector.stop()
    def onLayersChanged(self):
        # Keep the input selector in sync with the loaded vector layers.
        self.inSelector.setLayers( Utils.LayerRegistry.instance().getVectorLayers() )
    def fillFieldsCombo(self):
        # Refresh the Z-field combo from the currently selected layer.
        if self.inSelector.layer() == None:
            return
        self.lastEncoding = self.inSelector.layer().dataProvider().encoding()
        self.loadFields( self.getInputFileName() )
    def fillInputFileEdit(self):
        lastUsedFilter = Utils.FileFilter.lastUsedVectorFilter()
        inputFile, encoding = Utils.FileDialog.getOpenFileName(self, self.tr( "Select the input file for Grid" ), Utils.FileFilter.allVectorsFilter(), lastUsedFilter, True)
        if inputFile.isEmpty():
            return
        Utils.FileFilter.setLastUsedVectorFilter(lastUsedFilter)
        self.inSelector.setFilename(inputFile)
        self.lastEncoding = encoding
        self.loadFields( inputFile )
    def fillOutputFileEdit(self):
        lastUsedFilter = Utils.FileFilter.lastUsedRasterFilter()
        outputFile = Utils.FileDialog.getSaveFileName(self, self.tr( "Select the raster file to save the results to" ), Utils.FileFilter.allRastersFilter(), lastUsedFilter )
        if outputFile.isEmpty():
            return
        Utils.FileFilter.setLastUsedRasterFilter(lastUsedFilter)
        self.outputFormat = Utils.fillRasterOutputFormat( lastUsedFilter, outputFile )
        self.outSelector.setFilename(outputFile)
    def getArguments(self):
        """Assemble the gdal_grid command line arguments from the widgets."""
        arguments = QStringList()
        if self.zfieldCheck.isChecked() and self.zfieldCombo.currentIndex() >= 0:
            arguments << "-zfield"
            arguments << self.zfieldCombo.currentText()
        inputFn = self.getInputFileName()
        if not inputFn.isEmpty():
            arguments << "-l"
            arguments << QFileInfo( inputFn ).baseName()
        if self.extentGroup.isChecked():
            # -txe/-tye expect (min,max) for X but (max,min) for Y.
            rect = self.extentSelector.getExtent()
            if rect != None:
                arguments << "-txe"
                arguments << str(rect.xMinimum())
                arguments << str(rect.xMaximum())
                arguments << "-tye"
                arguments << str(rect.yMaximum())
                arguments << str(rect.yMinimum())
        if self.algorithmCheck.isChecked() and self.algorithmCombo.currentIndex() >= 0:
            arguments << "-a"
            arguments << self.algorithmArguments(self.algorithmCombo.currentIndex())
        if self.resizeGroupBox.isChecked():
            arguments << "-outsize"
            arguments << str( self.widthSpin.value() )
            arguments << str( self.heightSpin.value() )
        outputFn = self.getOutputFileName()
        if not outputFn.isEmpty():
            arguments << "-of"
            arguments << self.outputFormat
        arguments << inputFn
        arguments << outputFn
        return arguments
    def getInputFileName(self):
        return self.inSelector.filename()
    def getOutputFileName(self):
        return self.outSelector.filename()
    def addLayerIntoCanvas(self, fileInfo):
        # Load the produced raster into the QGIS canvas.
        self.iface.addRasterLayer(fileInfo.filePath())
def algorithmArguments(self, index):
algorithm = self.algorithm[index]
arguments = QStringList()
if algorithm == "invdist":
arguments.append(algorithm)
arguments.append("power=" + str(self.invdistPowerSpin.value()))
arguments.append("smothing=" + str(self.invdistSmothingSpin.value()))
arguments.append("radius1=" + str(self.invdistRadius1Spin.value()))
arguments.append("radius2=" + str(self.invdistRadius2Spin.value()))
arguments.append("angle=" + str(self.invdistAngleSpin.value()))
arguments.append("max_points=" + str(self.invdistMaxPointsSpin.value()))
arguments.append("min_points=" + str(self.invdistMinPointsSpin.value()))
arguments.append("nodata=" + str(self.invdistNoDataSpin.value()))
elif algorithm == "average":
arguments.append(algorithm)
arguments.append("radius1=" + str(self.averageRadius1Spin.value()))
arguments.append("radius2=" + str(self.averageRadius2Spin.value()))
arguments.append("angle=" + str(self.averageAngleSpin.value()))
arguments.append("min_points=" + str(self.averageMinPointsSpin.value()))
arguments.append("nodata=" + str(self.averageNoDataSpin.value()))
elif algorithm == "nearest":
arguments.append(algorithm)
arguments.append("radius1=" + str(self.nearestRadius1Spin.value()))
arguments.append("radius2=" + str(self.nearestRadius2Spin.value()))
arguments.append("angle=" + str(self.nearestAngleSpin.value()))
arguments.append("nodata=" + str(self.nearestNoDataSpin.value()))
else:
arguments.append(self.datametrics[self.datametricsCombo.currentIndex()])
arguments.append("radius1=" + str(self.datametricsRadius1Spin.value()))
arguments.append("radius2=" + str(self.datametricsRadius2Spin.value()))
arguments.append("angle=" + str(self.datametricsAngleSpin.value()))
arguments.append("min_points=" + str(self.datametricsMinPointsSpin.value()))
arguments.append("nodata=" + str(self.datametricsNoDataSpin.value()))
return arguments.join(":")
    def loadFields(self, vectorFile = ''):
        # Populate the Z-field combo with the attribute fields of
        # vectorFile, decoding field names with the last used encoding.
        self.zfieldCombo.clear()
        if vectorFile.isEmpty():
            return
        try:
            (fields, names) = Utils.getVectorFields(vectorFile)
        except Utils.UnsupportedOGRFormat, e:
            QErrorMessage(self).showMessage( e.args[0] )
            self.inSelector.setLayer( None )
            return
        ncodec = QTextCodec.codecForName(self.lastEncoding)
        for name in names:
            self.zfieldCombo.addItem( ncodec.toUnicode(name) )
from ..base import BaseTest
from ...models.base import get_session, sessions
from ...models import base
from ...models.User import UserModel
from ...views import users
from ...modules.carry import global_scope
from ...lib.Encryption import Encryption
class Test(BaseTest):
    """Tests for the users view's validation-key lifecycle (create,
    validate, re-key)."""
    def setUp(self):
        # Seed the users table with an encrypted key+salt validation row.
        # Create validation key
        key_salt = global_scope['enc'].key + \
            global_scope['conf'].salt.encode()
        # Save user
        user = UserModel(key='key_validation',
                         value=global_scope['enc'].encrypt(key_salt))
        get_session().add(user)
        get_session().commit()
    def tearDown(self):
        # Truncate table
        self.session.query(UserModel).delete()
        self.session.commit()
    def test_validation_key_new(self):
        users.validation_key_new()
        # Get inserted row
        user = get_session().query(UserModel).filter(
            UserModel.key == 'key_validation').order_by(UserModel.id.desc()).first()
        # Re-create key + salt
        key_salt = global_scope['enc'].key + \
            global_scope['conf'].salt.encode()
        self.assertEqual(global_scope['enc'].decrypt(user.value), (key_salt))
    def test_validation_key_validate(self):
        self.assertTrue(users.validation_key_validate(
            self.secret_key.encode()))
    def test_validation_key_validate_2(self):
        # Testing "except exc.DatabaseError"
        # Force re-initialization of db session
        save_sessions = base.sessions
        base.sessions = {}
        # Force wrong key in Encryption class
        global_scope['enc'].key = b'some invalid key'
        self.assertFalse(users.validation_key_validate(
            b'some invalid key'))
        # Restore db sessions
        base.sessions = save_sessions
        # Restore key
        global_scope['enc'].key = self.secret_key.encode()
    def test_validation_key_validate_3(self):
        # Testing "except ValueError" for key decryption error
        self.assertFalse(users.validation_key_validate(
            b'some invalid key'))
    def test_validation_key_rekey(self):
        enc = Encryption(b'new key')
        self.assertTrue(users.validation_key_rekey(enc))
    def test_validation_key_rekey_2(self):
        # Test without a valid row in the table
        # Truncate table
        self.session.query(UserModel).delete()
        self.session.commit()
        enc = Encryption(b'new key')
        self.assertFalse(users.validation_key_rekey(enc))
from __future__ import absolute_import
from django.conf import settings
from django.core import validators
from django.core.exceptions import ValidationError
from django.db import connection
from django.db.models import Q
from zerver.decorator import authenticated_api_view, authenticated_json_post_view, \
has_request_variables, REQ, JsonableError, \
to_non_negative_int, to_non_negative_float
from django.utils.html import escape as escape_html
from django.views.decorators.csrf import csrf_exempt
from zerver.lib import bugdown
from zerver.lib.actions import recipient_for_emails, do_update_message_flags, \
compute_mit_user_fullname, compute_irc_user_fullname, compute_jabber_user_fullname, \
create_mirror_user_if_needed, check_send_message, do_update_message, \
extract_recipients
from zerver.lib.cache import generic_bulk_cached_fetch
from zerver.lib.query import last_n
from zerver.lib.response import json_success, json_error
from zerver.lib.utils import statsd
from zerver.lib.validator import \
check_list, check_int, check_dict, check_string, check_bool
from zerver.models import Message, UserProfile, Stream, Subscription, \
Recipient, UserMessage, bulk_get_recipients, get_recipient, \
get_user_profile_by_email, get_stream, valid_stream_name, \
parse_usermessage_flags, to_dict_cache_key_id, extract_message_dict, \
stringify_message_dict, \
resolve_email_to_domain, get_realm, get_active_streams, \
bulk_get_streams
import sqlalchemy
from sqlalchemy import func
from sqlalchemy.sql import select, join, column, literal_column, literal, and_, \
or_, not_, union_all, alias
import re
import ujson
from zerver.lib.rest import rest_dispatch as _rest_dispatch
rest_dispatch = csrf_exempt((lambda request, *args, **kwargs: _rest_dispatch(request, globals(), *args, **kwargs)))
# This is a Pool that doesn't close connections. Therefore it can be used with
# existing Django database connections.
class NonClosingPool(sqlalchemy.pool.NullPool):
    # Pool that never closes its connections, so it can safely wrap
    # Django-managed database connections (Django owns their lifecycle).
    def status(self):
        return "NonClosingPool"
    def _do_return_conn(self, conn):
        # Deliberately a no-op: returning a connection must not close it.
        pass
    def recreate(self):
        return self.__class__(creator=self._creator,
                              recycle=self._recycle,
                              use_threadlocal=self._use_threadlocal,
                              reset_on_return=self._reset_on_return,
                              echo=self.echo,
                              logging_name=self._orig_logging_name,
                              _dispatch=self.dispatch)
sqlalchemy_engine = None
def get_sqlalchemy_connection():
global sqlalchemy_engine
if sqlalchemy_engine is None:
def get_dj_conn():
connection.ensure_connection()
return connection.connection
sqlalchemy_engine = sqlalchemy.create_engine('postgresql://',
creator=get_dj_conn,
poolclass=NonClosingPool,
pool_reset_on_return=False)
sa_connection = sqlalchemy_engine.connect()
sa_connection.execution_options(autocommit=False)
return sa_connection
@authenticated_json_post_view
def json_get_old_messages(request, user_profile):
    # Thin JSON endpoint wrapper; all logic lives in get_old_messages_backend.
    return get_old_messages_backend(request, user_profile)
class BadNarrowOperator(Exception):
    """Signals an unusable narrow term; the view layer converts this into
    a JSON error payload via to_json_error_msg()."""
    def __init__(self, desc):
        # Human-readable description of the bad operator/operand pair.
        self.desc = desc
    def to_json_error_msg(self):
        # Prefix matches what API clients expect in the error payload.
        return ''.join(['Invalid narrow operator: ', self.desc])
# When you add a new operator to this, also update zerver/lib/narrow.py
class NarrowBuilder(object):
    """Translates narrow terms into SQLAlchemy WHERE conditions by
    dispatching each operator to a matching by_* method."""
    def __init__(self, user_profile, msg_id_column):
        self.user_profile = user_profile
        self.msg_id_column = msg_id_column
    def add_term(self, query, term):
        # We have to be careful here because we're letting users call a method
        # by name! The prefix 'by_' prevents it from colliding with builtin
        # Python __magic__ stuff.
        operator = term['operator']
        operand = term['operand']
        negated = term.get('negated', False)
        method_name = 'by_' + operator.replace('-', '_')
        method = getattr(self, method_name, None)
        if method is None:
            raise BadNarrowOperator('unknown operator ' + operator)
        if negated:
            maybe_negate = not_
        else:
            maybe_negate = lambda cond: cond
        return method(query, operand, maybe_negate)
    def by_has(self, query, operand, maybe_negate):
        # Filter on the precomputed has_attachment/has_image/has_link columns.
        if operand not in ['attachment', 'image', 'link']:
            raise BadNarrowOperator("unknown 'has' operand " + operand)
        col_name = 'has_' + operand
        cond = column(col_name)
        return query.where(maybe_negate(cond))
    def by_in(self, query, operand, maybe_negate):
        if operand == 'home':
            # "home" view excludes muted streams/topics.
            conditions = exclude_muting_conditions(self.user_profile, [])
            return query.where(and_(*conditions))
        elif operand == 'all':
            return query
        raise BadNarrowOperator("unknown 'in' operand " + operand)
    def by_is(self, query, operand, maybe_negate):
        if operand == 'private':
            # Join to zerver_recipient so we can test the recipient type.
            query = query.select_from(join(query.froms[0], "zerver_recipient",
                                           column("recipient_id") ==
                                           literal_column("zerver_recipient.id")))
            cond = or_(column("type") == Recipient.PERSONAL,
                       column("type") == Recipient.HUDDLE)
            return query.where(maybe_negate(cond))
        elif operand == 'starred':
            cond = column("flags").op("&")(UserMessage.flags.starred.mask) != 0
            return query.where(maybe_negate(cond))
        elif operand == 'mentioned' or operand == 'alerted':
            cond = column("flags").op("&")(UserMessage.flags.mentioned.mask) != 0
            return query.where(maybe_negate(cond))
        raise BadNarrowOperator("unknown 'is' operand " + operand)
    # Characters that need no escaping inside a Postgres regex.
    _alphanum = frozenset(
        'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789')
def _pg_re_escape(self, pattern):
"""
Escape user input to place in a regex
Python's re.escape escapes unicode characters in a way which postgres
fails on, u'\u03bb' to u'\\\u03bb'. This function will correctly escape
them for postgres, u'\u03bb' to u'\\u03bb'.
"""
s = list(pattern)
for i, c in enumerate(s):
if c not in self._alphanum:
if c == '\000':
s[1] = '\\000'
elif ord(c) >= 128:
# convert the character to hex postgres regex will take
# \uXXXX
s[i] = '\\u{:0>4x}'.format(ord(c))
else:
s[i] = '\\' + c
return ''.join(s)
    def by_stream(self, query, operand, maybe_negate):
        stream = get_stream(operand, self.user_profile.realm)
        if stream is None:
            raise BadNarrowOperator('unknown stream ' + operand)
        if self.user_profile.realm.domain == "mit.edu":
            # MIT users expect narrowing to "social" to also show messages to /^(un)*social(.d)*$/
            # (unsocial, ununsocial, social.d, etc)
            m = re.search(r'^(?:un)*(.+?)(?:\.d)*$', stream.name, re.IGNORECASE)
            if m:
                base_stream_name = m.group(1)
            else:
                base_stream_name = stream.name
            matching_streams = get_active_streams(self.user_profile.realm).filter(
                name__iregex=r'^(un)*%s(\.d)*$' % (self._pg_re_escape(base_stream_name),))
            matching_stream_ids = [matching_stream.id for matching_stream in matching_streams]
            recipients = bulk_get_recipients(Recipient.STREAM, matching_stream_ids).values()
            cond = column("recipient_id").in_([recipient.id for recipient in recipients])
            return query.where(maybe_negate(cond))
        # Non-Zephyr realms: match the single stream's recipient id exactly.
        recipient = get_recipient(Recipient.STREAM, type_id=stream.id)
        cond = column("recipient_id") == recipient.id
        return query.where(maybe_negate(cond))
    def by_topic(self, query, operand, maybe_negate):
        if self.user_profile.realm.domain == "mit.edu":
            # MIT users expect narrowing to topic "foo" to also show messages to /^foo(.d)*$/
            # (foo, foo.d, foo.d.d, etc)
            m = re.search(r'^(.*?)(?:\.d)*$', operand, re.IGNORECASE)
            if m:
                base_topic = m.group(1)
            else:
                base_topic = operand
            # Additionally, MIT users expect the empty instance and
            # instance "personal" to be the same.
            if base_topic in ('', 'personal', '(instance "")'):
                regex = r'^(|personal|\(instance ""\))(\.d)*$'
            else:
                regex = r'^%s(\.d)*$' % (self._pg_re_escape(base_topic),)
            cond = column("subject").op("~*")(regex)
            return query.where(maybe_negate(cond))
        # Non-Zephyr realms: case-insensitive exact subject match.
        cond = func.upper(column("subject")) == func.upper(literal(operand))
        return query.where(maybe_negate(cond))
def by_sender(self, query, operand, maybe_negate):
try:
sender = get_user_profile_by_email(operand)
except UserProfile.DoesNotExist:
raise BadNarrowOperator('unknown user ' + operand)
cond = column("sender_id") == literal(sender.id)
return query.where(maybe_negate(cond))
    def by_near(self, query, operand, maybe_negate):
        # "near:" only anchors the narrow client-side; it adds no SQL
        # condition, so the query is returned unmodified.
        return query
def by_id(self, query, operand, maybe_negate):
cond = self.msg_id_column == literal(operand)
return query.where(maybe_negate(cond))
    def by_pm_with(self, query, operand, maybe_negate):
        """Narrow ``query`` to the private-message conversation named by ``operand``.

        ``operand`` is either a comma-separated list of emails (a huddle)
        or a single email address (a 1:1 conversation, including with
        oneself).
        """
        if ',' in operand:
            # Huddle
            try:
                emails = [e.strip() for e in operand.split(',')]
                recipient = recipient_for_emails(emails, False,
                                                 self.user_profile, self.user_profile)
            except ValidationError:
                raise BadNarrowOperator('unknown recipient ' + operand)
            cond = column("recipient_id") == recipient.id
            return query.where(maybe_negate(cond))
        else:
            # Personal message
            self_recipient = get_recipient(Recipient.PERSONAL, type_id=self.user_profile.id)
            if operand == self.user_profile.email:
                # Personals with self
                cond = and_(column("sender_id") == self.user_profile.id,
                            column("recipient_id") == self_recipient.id)
                return query.where(maybe_negate(cond))
            # Personals with other user; include both directions.
            try:
                narrow_profile = get_user_profile_by_email(operand)
            except UserProfile.DoesNotExist:
                raise BadNarrowOperator('unknown user ' + operand)
            narrow_recipient = get_recipient(Recipient.PERSONAL, narrow_profile.id)
            cond = or_(and_(column("sender_id") == narrow_profile.id,
                            column("recipient_id") == self_recipient.id),
                       and_(column("sender_id") == self.user_profile.id,
                            column("recipient_id") == narrow_recipient.id))
            return query.where(maybe_negate(cond))
def by_search(self, query, operand, maybe_negate):
tsquery = func.plainto_tsquery(literal("zulip.english_us_search"), literal(operand))
ts_locs_array = func.ts_match_locs_array
query = query.column(ts_locs_array(literal("zulip.english_us_search"),
column("rendered_content"),
tsquery).label("content_matches"))
# We HTML-escape the subject in Postgres to avoid doing a server round-trip
query = query.column(ts_locs_array(literal("zulip.english_us_search"),
func.escape_html(column("subject")),
tsquery).label("subject_matches"))
# Do quoted string matching. We really want phrase
# search here so we can ignore punctuation and do
# stemming, but there isn't a standard phrase search
# mechanism in Postgres
for term in re.findall('"[^"]+"|\S+', operand):
if term[0] == '"' and term[-1] == '"':
term = term[1:-1]
term = '%' + connection.ops.prep_for_like_query(term) + '%'
cond = or_(column("content").ilike(term),
column("subject").ilike(term))
query = query.where(maybe_negate(cond))
cond = column("search_tsvector").op("@@")(tsquery)
return query.where(maybe_negate(cond))
def highlight_string(string, locs):
    """Wrap each (offset, length) span from ``locs`` in highlight <span> markup.

    NOTE(review): this is Python 2 code -- ``unicode`` is the py2 builtin,
    and the offsets appear to be byte offsets (the text is encoded to
    UTF-8 before slicing); confirm against the ts_match_locs_array
    producer before porting to Python 3.
    """
    if isinstance(string, unicode):
        string = string.encode('utf-8')
    highlight_start = '<span class="highlight">'
    highlight_stop = '</span>'
    pos = 0
    result = ''
    for loc in locs:
        (offset, length) = loc
        # Copy the unhighlighted gap, then the highlighted span itself.
        result += string[pos:offset]
        result += highlight_start
        result += string[offset:offset + length]
        result += highlight_stop
        pos = offset + length
    result += string[pos:]
    return result.decode('utf-8')
def get_search_fields(rendered_content, subject, content_matches, subject_matches):
    """Build the match_content/match_subject dict attached to search results."""
    return {
        'match_content': highlight_string(rendered_content, content_matches),
        'match_subject': highlight_string(escape_html(subject), subject_matches),
    }
def narrow_parameter(json):
    """Parse the "narrow" request parameter into a list of term dicts.

    Dict-form terms are normalized to {'operator', 'operand', 'negated'};
    the legacy two-element list form is passed through without a
    'negated' key.  Raises ValueError/JsonableError on malformed input.
    NOTE(review): Python 2 code -- ``unicode`` is the py2 builtin and
    ``map`` here returns a list.
    """
    # FIXME: A hack to support old mobile clients
    if json == '{}':
        return None
    data = ujson.loads(json)
    if not isinstance(data, list):
        raise ValueError("argument is not a list")
    def convert_term(elem):
        # We have to support a legacy tuple format.
        if isinstance(elem, list):
            if (len(elem) != 2
                or any(not isinstance(x, str) and not isinstance(x, unicode)
                       for x in elem)):
                raise ValueError("element is not a string pair")
            return dict(operator=elem[0], operand=elem[1])
        if isinstance(elem, dict):
            validator = check_dict([
                ('operator', check_string),
                ('operand', check_string),
            ])
            error = validator('elem', elem)
            if error:
                raise JsonableError(error)
            # whitelist the fields we care about for now
            return dict(
                operator=elem['operator'],
                operand=elem['operand'],
                negated=elem.get('negated', False),
            )
        raise ValueError("element is not a dictionary")
    return map(convert_term, data)
def is_public_stream(stream, realm):
    """Return True iff ``stream`` names an existing public stream in ``realm``.

    Raises JsonableError for a syntactically invalid stream name.
    """
    if not valid_stream_name(stream):
        raise JsonableError("Invalid stream name")
    stream_obj = get_stream(stream, realm)
    return stream_obj is not None and stream_obj.is_public()
def ok_to_include_history(narrow, realm):
    """Return True only when the narrow definitely pins a public stream.

    There are occasions where we need to find Message rows with no
    corresponding UserMessage row, because the user is reading a public
    stream that might include messages sent while the user was not
    subscribed, but which they are allowed to see.  Queries in those
    situations must be constructed very carefully, so this returns True
    only if we are 100% sure a clause narrowing to a particular public
    stream on the user's realm will be added.  Getting this wrong would
    pollute narrow results with messages from other realms.
    """
    if narrow is None:
        return False
    include_history = False
    for term in narrow:
        targets_stream = term['operator'] == "stream" and not term.get('negated', False)
        if targets_stream and is_public_stream(term['operand'], realm):
            include_history = True
    # Narrowing on any UserMessage-table property ("is:...") rules out
    # historical messages: such messages have no UserMessage rows at all.
    for term in narrow:
        if term['operator'] == "is":
            return False
    return include_history
def get_stream_name_from_narrow(narrow):
    """Return the lower-cased operand of the first "stream" term, or None."""
    return next((term['operand'].lower()
                 for term in narrow
                 if term['operator'] == 'stream'), None)
def exclude_muting_conditions(user_profile, narrow):
    """Build SQL conditions filtering out muted streams and topics.

    Used when hunting for the first unread message.  When the narrow does
    not pin a stream, entire muted (not-in-home-view) streams are
    excluded; muted topics are excluded either way.  NOTE(review):
    Python 2 code (``itervalues``, list-returning ``map``).
    """
    conditions = []
    stream_name = get_stream_name_from_narrow(narrow)
    if stream_name is None:
        # Exclude all streams the user has toggled out of the home view.
        rows = Subscription.objects.filter(
            user_profile=user_profile,
            active=True,
            in_home_view=False,
            recipient__type=Recipient.STREAM
        ).values('recipient_id')
        muted_recipient_ids = map(lambda row: row['recipient_id'], rows)
        condition = not_(column("recipient_id").in_(muted_recipient_ids))
        conditions.append(condition)
    muted_topics = ujson.loads(user_profile.muted_topics)
    if muted_topics:
        if stream_name is not None:
            # Only topics muted within the narrowed stream matter here.
            muted_topics = [m for m in muted_topics if m[0].lower() == stream_name]
            if not muted_topics:
                return conditions
        muted_streams = bulk_get_streams(user_profile.realm,
                                         [muted[0] for muted in muted_topics])
        muted_recipients = bulk_get_recipients(Recipient.STREAM,
                                               [stream.id for stream in muted_streams.itervalues()])
        recipient_map = dict((s.name.lower(), muted_recipients[s.id].id)
                             for s in muted_streams.itervalues())
        # Drop muted topics whose stream no longer exists.
        muted_topics = [m for m in muted_topics if m[0].lower() in recipient_map]
        if muted_topics:
            def mute_cond(muted):
                # One (stream, topic) pair: recipient match AND
                # case-insensitive topic match.
                stream_cond = column("recipient_id") == recipient_map[muted[0].lower()]
                topic_cond = func.upper(column("subject")) == func.upper(muted[1])
                return and_(stream_cond, topic_cond)
            condition = not_(or_(*map(mute_cond, muted_topics)))
            return conditions + [condition]
    return conditions
@has_request_variables
def get_old_messages_backend(request, user_profile,
                             anchor = REQ(converter=int),
                             num_before = REQ(converter=to_non_negative_int),
                             num_after = REQ(converter=to_non_negative_int),
                             narrow = REQ('narrow', converter=narrow_parameter, default=None),
                             use_first_unread_anchor = REQ(default=False, converter=ujson.loads),
                             apply_markdown=REQ(default=True,
                                                converter=ujson.loads)):
    """Fetch a window of messages around ``anchor``, optionally narrowed.

    Returns up to ``num_before`` messages with ids <= anchor and up to
    ``num_after`` with ids >= anchor (the anchor row is never counted by
    both halves), each annotated with the caller's UserMessage flags and,
    for search narrows, highlight metadata.
    """
    include_history = ok_to_include_history(narrow, user_profile.realm)
    if include_history and not use_first_unread_anchor:
        # Full-history case: query zerver_message directly; flags are
        # synthesized later for rows that have no UserMessage.
        query = select([column("id").label("message_id")], None, "zerver_message")
        inner_msg_id_col = literal_column("zerver_message.id")
    elif narrow is None:
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       "zerver_usermessage")
        inner_msg_id_col = column("message_id")
    else:
        # TODO: Don't do this join if we're not doing a search
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       join("zerver_usermessage", "zerver_message",
                            literal_column("zerver_usermessage.message_id") ==
                            literal_column("zerver_message.id")))
        inner_msg_id_col = column("message_id")
    num_extra_messages = 1
    is_search = False
    if narrow is not None:
        # Add some metadata to our logging data for narrows
        verbose_operators = []
        for term in narrow:
            if term['operator'] == "is":
                verbose_operators.append("is:" + term['operand'])
            else:
                verbose_operators.append(term['operator'])
        request._log_data['extra'] = "[%s]" % (",".join(verbose_operators),)
        # Build the query for the narrow
        num_extra_messages = 0
        builder = NarrowBuilder(user_profile, inner_msg_id_col)
        for term in narrow:
            if term['operator'] == 'search' and not is_search:
                query = query.column("subject").column("rendered_content")
                is_search = True
            query = builder.add_term(query, term)
    # We add 1 to the number of messages requested if no narrow was
    # specified to ensure that the resulting list always contains the
    # anchor message. If a narrow was specified, the anchor message
    # might not match the narrow anyway.
    if num_after != 0:
        num_after += num_extra_messages
    else:
        num_before += num_extra_messages
    sa_conn = get_sqlalchemy_connection()
    if use_first_unread_anchor:
        condition = column("flags").op("&")(UserMessage.flags.read.mask) == 0
        # We exclude messages on muted topics when finding the first unread
        # message in this narrow
        muting_conditions = exclude_muting_conditions(user_profile, narrow)
        if muting_conditions:
            condition = and_(condition, *muting_conditions)
        first_unread_query = query.where(condition)
        first_unread_query = first_unread_query.order_by(inner_msg_id_col.asc()).limit(1)
        first_unread_result = list(sa_conn.execute(first_unread_query).fetchall())
        if len(first_unread_result) > 0:
            anchor = first_unread_result[0][0]
        else:
            # No unread messages: anchor past the end so that only the
            # "before" half of the window is populated.
            anchor = 10000000000000000
    before_query = None
    after_query = None
    if num_before != 0:
        before_anchor = anchor
        if num_after != 0:
            # Don't include the anchor in both the before query and the after query
            before_anchor = anchor - 1
        before_query = query.where(inner_msg_id_col <= before_anchor) \
                            .order_by(inner_msg_id_col.desc()).limit(num_before)
    if num_after != 0:
        after_query = query.where(inner_msg_id_col >= anchor) \
                           .order_by(inner_msg_id_col.asc()).limit(num_after)
    if num_before == 0 and num_after == 0:
        # This can happen when a narrow is specified.
        after_query = query.where(inner_msg_id_col == anchor)
    if before_query is not None:
        if after_query is not None:
            query = union_all(before_query.self_group(), after_query.self_group())
        else:
            query = before_query
    else:
        query = after_query
    main_query = alias(query)
    query = select(main_query.c, None, main_query).order_by(column("message_id").asc())
    # This is a hack to tag the query we use for testing
    query = query.prefix_with("/* get_old_messages */")
    query_result = list(sa_conn.execute(query).fetchall())
    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history. The
    # 'user_messages' dictionary maps each message to the user's
    # UserMessage object for that message, which we will attach to the
    # rendered message dict before returning it. We attempt to
    # bulk-fetch rendered message dicts from memcached using the
    # 'messages' list.
    search_fields = dict()
    message_ids = []
    user_message_flags = {}
    if include_history:
        message_ids = [row[0] for row in query_result]
        # TODO: This could be done with an outer join instead of two queries
        user_message_flags = dict((user_message.message_id, user_message.flags_list()) for user_message in
                                  UserMessage.objects.filter(user_profile=user_profile,
                                                             message__id__in=message_ids))
        for row in query_result:
            message_id = row[0]
            if user_message_flags.get(message_id) is None:
                # Messages the user never received are read+historical.
                user_message_flags[message_id] = ["read", "historical"]
            if is_search:
                (_, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)
    else:
        for row in query_result:
            message_id = row[0]
            flags = row[1]
            user_message_flags[message_id] = parse_usermessage_flags(flags)
            message_ids.append(message_id)
            if is_search:
                (_, _, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)
    cache_transformer = lambda row: Message.build_dict_from_raw_db_row(row, apply_markdown)
    id_fetcher = lambda row: row['id']
    message_dicts = generic_bulk_cached_fetch(lambda message_id: to_dict_cache_key_id(message_id, apply_markdown),
                                              Message.get_raw_db_rows,
                                              message_ids,
                                              id_fetcher=id_fetcher,
                                              cache_transformer=cache_transformer,
                                              extractor=extract_message_dict,
                                              setter=stringify_message_dict)
    message_list = []
    for message_id in message_ids:
        msg_dict = message_dicts[message_id]
        msg_dict.update({"flags": user_message_flags[message_id]})
        msg_dict.update(search_fields.get(message_id, {}))
        message_list.append(msg_dict)
    statsd.incr('loaded_old_messages', len(message_list))
    ret = {'messages': message_list,
           "result": "success",
           "msg": ""}
    return json_success(ret)
@authenticated_json_post_view
def json_update_flags(request, user_profile):
    """JSON endpoint wrapper that delegates to update_message_flags."""
    return update_message_flags(request, user_profile)
@has_request_variables
def update_message_flags(request, user_profile,
                         messages=REQ('messages', validator=check_list(check_int)),
                         operation=REQ('op'), flag=REQ('flag'),
                         all=REQ('all', validator=check_bool, default=False)):
    """Add or remove ``flag`` on ``messages`` (or on every message when ``all``).

    NOTE(review): the parameter name ``all`` shadows the builtin, but it
    is also the request-variable name, so it is left unchanged.
    """
    request._log_data["extra"] = "[%s %s]" % (operation, flag)
    do_update_message_flags(user_profile, operation, flag, messages, all)
    return json_success({'result': 'success',
                         'messages': messages,
                         'msg': ''})
def create_mirrored_message_users(request, user_profile, recipients):
    """Validate and (if needed) create stub users referenced by a mirrored message.

    Returns (True, sender_profile) on success, and (False, None) when the
    request lacks a sender, comes from an unrecognized mirroring client,
    or references users outside the forwarder's realm.
    """
    if "sender" not in request.POST:
        return (False, None)
    sender_email = request.POST["sender"].strip().lower()
    referenced_users = set([sender_email])
    if request.POST['type'] == 'private':
        referenced_users.update(email.lower() for email in recipients)
    client_name = request.client.name
    if client_name == "zephyr_mirror":
        user_check = same_realm_zephyr_user
        fullname_function = compute_mit_user_fullname
    elif client_name == "irc_mirror":
        user_check = same_realm_irc_user
        fullname_function = compute_irc_user_fullname
    elif client_name in ("jabber_mirror", "JabberMirror"):
        user_check = same_realm_jabber_user
        fullname_function = compute_jabber_user_fullname
    else:
        # Unrecognized mirroring client
        return (False, None)
    # Every referenced user must belong to the forwarder's realm.
    if not all(user_check(user_profile, email) for email in referenced_users):
        return (False, None)
    # Create stub accounts for any referenced users that don't exist yet.
    for email in referenced_users:
        create_mirror_user_if_needed(user_profile.realm, email, fullname_function)
    return (True, get_user_profile_by_email(sender_email))
def same_realm_zephyr_user(user_profile, email):
    """Return True iff both the forwarder and ``email`` are mit.edu users.

    This is handled specially, inferring the domain from the e-mail
    address, because the recipient may not exist in Zulip yet and a stub
    MIT user may need to be created on the fly.
    """
    try:
        validators.validate_email(email)
    except ValidationError:
        return False
    email_domain = resolve_email_to_domain(email)
    return email_domain == "mit.edu" and user_profile.realm.domain == "mit.edu"
def same_realm_irc_user(user_profile, email):
    """Return True iff ``email`` is an IRC-mirror user of the forwarder's realm.

    For a realm with domain example.com, IRC users have addresses like
    username@irc.example.com, so a leading "irc." is stripped from the
    e-mail's domain before comparing.
    """
    try:
        validators.validate_email(email)
    except ValidationError:
        return False
    domain = resolve_email_to_domain(email)
    # Strip only a leading "irc." component.  The previous
    # domain.replace("irc.", "") rewrote "irc." occurring *anywhere* in
    # the domain, which could wrongly match unrelated domains.
    if domain.startswith("irc."):
        domain = domain[len("irc."):]
    return user_profile.realm.domain == domain
def same_realm_jabber_user(user_profile, email):
    """Return True iff ``email`` belongs to the forwarder's realm for Jabber mirroring."""
    try:
        validators.validate_email(email)
    except ValidationError:
        return False
    domain = resolve_email_to_domain(email)
    # The ist.mit.edu realm uses mit.edu email addresses so that their
    # accounts can receive mail.
    ist_special_case = (user_profile.realm.domain == 'ist.mit.edu' and domain == 'mit.edu')
    return ist_special_case or user_profile.realm.domain == domain
@authenticated_api_view
def api_send_message(request, user_profile):
    # API-key-authenticated alias for send_message_backend.
    return send_message_backend(request, user_profile)
@authenticated_json_post_view
def json_send_message(request, user_profile):
    # Session-authenticated alias for send_message_backend.
    return send_message_backend(request, user_profile)
# We do not @require_login for send_message_backend, since it is used
# both from the API and the web service. Code calling
# send_message_backend should either check the API key or check that
# the user is logged in.
@has_request_variables
def send_message_backend(request, user_profile,
                         message_type_name = REQ('type'),
                         message_to = REQ('to', converter=extract_recipients, default=[]),
                         forged = REQ(default=False),
                         subject_name = REQ('subject', lambda x: x.strip(), None),
                         message_content = REQ('content'),
                         domain = REQ('domain', default=None),
                         local_id = REQ(default=None),
                         queue_id = REQ(default=None)):
    """Send a stream or private message on behalf of the requester.

    Also handles forged/mirrored messages (zephyr/IRC/jabber mirrors),
    which are restricted to API super users and same-realm senders.
    """
    client = request.client
    is_super_user = request.user.is_api_super_user()
    if forged and not is_super_user:
        return json_error("User not authorized for this query")
    realm = None
    if domain and domain != user_profile.realm.domain:
        if not is_super_user:
            # The email gateway bot needs to be able to send messages in
            # any realm.
            return json_error("User not authorized for this query")
        realm = get_realm(domain)
        if not realm:
            return json_error("Unknown domain " + domain)
    if client.name in ["zephyr_mirror", "irc_mirror", "jabber_mirror", "JabberMirror"]:
        # Here's how security works for mirroring:
        #
        # For private messages, the message must be (1) both sent and
        # received exclusively by users in your realm, and (2)
        # received by the forwarding user.
        #
        # For stream messages, the message must be (1) being forwarded
        # by an API superuser for your realm and (2) being sent to a
        # mirrored stream (any stream for the Zephyr and Jabber
        # mirrors, but only streams with names starting with a "#" for
        # IRC mirrors)
        #
        # The security checks are split between the below code
        # (especially create_mirrored_message_users which checks the
        # same-realm constraint) and recipient_for_emails (which
        # checks that PMs are received by the forwarding user)
        if "sender" not in request.POST:
            return json_error("Missing sender")
        if message_type_name != "private" and not is_super_user:
            return json_error("User not authorized for this query")
        (valid_input, mirror_sender) = \
            create_mirrored_message_users(request, user_profile, message_to)
        if not valid_input:
            return json_error("Invalid mirrored message")
        if client.name == "zephyr_mirror" and user_profile.realm.domain != "mit.edu":
            return json_error("Invalid mirrored realm")
        if (client.name == "irc_mirror" and message_type_name != "private" and
            not message_to[0].startswith("#")):
            return json_error("IRC stream names must start with #")
        sender = mirror_sender
    else:
        sender = user_profile
    ret = check_send_message(sender, client, message_type_name, message_to,
                             subject_name, message_content, forged=forged,
                             forged_timestamp = request.POST.get('time'),
                             forwarder_user_profile=user_profile, realm=realm,
                             local_id=local_id, sender_queue_id=queue_id)
    return json_success({"id": ret})
@authenticated_json_post_view
def json_update_message(request, user_profile):
    # JSON endpoint wrapper delegating to update_message_backend.
    return update_message_backend(request, user_profile)
@has_request_variables
def update_message_backend(request, user_profile,
                           message_id=REQ(converter=to_non_negative_int),
                           subject=REQ(default=None),
                           propagate_mode=REQ(default="change_one"),
                           content=REQ(default=None)):
    """Edit an existing message's topic and/or content."""
    if content is None and subject is None:
        return json_error("Nothing to change")
    do_update_message(user_profile, message_id, subject, propagate_mode, content)
    return json_success()
@authenticated_json_post_view
@has_request_variables
def json_fetch_raw_message(request, user_profile,
                           message_id=REQ(converter=to_non_negative_int)):
    """Return the raw (unrendered) content of one of the caller's own messages."""
    try:
        msg = Message.objects.get(id=message_id)
    except Message.DoesNotExist:
        return json_error("No such message")
    if msg.sender != user_profile:
        return json_error("Message was not sent by you")
    return json_success({"raw_content": msg.content})
@has_request_variables
def render_message_backend(request, user_profile, content=REQ):
    """Render ``content`` through the markdown processor for the caller's realm."""
    rendered = bugdown.convert(content, user_profile.realm.domain)
    return json_success({"rendered": rendered})
@authenticated_json_post_view
def json_messages_in_narrow(request, user_profile):
    # JSON endpoint wrapper delegating to messages_in_narrow_backend.
    return messages_in_narrow_backend(request, user_profile)
@has_request_variables
def messages_in_narrow_backend(request, user_profile,
                               msg_ids = REQ(validator=check_list(check_int)),
                               narrow = REQ(converter=narrow_parameter)):
    """Return search-highlight data for the subset of ``msg_ids`` matching ``narrow``.

    Note that this function will only work on messages the user
    actually received.
    """
    # TODO: We assume that the narrow is a search. For now this works because
    # the browser only ever calls this function for searches, since it can't
    # apply that narrow operator itself.
    query = select([column("message_id"), column("subject"), column("rendered_content")],
                   and_(column("user_profile_id") == literal(user_profile.id),
                        column("message_id").in_(msg_ids)),
                   join("zerver_usermessage", "zerver_message",
                        literal_column("zerver_usermessage.message_id") ==
                        literal_column("zerver_message.id")))
    builder = NarrowBuilder(user_profile, column("message_id"))
    for term in narrow:
        query = builder.add_term(query, term)
    sa_conn = get_sqlalchemy_connection()
    query_result = list(sa_conn.execute(query).fetchall())
    search_fields = dict()
    for row in query_result:
        # Each row carries the extra match-locations columns added by
        # NarrowBuilder.by_search.
        (message_id, subject, rendered_content, content_matches, subject_matches) = row
        search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                      content_matches, subject_matches)
    return json_success({"messages": search_fields})
#
# The Python Imaging Library.
# $Id$
#
# PDF (Acrobat) file handling
#
# History:
# 1996-07-16 fl Created
# 1997-01-18 fl Fixed header
# 2004-02-21 fl Fixes for 1/L/CMYK images, etc.
# 2004-02-24 fl Fixes for 1 and P images.
#
# Copyright (c) 1997-2004 by Secret Labs AB. All rights reserved.
# Copyright (c) 1996-1997 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#
##
# Image plugin for PDF images (output only).
##
from PIL import Image, ImageFile
from PIL._binary import i8
import io
__version__ = "0.4"
#
# --------------------------------------------------------------------
# object ids:
# 1. catalogue
# 2. pages
# 3. image
# 4. page
# 5. page contents
def _obj(fp, obj, **dict):
fp.write("%d 0 obj\n" % obj)
if dict:
fp.write("<<\n")
for k, v in dict.items():
if v is not None:
fp.write("/%s %s\n" % (k, v))
fp.write(">>\n")
def _endobj(fp):
fp.write("endobj\n")
def _save_all(im, fp, filename):
    # Multi-page save entry point: delegate to _save with save_all enabled.
    _save(im, fp, filename, save_all=True)
##
# (Internal) Image save plugin for the PDF format.
def _save(im, fp, filename, save_all=False):
    """Save ``im`` to ``fp`` as a PDF document (this plugin is output-only).

    Each frame becomes one page built from three objects (image XObject,
    page, page contents); a trailing xref table and trailer finish the
    file.  NOTE(review): in the 1-bit branch below, ``im`` is rebound to
    a temporary "L" image, which would clobber the frame source for
    subsequent pages of a multi-frame save -- confirm before relying on
    save_all with mode "1" images.
    """
    resolution = im.encoderinfo.get("resolution", 72.0)
    #
    # make sure image data is available
    im.load()
    xref = [0]
    class TextWriter(object):
        # Thin wrapper that encodes str writes to latin-1 bytes for the
        # underlying binary file object; all other attributes delegate.
        def __init__(self, fp):
            self.fp = fp
        def __getattr__(self, name):
            return getattr(self.fp, name)
        def write(self, value):
            self.fp.write(value.encode('latin-1'))
    fp = TextWriter(fp)
    fp.write("%PDF-1.2\n")
    fp.write("% created by PIL PDF driver " + __version__ + "\n")
    # FIXME: Should replace ASCIIHexDecode with RunLengthDecode (packbits)
    # or LZWDecode (tiff/lzw compression). Note that PDF 1.2 also supports
    # Flatedecode (zip compression).
    bits = 8
    params = None
    # Pick the PDF stream filter / colorspace for the image mode.
    if im.mode == "1":
        filter = "/ASCIIHexDecode"
        colorspace = "/DeviceGray"
        procset = "/ImageB" # grayscale
        bits = 1
    elif im.mode == "L":
        filter = "/DCTDecode"
        # params = "<< /Predictor 15 /Columns %d >>" % (width-2)
        colorspace = "/DeviceGray"
        procset = "/ImageB" # grayscale
    elif im.mode == "P":
        filter = "/ASCIIHexDecode"
        colorspace = "[ /Indexed /DeviceRGB 255 <"
        palette = im.im.getpalette("RGB")
        for i in range(256):
            r = i8(palette[i*3])
            g = i8(palette[i*3+1])
            b = i8(palette[i*3+2])
            colorspace += "%02x%02x%02x " % (r, g, b)
        colorspace += "> ]"
        procset = "/ImageI" # indexed color
    elif im.mode == "RGB":
        filter = "/DCTDecode"
        colorspace = "/DeviceRGB"
        procset = "/ImageC" # color images
    elif im.mode == "CMYK":
        filter = "/DCTDecode"
        colorspace = "/DeviceCMYK"
        procset = "/ImageC" # color images
    else:
        raise ValueError("cannot save mode %s" % im.mode)
    #
    # catalogue
    xref.append(fp.tell())
    _obj(
        fp, 1,
        Type="/Catalog",
        Pages="2 0 R")
    _endobj(fp)
    #
    # pages
    numberOfPages = 1
    if save_all:
        try:
            numberOfPages = im.n_frames
        except AttributeError:
            # Image format does not have n_frames. It is a single frame image
            pass
    pages = [str(pageNumber*3+4)+" 0 R"
             for pageNumber in range(0, numberOfPages)]
    xref.append(fp.tell())
    _obj(
        fp, 2,
        Type="/Pages",
        Count=len(pages),
        Kids="["+"\n".join(pages)+"]")
    _endobj(fp)
    for pageNumber in range(0, numberOfPages):
        im.seek(pageNumber)
        #
        # image
        op = io.BytesIO()
        if filter == "/ASCIIHexDecode":
            if bits == 1:
                # FIXME: the hex encoder doesn't support packed 1-bit
                # images; do things the hard way...
                data = im.tobytes("raw", "1")
                im = Image.new("L", (len(data), 1), None)
                im.putdata(data)
            ImageFile._save(im, op, [("hex", (0, 0)+im.size, 0, im.mode)])
        elif filter == "/DCTDecode":
            Image.SAVE["JPEG"](im, op, filename)
        elif filter == "/FlateDecode":
            ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)])
        elif filter == "/RunLengthDecode":
            ImageFile._save(im, op, [("packbits", (0, 0)+im.size, 0, im.mode)])
        else:
            raise ValueError("unsupported PDF filter (%s)" % filter)
        #
        # Get image characteristics
        width, height = im.size
        xref.append(fp.tell())
        _obj(
            fp, pageNumber*3+3,
            Type="/XObject",
            Subtype="/Image",
            Width=width, # * 72.0 / resolution,
            Height=height, # * 72.0 / resolution,
            Length=len(op.getvalue()),
            Filter=filter,
            BitsPerComponent=bits,
            DecodeParams=params,
            ColorSpace=colorspace)
        fp.write("stream\n")
        fp.fp.write(op.getvalue())
        fp.write("\nendstream\n")
        _endobj(fp)
        #
        # page
        xref.append(fp.tell())
        _obj(fp, pageNumber*3+4)
        fp.write(
            "<<\n/Type /Page\n/Parent 2 0 R\n"
            "/Resources <<\n/ProcSet [ /PDF %s ]\n"
            "/XObject << /image %d 0 R >>\n>>\n"
            "/MediaBox [ 0 0 %d %d ]\n/Contents %d 0 R\n>>\n" % (
                procset,
                pageNumber*3+3,
                int(width * 72.0 / resolution),
                int(height * 72.0 / resolution),
                pageNumber*3+5))
        _endobj(fp)
        #
        # page contents
        op = TextWriter(io.BytesIO())
        op.write(
            "q %d 0 0 %d 0 0 cm /image Do Q\n" % (
                int(width * 72.0 / resolution),
                int(height * 72.0 / resolution)))
        xref.append(fp.tell())
        _obj(fp, pageNumber*3+5, Length=len(op.fp.getvalue()))
        fp.write("stream\n")
        fp.fp.write(op.fp.getvalue())
        fp.write("\nendstream\n")
        _endobj(fp)
    #
    # trailer
    startxref = fp.tell()
    fp.write("xref\n0 %d\n0000000000 65535 f \n" % len(xref))
    for x in xref[1:]:
        fp.write("%010d 00000 n \n" % x)
    fp.write("trailer\n<<\n/Size %d\n/Root 1 0 R\n>>\n" % len(xref))
    fp.write("startxref\n%d\n%%%%EOF\n" % startxref)
    if hasattr(fp, "flush"):
        fp.flush()
#
# --------------------------------------------------------------------
# Register the (output-only) PDF plugin with PIL's save machinery.
Image.register_save("PDF", _save)
Image.register_save_all("PDF", _save_all)
Image.register_extension("PDF", ".pdf")
Image.register_mime("PDF", "application/pdf")
"""
This module is home to the Page class
"""
from pyecobee.ecobee_object import EcobeeObject
class Page(EcobeeObject):
    """Paging metadata for ecobee API list responses.

    Mirrors
    https://www.ecobee.com/home/developer/api/documentation/v1/objects/Page.shtml
    Attribute names are the snake_case forms of the ecobee camelCase
    property names; every attribute is read-only, so only getter
    properties are generated.
    """
    __slots__ = ['_page', '_total_pages', '_page_size', '_total']

    # Bidirectional snake_case <-> camelCase name mapping used by the
    # EcobeeObject (de)serialization machinery.
    attribute_name_map = {
        'page': 'page',
        'total_pages': 'totalPages',
        'totalPages': 'total_pages',
        'page_size': 'pageSize',
        'pageSize': 'page_size',
        'total': 'total',
    }

    attribute_type_map = {
        'page': 'int',
        'total_pages': 'int',
        'page_size': 'int',
        'total': 'int',
    }

    def __init__(self, page=None, total_pages=None, page_size=None, total=None):
        """Construct a Page instance; every argument is optional."""
        self._page = page
        self._total_pages = total_pages
        self._page_size = page_size
        self._total = total

    @property
    def page(self):
        """The current page number (int)."""
        return self._page

    @property
    def total_pages(self):
        """The total number of pages (int)."""
        return self._total_pages

    @property
    def page_size(self):
        """The number of objects on this page (int)."""
        return self._page_size

    @property
    def total(self):
        """The total number of objects across all pages (int)."""
        return self._total
import os
import sys
import itertools
import imp
from distutils.command.build_ext import build_ext as _du_build_ext
from distutils.file_util import copy_file
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler, get_config_var
from distutils.errors import DistutilsError
from distutils import log
from setuptools.extension import Library
import six
try:
# Attempt to use Cython for building extensions, if available
from Cython.Distutils.build_ext import build_ext as _build_ext
except ImportError:
_build_ext = _du_build_ext
# make sure _config_vars is initialized
get_config_var("LDSHARED")
from distutils.sysconfig import _config_vars as _CONFIG_VARS
def _customize_compiler_for_shlib(compiler):
    """Apply platform customizations to ``compiler`` for building shared libs.

    On OS X the pyconfig.h variables are temporarily patched so that
    distutils' customize_compiler produces .dylib-building commands; the
    originals are always restored afterwards.
    """
    if sys.platform == "darwin":
        # building .dylib requires additional compiler flags on OSX; here we
        # temporarily substitute the pyconfig.h variables so that distutils'
        # 'customize_compiler' uses them before we build the shared libraries.
        tmp = _CONFIG_VARS.copy()
        try:
            # XXX Help! I don't have any idea whether these are right...
            _CONFIG_VARS['LDSHARED'] = (
                "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
            _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
            _CONFIG_VARS['SO'] = ".dylib"
            customize_compiler(compiler)
        finally:
            # Restore the original config vars even if customization failed.
            _CONFIG_VARS.clear()
            _CONFIG_VARS.update(tmp)
    else:
        customize_compiler(compiler)
# Platform capability probing for shared-library extensions: whether
# dlopen/RTLD-style loading is available (py2's `dl` module) and whether
# loader stub modules should be generated at all.
have_rtld = False
use_stubs = False
libtype = 'shared'
if sys.platform == "darwin":
    use_stubs = True
elif os.name != 'nt':
    try:
        import dl
        use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
    except ImportError:
        pass
# Include a code fragment only when RTLD-based loading is available.
if_dl = lambda s: s if have_rtld else ''
def get_abi3_suffix():
    """Return the file extension for an abi3-compliant Extension()"""
    c_ext_suffixes = (s for s in imp.get_suffixes() if s[2] == imp.C_EXTENSION)
    for suffix, _, _ in c_ext_suffixes:
        if '.abi3' in suffix:  # Unix
            return suffix
        elif suffix == '.pyd':  # Windows
            return suffix
    # Implicitly returns None when no abi3-capable suffix exists.
class build_ext(_build_ext):
    def run(self):
        """Build extensions in build directory, then copy if --inplace"""
        # Force the out-of-place build, then restore the user's --inplace
        # setting and do the copy ourselves so stub modules are handled too.
        old_inplace, self.inplace = self.inplace, 0
        _build_ext.run(self)
        self.inplace = old_inplace
        if old_inplace:
            self.copy_extensions_to_source()
    def copy_extensions_to_source(self):
        # Copy each freshly built extension back into the source tree
        # (used for --inplace builds), writing a loader stub where needed.
        build_py = self.get_finalized_command('build_py')
        for ext in self.extensions:
            fullname = self.get_ext_fullname(ext.name)
            filename = self.get_ext_filename(fullname)
            modpath = fullname.split('.')
            package = '.'.join(modpath[:-1])
            package_dir = build_py.get_package_dir(package)
            dest_filename = os.path.join(package_dir,
                                         os.path.basename(filename))
            src_filename = os.path.join(self.build_lib, filename)
            # Always copy, even if source is older than destination, to ensure
            # that the right extensions for the current Python/platform are
            # used.
            copy_file(
                src_filename, dest_filename, verbose=self.verbose,
                dry_run=self.dry_run
            )
            if ext._needs_stub:
                self.write_stub(package_dir or os.curdir, ext, True)
def get_ext_filename(self, fullname):
filename = _build_ext.get_ext_filename(self, fullname)
if fullname in self.ext_map:
ext = self.ext_map[fullname]
use_abi3 = (
six.PY3
and getattr(ext, 'py_limited_api')
and get_abi3_suffix()
)
if use_abi3:
so_ext = _get_config_var_837('EXT_SUFFIX')
filename = filename[:-len(so_ext)]
filename = filename + get_abi3_suffix()
if isinstance(ext, Library):
fn, ext = os.path.splitext(filename)
return self.shlib_compiler.library_filename(fn, libtype)
elif use_stubs and ext._links_to_dynamic:
d, fn = os.path.split(filename)
return os.path.join(d, 'dl-' + fn)
return filename
def initialize_options(self):
_build_ext.initialize_options(self)
self.shlib_compiler = None
self.shlibs = []
self.ext_map = {}
def finalize_options(self):
_build_ext.finalize_options(self)
self.extensions = self.extensions or []
self.check_extensions_list(self.extensions)
self.shlibs = [ext for ext in self.extensions
if isinstance(ext, Library)]
if self.shlibs:
self.setup_shlib_compiler()
for ext in self.extensions:
ext._full_name = self.get_ext_fullname(ext.name)
for ext in self.extensions:
fullname = ext._full_name
self.ext_map[fullname] = ext
# distutils 3.1 will also ask for module names
# XXX what to do with conflicts?
self.ext_map[fullname.split('.')[-1]] = ext
ltd = self.shlibs and self.links_to_dynamic(ext) or False
ns = ltd and use_stubs and not isinstance(ext, Library)
ext._links_to_dynamic = ltd
ext._needs_stub = ns
filename = ext._file_name = self.get_ext_filename(fullname)
libdir = os.path.dirname(os.path.join(self.build_lib, filename))
if ltd and libdir not in ext.library_dirs:
ext.library_dirs.append(libdir)
if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
ext.runtime_library_dirs.append(os.curdir)
def setup_shlib_compiler(self):
compiler = self.shlib_compiler = new_compiler(
compiler=self.compiler, dry_run=self.dry_run, force=self.force
)
_customize_compiler_for_shlib(compiler)
if self.include_dirs is not None:
compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name, value) in self.define:
compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
compiler.undefine_macro(macro)
if self.libraries is not None:
compiler.set_libraries(self.libraries)
if self.library_dirs is not None:
compiler.set_library_dirs(self.library_dirs)
if self.rpath is not None:
compiler.set_runtime_library_dirs(self.rpath)
if self.link_objects is not None:
compiler.set_link_objects(self.link_objects)
# hack so distutils' build_extension() builds a library instead
compiler.link_shared_object = link_shared_object.__get__(compiler)
def get_export_symbols(self, ext):
if isinstance(ext, Library):
return ext.export_symbols
return _build_ext.get_export_symbols(self, ext)
def build_extension(self, ext):
ext._convert_pyx_sources_to_lang()
_compiler = self.compiler
try:
if isinstance(ext, Library):
self.compiler = self.shlib_compiler
_build_ext.build_extension(self, ext)
if ext._needs_stub:
cmd = self.get_finalized_command('build_py').build_lib
self.write_stub(cmd, ext)
finally:
self.compiler = _compiler
def links_to_dynamic(self, ext):
"""Return true if 'ext' links to a dynamic lib in the same package"""
# XXX this should check to ensure the lib is actually being built
# XXX as dynamic, and not just using a locally-found version or a
# XXX static-compiled version
libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
return any(pkg + libname in libnames for libname in ext.libraries)
def get_outputs(self):
return _build_ext.get_outputs(self) + self.__get_stubs_outputs()
def __get_stubs_outputs(self):
# assemble the base name for each extension that needs a stub
ns_ext_bases = (
os.path.join(self.build_lib, *ext._full_name.split('.'))
for ext in self.extensions
if ext._needs_stub
)
# pair each base with the extension
pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
return list(base + fnext for base, fnext in pairs)
def __get_output_extensions(self):
yield '.py'
yield '.pyc'
if self.get_finalized_command('build_py').optimize:
yield '.pyo'
def write_stub(self, output_dir, ext, compile=False):
log.info("writing stub loader for %s to %s", ext._full_name,
output_dir)
stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
'.py')
if compile and os.path.exists(stub_file):
raise DistutilsError(stub_file + " already exists! Please delete.")
if not self.dry_run:
f = open(stub_file, 'w')
f.write(
'\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __file__, __loader__",
" import sys, os, pkg_resources, imp" + if_dl(", dl"),
" __file__ = pkg_resources.resource_filename"
"(__name__,%r)"
% os.path.basename(ext._file_name),
" del __bootstrap__",
" if '__loader__' in globals():",
" del __loader__",
if_dl(" old_flags = sys.getdlopenflags()"),
" old_dir = os.getcwd()",
" try:",
" os.chdir(os.path.dirname(__file__))",
if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
" imp.load_dynamic(__name__,__file__)",
" finally:",
if_dl(" sys.setdlopenflags(old_flags)"),
" os.chdir(old_dir)",
"__bootstrap__()",
"" # terminal \n
])
)
f.close()
if compile:
from distutils.util import byte_compile
byte_compile([stub_file], optimize=0,
force=True, dry_run=self.dry_run)
optimize = self.get_finalized_command('install_lib').optimize
if optimize > 0:
byte_compile([stub_file], optimize=optimize,
force=True, dry_run=self.dry_run)
if os.path.exists(stub_file) and not self.dry_run:
os.unlink(stub_file)
# Define the link_shared_object function that setup_shlib_compiler() binds
# onto the shared-lib compiler.  On platforms where stubs are used (or on
# Windows) a真 shared library is linked; elsewhere a static library is
# produced instead and 'libtype' is flipped accordingly.
if use_stubs or os.name == 'nt':
    # Build shared libraries
    #
    def link_shared_object(
            self, objects, output_libname, output_dir=None, libraries=None,
            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
            target_lang=None):
        # Delegate straight to the compiler's generic link() with the
        # SHARED_LIBRARY target type.
        self.link(
            self.SHARED_LIBRARY, objects, output_libname,
            output_dir, libraries, library_dirs, runtime_library_dirs,
            export_symbols, debug, extra_preargs, extra_postargs,
            build_temp, target_lang
        )
else:
    # Build static libraries everywhere else
    libtype = 'static'
    def link_shared_object(
            self, objects, output_libname, output_dir=None, libraries=None,
            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
            target_lang=None):
        # XXX we need to either disallow these attrs on Library instances,
        # or warn/abort here if set, or something...
        # libraries=None, library_dirs=None, runtime_library_dirs=None,
        # export_symbols=None, extra_preargs=None, extra_postargs=None,
        # build_temp=None
        assert output_dir is None # distutils build_ext doesn't pass this
        output_dir, filename = os.path.split(output_libname)
        basename, ext = os.path.splitext(filename)
        if self.library_filename("x").startswith('lib'):
            # strip 'lib' prefix; this is kludgy if some platform uses
            # a different prefix
            basename = basename[3:]
        self.create_static_lib(
            objects, basename, output_dir, debug, target_lang
        )
def _get_config_var_837(name):
"""
In https://github.com/pypa/setuptools/pull/837, we discovered
Python 3.3.0 exposes the extension suffix under the name 'SO'.
"""
if sys.version_info < (3, 3, 1):
name = 'SO'
return get_config_var(name) | unknown | codeparrot/codeparrot-clean | ||
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import format_datetime
from frappe import _
def execute(filters=None):
    """Report entry point: validate filters and return (columns, rows)."""
    # Collect every account once; used by filter validation.
    account_details = {}
    for account in frappe.db.sql("""select name, is_group from tabAccount""", as_dict=1):
        account_details.setdefault(account.name, account)

    validate_filters(filters, account_details)
    filters = set_account_currency(filters)
    columns = get_columns(filters)

    return columns, get_result(filters)
def validate_filters(filters, account_details):
    """Raise a frappe error if a mandatory filter is missing."""
    required = (("company", "Company"), ("fiscal_year", "Fiscal Year"))
    for fieldname, label in required:
        if not filters.get(fieldname):
            frappe.throw(_('{0} is mandatory').format(_(label)))
def set_account_currency(filters):
    """Annotate *filters* with the company's default currency; return it."""
    default_currency = frappe.db.get_value(
        "Company", filters.company, "default_currency")
    filters["company_currency"] = default_currency
    return filters
def get_columns(filters):
    """Return the FEC column definitions as "Label:Type:Width" strings."""
    # (label, type-and-width suffix) pairs, in mandated FEC column order.
    specs = [
        ("JournalCode", "::90"), ("JournalLib", "::90"),
        ("EcritureNum", ":Dynamic Link:90"), ("EcritureDate", "::90"),
        ("CompteNum", ":Link/Account:100"), ("CompteLib", ":Link/Account:200"),
        ("CompAuxNum", "::90"), ("CompAuxLib", "::90"),
        ("PieceRef", "::90"), ("PieceDate", "::90"),
        ("EcritureLib", "::90"), ("Debit", "::90"), ("Credit", "::90"),
        ("EcritureLet", "::90"), ("DateLet", "::90"), ("ValidDate", "::90"),
        ("Montantdevise", "::90"), ("Idevise", "::90"),
    ]
    return [_(label) + suffix for label, suffix in specs]
def get_result(filters):
    """Fetch GL entries for *filters* and convert them to FEC rows."""
    return get_result_as_list(get_gl_entries(filters), filters)
def get_gl_entries(filters):
    """Fetch GL entries for the company/fiscal year in *filters*.

    Joins the against-voucher documents (sales/purchase invoices, journal
    and payment entries) plus customer/supplier masters so the caller can
    derive FEC reference fields without further queries.  Entries are
    grouped per voucher+account when the 'group_by_voucher' filter is set,
    otherwise one row per GL Entry.
    """
    group_by_condition = "group by voucher_type, voucher_no, account" \
        if filters.get("group_by_voucher") else "group by gl.name"

    gl_entries = frappe.db.sql("""
        select
            gl.posting_date as GlPostDate, gl.account, gl.transaction_date,
            sum(gl.debit) as debit, sum(gl.credit) as credit,
            sum(gl.debit_in_account_currency) as debitCurr, sum(gl.credit_in_account_currency) as creditCurr,
            gl.voucher_type, gl.voucher_no, gl.against_voucher_type,
            gl.against_voucher, gl.account_currency, gl.against,
            gl.party_type, gl.party, gl.is_opening,
            inv.name as InvName, inv.posting_date as InvPostDate,
            pur.name as PurName, pur.posting_date as PurPostDate,
            jnl.cheque_no as JnlRef, jnl.posting_date as JnlPostDate,
            pay.name as PayName, pay.posting_date as PayPostDate,
            cus.customer_name, cus.name as cusName,
            sup.supplier_name, sup.name as supName
        from `tabGL Entry` gl
            left join `tabSales Invoice` inv on gl.against_voucher = inv.name
            left join `tabPurchase Invoice` pur on gl.against_voucher = pur.name
            left join `tabJournal Entry` jnl on gl.against_voucher = jnl.name
            left join `tabPayment Entry` pay on gl.against_voucher = pay.name
            left join `tabCustomer` cus on gl.party = cus.customer_name
            left join `tabSupplier` sup on gl.party = sup.supplier_name
        where gl.company=%(company)s and gl.fiscal_year=%(fiscal_year)s
        {group_by_condition}
        order by GlPostDate, voucher_no"""
        .format(group_by_condition=group_by_condition), filters, as_dict=1)

    # Bug fix: PurPostDate previously selected `inv.posting_date` (the
    # sales-invoice date) instead of `pur.posting_date`, so purchase
    # references carried the wrong — usually NULL — piece date.
    return gl_entries
def get_result_as_list(data, filters):
    """Transform GL entry dicts into FEC rows (list of lists).

    Amounts are rendered with two decimals and a comma decimal separator,
    as required by the FEC file format.  Raises via frappe.throw when a
    GL account has no account number configured.
    """
    result = []
    company_currency = frappe.db.get_value("Company", filters.company, "default_currency")
    accounts = frappe.get_all("Account", filters={"Company": filters.company}, fields=["name", "account_number"])
    # Build a name -> account_number map once instead of scanning the
    # account list for every GL entry.  The previous per-row lookup also
    # raised IndexError for an account missing from the list, and its
    # error path referenced `account`, a name that doesn't exist on
    # Python 3 (list-comprehension variables don't leak).
    account_numbers = {account.name: account.account_number for account in accounts}

    for d in data:
        JournalCode = d.get("voucher_no").split("-")[0]
        EcritureNum = d.get("voucher_no").split("-")[-1]
        EcritureDate = format_datetime(d.get("GlPostDate"), "yyyyMMdd")

        CompteNum = account_numbers.get(d.get("account"))
        if CompteNum is None:
            frappe.throw(_("Account number for account {0} is not available.<br> Please setup your Chart of Accounts correctly.").format(d.get("account")))

        # Auxiliary account columns identify the party, when there is one.
        if d.get("party_type") == "Customer":
            CompAuxNum = d.get("cusName")
            CompAuxLib = d.get("customer_name")
        elif d.get("party_type") == "Supplier":
            CompAuxNum = d.get("supName")
            CompAuxLib = d.get("supplier_name")
        else:
            CompAuxNum = ""
            CompAuxLib = ""

        ValidDate = format_datetime(d.get("GlPostDate"), "yyyyMMdd")

        # Piece reference/date come from the against-voucher document,
        # falling back to the GL posting date for opening/closing entries
        # and unreferenced rows.
        if d.get("is_opening") == "Yes":
            PieceRef = _("Opening Entry Journal")
            PieceDate = format_datetime(d.get("GlPostDate"), "yyyyMMdd")
        elif d.get("against_voucher_type") == "Sales Invoice":
            PieceRef = _(d.get("InvName"))
            PieceDate = format_datetime(d.get("InvPostDate"), "yyyyMMdd")
        elif d.get("against_voucher_type") == "Purchase Invoice":
            PieceRef = _(d.get("PurName"))
            PieceDate = format_datetime(d.get("PurPostDate"), "yyyyMMdd")
        elif d.get("against_voucher_type") == "Journal Entry":
            PieceRef = _(d.get("JnlRef"))
            PieceDate = format_datetime(d.get("JnlPostDate"), "yyyyMMdd")
        elif d.get("against_voucher_type") == "Payment Entry":
            PieceRef = _(d.get("PayName"))
            PieceDate = format_datetime(d.get("PayPostDate"), "yyyyMMdd")
        elif d.get("voucher_type") == "Period Closing Voucher":
            PieceRef = _("Period Closing Journal")
            PieceDate = format_datetime(d.get("GlPostDate"), "yyyyMMdd")
        else:
            PieceRef = _("No Reference")
            PieceDate = format_datetime(d.get("GlPostDate"), "yyyyMMdd")

        # FEC amounts: fixed two decimals, comma as decimal separator.
        debit = '{:.2f}'.format(d.get("debit")).replace(".", ",")
        credit = '{:.2f}'.format(d.get("credit")).replace(".", ",")

        Idevise = d.get("account_currency")
        # Foreign-currency amount: use account-currency figures when the
        # account currency differs from the company currency.
        if Idevise != company_currency:
            Montantdevise = '{:.2f}'.format(d.get("debitCurr")).replace(".", ",") if d.get("debitCurr") != 0 else '{:.2f}'.format(d.get("creditCurr")).replace(".", ",")
        else:
            Montantdevise = '{:.2f}'.format(d.get("debit")).replace(".", ",") if d.get("debit") != 0 else '{:.2f}'.format(d.get("credit")).replace(".", ",")

        row = [JournalCode, d.get("voucher_type"), EcritureNum, EcritureDate, CompteNum, d.get("account"), CompAuxNum, CompAuxLib,
               PieceRef, PieceDate, d.get("voucher_no"), debit, credit, "", "", ValidDate, Montantdevise, Idevise]
        result.append(row)

    return result
"""
This module contains the 'base' GEOSGeometry object -- all GEOS Geometries
inherit from this object.
"""
# Python, ctypes and types dependencies.
import re
import warnings
from ctypes import addressof, byref, c_double, c_size_t
# super-class for mutable list behavior
from django.contrib.gis.geos.mutable_list import ListMixin
# GEOS-related dependencies.
from django.contrib.gis.geos.base import GEOSBase, gdal
from django.contrib.gis.geos.coordseq import GEOSCoordSeq
from django.contrib.gis.geos.error import GEOSException, GEOSIndexError
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOS_PREPARE
from django.contrib.gis.geos.mutable_list import ListMixin
# All other functions in this module come from the ctypes
# prototypes module -- which handles all interaction with
# the underlying GEOS library.
from django.contrib.gis.geos import prototypes as capi
# These functions provide access to a thread-local instance
# of their corresponding GEOS I/O class.
from django.contrib.gis.geos.prototypes.io import wkt_r, wkt_w, wkb_r, wkb_w, ewkb_w, ewkb_w3d
# For recognizing geometry input.
from django.contrib.gis.geometry.regex import hex_regex, wkt_regex, json_regex
class GEOSGeometry(GEOSBase, ListMixin):
    "A class that, generally, encapsulates a GEOS geometry."
    # Raise GEOSIndexError instead of plain IndexError
    # (see ticket #4740 and GEOSIndexError docstring)
    _IndexError = GEOSIndexError
    ptr_type = GEOM_PTR
    #### Python 'magic' routines ####
    def __init__(self, geo_input, srid=None):
        """
        The base constructor for GEOS geometry objects, and may take the
        following inputs:
         * strings:
            - WKT
            - HEXEWKB (a PostGIS-specific canonical form)
            - GeoJSON (requires GDAL)
         * buffer:
            - WKB
        The `srid` keyword is used to specify the Source Reference Identifier
        (SRID) number for this Geometry. If not set, the SRID will be None.
        """
        # NOTE: basestring/unicode/buffer below are Python 2 builtins; this
        # module predates Python 3 support.
        if isinstance(geo_input, basestring):
            if isinstance(geo_input, unicode):
                # Encoding to ASCII, WKT or HEXEWKB doesn't need any more.
                geo_input = geo_input.encode('ascii')
            wkt_m = wkt_regex.match(geo_input)
            if wkt_m:
                # Handling WKT input.
                if wkt_m.group('srid'): srid = int(wkt_m.group('srid'))
                g = wkt_r().read(wkt_m.group('wkt'))
            elif hex_regex.match(geo_input):
                # Handling HEXEWKB input.
                g = wkb_r().read(geo_input)
            elif gdal.GEOJSON and json_regex.match(geo_input):
                # Handling GeoJSON input.
                g = wkb_r().read(gdal.OGRGeometry(geo_input).wkb)
            else:
                raise ValueError('String or unicode input unrecognized as WKT EWKT, and HEXEWKB.')
        elif isinstance(geo_input, GEOM_PTR):
            # When the input is a pointer to a geometry (GEOM_PTR).
            g = geo_input
        elif isinstance(geo_input, buffer):
            # When the input is a buffer (WKB).
            g = wkb_r().read(geo_input)
        elif isinstance(geo_input, GEOSGeometry):
            g = capi.geom_clone(geo_input.ptr)
        else:
            # Invalid geometry type.
            raise TypeError('Improper geometry input type: %s' % str(type(geo_input)))
        if bool(g):
            # Setting the pointer object with a valid pointer.
            self.ptr = g
        else:
            raise GEOSException('Could not initialize GEOS Geometry with given input.')
        # Post-initialization setup.
        self._post_init(srid)
    def _post_init(self, srid):
        "Helper routine for performing post-initialization setup."
        # Setting the SRID, if given.
        if srid and isinstance(srid, int): self.srid = srid
        # Setting the class type (e.g., Point, Polygon, etc.)
        # NOTE: reassigning __class__ specializes this instance in place to
        # the concrete subclass matching the underlying GEOS type id.
        self.__class__ = GEOS_CLASSES[self.geom_typeid]
        # Setting the coordinate sequence for the geometry (will be None on
        # geometries that do not have coordinate sequences)
        self._set_cs()
    def __del__(self):
        """
        Destroys this Geometry; in other words, frees the memory used by the
        GEOS C++ object.
        """
        if self._ptr: capi.destroy_geom(self._ptr)
    def __copy__(self):
        """
        Returns a clone because the copy of a GEOSGeometry may contain an
        invalid pointer location if the original is garbage collected.
        """
        return self.clone()
    def __deepcopy__(self, memodict):
        """
        The `deepcopy` routine is used by the `Node` class of django.utils.tree;
        thus, the protocol routine needs to be implemented to return correct
        copies (clones) of these GEOS objects, which use C pointers.
        """
        return self.clone()
    def __str__(self):
        "WKT is used for the string representation."
        return self.wkt
    def __repr__(self):
        "Short-hand representation because WKT may be very large."
        return '<%s object at %s>' % (self.geom_type, hex(addressof(self.ptr)))
    # Pickling support
    def __getstate__(self):
        # The pickled state is simply a tuple of the WKB (in string form)
        # and the SRID.
        return str(self.wkb), self.srid
    def __setstate__(self, state):
        # Instantiating from the tuple state that was pickled.
        wkb, srid = state
        ptr = wkb_r().read(buffer(wkb))
        if not ptr: raise GEOSException('Invalid Geometry loaded from pickled state.')
        self.ptr = ptr
        self._post_init(srid)
    # Comparison operators
    def __eq__(self, other):
        """
        Equivalence testing, a Geometry may be compared with another Geometry
        or a WKT representation.
        """
        if isinstance(other, basestring):
            return self.wkt == other
        elif isinstance(other, GEOSGeometry):
            return self.equals_exact(other)
        else:
            return False
    def __ne__(self, other):
        "The not equals operator."
        return not (self == other)
    ### Geometry set-like operations ###
    # Thanks to Sean Gillies for inspiration:
    #  http://lists.gispython.org/pipermail/community/2007-July/001034.html
    # g = g1 | g2
    def __or__(self, other):
        "Returns the union of this Geometry and the other."
        return self.union(other)
    # g = g1 & g2
    def __and__(self, other):
        "Returns the intersection of this Geometry and the other."
        return self.intersection(other)
    # g = g1 - g2
    def __sub__(self, other):
        "Return the difference this Geometry and the other."
        return self.difference(other)
    # g = g1 ^ g2
    def __xor__(self, other):
        "Return the symmetric difference of this Geometry and the other."
        return self.sym_difference(other)
    #### Coordinate Sequence Routines ####
    @property
    def has_cs(self):
        "Returns True if this Geometry has a coordinate sequence, False if not."
        # Only these geometries are allowed to have coordinate sequences.
        if isinstance(self, (Point, LineString, LinearRing)):
            return True
        else:
            return False
    def _set_cs(self):
        "Sets the coordinate sequence for this Geometry."
        if self.has_cs:
            self._cs = GEOSCoordSeq(capi.get_cs(self.ptr), self.hasz)
        else:
            self._cs = None
    @property
    def coord_seq(self):
        "Returns a clone of the coordinate sequence for this Geometry."
        if self.has_cs:
            return self._cs.clone()
    #### Geometry Info ####
    @property
    def geom_type(self):
        "Returns a string representing the Geometry type, e.g. 'Polygon'"
        return capi.geos_type(self.ptr)
    @property
    def geom_typeid(self):
        "Returns an integer representing the Geometry type."
        return capi.geos_typeid(self.ptr)
    @property
    def num_geom(self):
        "Returns the number of geometries in the Geometry."
        return capi.get_num_geoms(self.ptr)
    @property
    def num_coords(self):
        "Returns the number of coordinates in the Geometry."
        return capi.get_num_coords(self.ptr)
    @property
    def num_points(self):
        "Returns the number points, or coordinates, in the Geometry."
        return self.num_coords
    @property
    def dims(self):
        "Returns the dimension of this Geometry (0=point, 1=line, 2=surface)."
        return capi.get_dims(self.ptr)
    def normalize(self):
        "Converts this Geometry to normal form (or canonical form)."
        return capi.geos_normalize(self.ptr)
    #### Unary predicates ####
    @property
    def empty(self):
        """
        Returns a boolean indicating whether the set of points in this Geometry
        are empty.
        """
        return capi.geos_isempty(self.ptr)
    @property
    def hasz(self):
        "Returns whether the geometry has a 3D dimension."
        return capi.geos_hasz(self.ptr)
    @property
    def ring(self):
        "Returns whether or not the geometry is a ring."
        return capi.geos_isring(self.ptr)
    @property
    def simple(self):
        "Returns false if the Geometry is not simple."
        return capi.geos_issimple(self.ptr)
    @property
    def valid(self):
        "This property tests the validity of this Geometry."
        return capi.geos_isvalid(self.ptr)
    @property
    def valid_reason(self):
        """
        Returns a string containing the reason for any invalidity.
        """
        if not GEOS_PREPARE:
            raise GEOSException('Upgrade GEOS to 3.1 to get validity reason.')
        return capi.geos_isvalidreason(self.ptr)
    #### Binary predicates. ####
    def contains(self, other):
        "Returns true if other.within(this) returns true."
        return capi.geos_contains(self.ptr, other.ptr)
    def crosses(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is T*T****** (for a point and a curve, a point and an area or a line and
        an area) 0******** (for two curves).
        """
        return capi.geos_crosses(self.ptr, other.ptr)
    def disjoint(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is FF*FF****.
        """
        return capi.geos_disjoint(self.ptr, other.ptr)
    def equals(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is T*F**FFF*.
        """
        return capi.geos_equals(self.ptr, other.ptr)
    def equals_exact(self, other, tolerance=0):
        """
        Returns true if the two Geometries are exactly equal, up to a
        specified tolerance.
        """
        return capi.geos_equalsexact(self.ptr, other.ptr, float(tolerance))
    def intersects(self, other):
        "Returns true if disjoint returns false."
        return capi.geos_intersects(self.ptr, other.ptr)
    def overlaps(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is T*T***T** (for two points or two surfaces) 1*T***T** (for two curves).
        """
        return capi.geos_overlaps(self.ptr, other.ptr)
    def relate_pattern(self, other, pattern):
        """
        Returns true if the elements in the DE-9IM intersection matrix for the
        two Geometries match the elements in pattern.
        """
        if not isinstance(pattern, basestring) or len(pattern) > 9:
            raise GEOSException('invalid intersection matrix pattern')
        return capi.geos_relatepattern(self.ptr, other.ptr, pattern)
    def touches(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is FT*******, F**T***** or F***T****.
        """
        return capi.geos_touches(self.ptr, other.ptr)
    def within(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is T*F**F***.
        """
        return capi.geos_within(self.ptr, other.ptr)
    #### SRID Routines ####
    def get_srid(self):
        "Gets the SRID for the geometry, returns None if no SRID is set."
        s = capi.geos_get_srid(self.ptr)
        if s == 0: return None
        else: return s
    def set_srid(self, srid):
        "Sets the SRID for the geometry."
        capi.geos_set_srid(self.ptr, srid)
    srid = property(get_srid, set_srid)
    #### Output Routines ####
    @property
    def ewkt(self):
        """
        Returns the EWKT (WKT + SRID) of the Geometry. Note that Z values
        are *not* included in this representation because GEOS does not yet
        support serializing them.
        """
        if self.get_srid(): return 'SRID=%s;%s' % (self.srid, self.wkt)
        else: return self.wkt
    @property
    def wkt(self):
        "Returns the WKT (Well-Known Text) representation of this Geometry."
        return wkt_w().write(self)
    @property
    def hex(self):
        """
        Returns the WKB of this Geometry in hexadecimal form.  Please note
        that the SRID and Z values are not included in this representation
        because it is not a part of the OGC specification (use the `hexewkb`
        property instead).
        """
        # A possible faster, all-python, implementation:
        #  str(self.wkb).encode('hex')
        return wkb_w().write_hex(self)
    @property
    def hexewkb(self):
        """
        Returns the EWKB of this Geometry in hexadecimal form.  This is an
        extension of the WKB specification that includes SRID and Z values
        that are a part of this geometry.
        """
        if self.hasz:
            if not GEOS_PREPARE:
                # See: http://trac.osgeo.org/geos/ticket/216
                raise GEOSException('Upgrade GEOS to 3.1 to get valid 3D HEXEWKB.')
            return ewkb_w3d().write_hex(self)
        else:
            return ewkb_w().write_hex(self)
    @property
    def json(self):
        """
        Returns GeoJSON representation of this Geometry if GDAL 1.5+
        is installed.
        """
        if gdal.GEOJSON:
            return self.ogr.json
        else:
            raise GEOSException('GeoJSON output only supported on GDAL 1.5+.')
    geojson = json
    @property
    def wkb(self):
        """
        Returns the WKB (Well-Known Binary) representation of this Geometry
        as a Python buffer.  SRID and Z values are not included, use the
        `ewkb` property instead.
        """
        return wkb_w().write(self)
    @property
    def ewkb(self):
        """
        Return the EWKB representation of this Geometry as a Python buffer.
        This is an extension of the WKB specification that includes any SRID
        and Z values that are a part of this geometry.
        """
        if self.hasz:
            if not GEOS_PREPARE:
                # See: http://trac.osgeo.org/geos/ticket/216
                raise GEOSException('Upgrade GEOS to 3.1 to get valid 3D EWKB.')
            return ewkb_w3d().write(self)
        else:
            return ewkb_w().write(self)
    @property
    def kml(self):
        "Returns the KML representation of this Geometry."
        gtype = self.geom_type
        return '<%s>%s</%s>' % (gtype, self.coord_seq.kml, gtype)
    @property
    def prepared(self):
        """
        Returns a PreparedGeometry corresponding to this geometry -- it is
        optimized for the contains, intersects, and covers operations.
        """
        if GEOS_PREPARE:
            return PreparedGeometry(self)
        else:
            raise GEOSException('GEOS 3.1+ required for prepared geometry support.')
    #### GDAL-specific output routines ####
    @property
    def ogr(self):
        "Returns the OGR Geometry for this Geometry."
        if gdal.HAS_GDAL:
            if self.srid:
                return gdal.OGRGeometry(self.wkb, self.srid)
            else:
                return gdal.OGRGeometry(self.wkb)
        else:
            raise GEOSException('GDAL required to convert to an OGRGeometry.')
    @property
    def srs(self):
        "Returns the OSR SpatialReference for SRID of this Geometry."
        if gdal.HAS_GDAL:
            if self.srid:
                return gdal.SpatialReference(self.srid)
            else:
                return None
        else:
            raise GEOSException('GDAL required to return a SpatialReference object.')
    @property
    def crs(self):
        "Alias for `srs` property."
        return self.srs
    def transform(self, ct, clone=False):
        """
        Requires GDAL. Transforms the geometry according to the given
        transformation object, which may be an integer SRID, and WKT or
        PROJ.4 string. By default, the geometry is transformed in-place and
        nothing is returned. However if the `clone` keyword is set, then this
        geometry will not be modified and a transformed clone will be returned
        instead.
        """
        srid = self.srid
        if ct == srid:
            # short-circuit where source & dest SRIDs match
            if clone:
                return self.clone()
            else:
                return
        if (srid is None) or (srid < 0):
            warnings.warn("Calling transform() with no SRID set does no transformation!",
                          stacklevel=2)
            warnings.warn("Calling transform() with no SRID will raise GEOSException in v1.5",
                          FutureWarning, stacklevel=2)
            return
        if not gdal.HAS_GDAL:
            raise GEOSException("GDAL library is not available to transform() geometry.")
        # Creating an OGR Geometry, which is then transformed.
        g = gdal.OGRGeometry(self.wkb, srid)
        g.transform(ct)
        # Getting a new GEOS pointer
        ptr = wkb_r().read(g.wkb)
        if clone:
            # User wants a cloned transformed geometry returned.
            return GEOSGeometry(ptr, srid=g.srid)
        if ptr:
            # Reassigning pointer, and performing post-initialization setup
            # again due to the reassignment.
            capi.destroy_geom(self.ptr)
            self.ptr = ptr
            self._post_init(g.srid)
        else:
            raise GEOSException('Transformed WKB was invalid.')
    #### Topology Routines ####
    def _topology(self, gptr):
        "Helper routine to return Geometry from the given pointer."
        return GEOSGeometry(gptr, srid=self.srid)
    @property
    def boundary(self):
        "Returns the boundary as a newly allocated Geometry object."
        return self._topology(capi.geos_boundary(self.ptr))
    def buffer(self, width, quadsegs=8):
        """
        Returns a geometry that represents all points whose distance from this
        Geometry is less than or equal to distance. Calculations are in the
        Spatial Reference System of this Geometry. The optional third parameter sets
        the number of segment used to approximate a quarter circle (defaults to 8).
        (Text from PostGIS documentation at ch. 6.1.3)
        """
        return self._topology(capi.geos_buffer(self.ptr, width, quadsegs))
    @property
    def centroid(self):
        """
        The centroid is equal to the centroid of the set of component Geometries
        of highest dimension (since the lower-dimension geometries contribute zero
        "weight" to the centroid).
        """
        return self._topology(capi.geos_centroid(self.ptr))
    @property
    def convex_hull(self):
        """
        Returns the smallest convex Polygon that contains all the points
        in the Geometry.
        """
        return self._topology(capi.geos_convexhull(self.ptr))
    def difference(self, other):
        """
        Returns a Geometry representing the points making up this Geometry
        that do not make up other.
        """
        return self._topology(capi.geos_difference(self.ptr, other.ptr))
    @property
    def envelope(self):
        "Return the envelope for this geometry (a polygon)."
        return self._topology(capi.geos_envelope(self.ptr))
    def intersection(self, other):
        "Returns a Geometry representing the points shared by this Geometry and other."
        return self._topology(capi.geos_intersection(self.ptr, other.ptr))
    @property
    def point_on_surface(self):
        "Computes an interior point of this Geometry."
        return self._topology(capi.geos_pointonsurface(self.ptr))
    def relate(self, other):
        "Returns the DE-9IM intersection matrix for this Geometry and the other."
        return capi.geos_relate(self.ptr, other.ptr)
    def simplify(self, tolerance=0.0, preserve_topology=False):
        """
        Returns the Geometry, simplified using the Douglas-Peucker algorithm
        to the specified tolerance (higher tolerance => less points).  If no
        tolerance provided, defaults to 0.
        By default, this function does not preserve topology - e.g. polygons can
        be split, collapse to lines or disappear holes can be created or
        disappear, and lines can cross. By specifying preserve_topology=True,
        the result will have the same dimension and number of components as the
        input. This is significantly slower.
        """
        if preserve_topology:
            return self._topology(capi.geos_preservesimplify(self.ptr, tolerance))
        else:
            return self._topology(capi.geos_simplify(self.ptr, tolerance))
    def sym_difference(self, other):
        """
        Returns a set combining the points in this Geometry not in other,
        and the points in other not in this Geometry.
        """
        return self._topology(capi.geos_symdifference(self.ptr, other.ptr))
    def union(self, other):
        "Returns a Geometry representing all the points in this Geometry and other."
        return self._topology(capi.geos_union(self.ptr, other.ptr))
    #### Other Routines ####
    @property
    def area(self):
        "Returns the area of the Geometry."
        return capi.geos_area(self.ptr, byref(c_double()))
    def distance(self, other):
        """
        Returns the distance between the closest points on this Geometry
        and the other. Units will be in those of the coordinate system of
        the Geometry.
        """
        if not isinstance(other, GEOSGeometry):
            raise TypeError('distance() works only on other GEOS Geometries.')
        return capi.geos_distance(self.ptr, other.ptr, byref(c_double()))
    @property
    def extent(self):
        """
        Returns the extent of this geometry as a 4-tuple, consisting of
        (xmin, ymin, xmax, ymax).
        """
        env = self.envelope
        # The envelope of a point degenerates to the point itself.
        if isinstance(env, Point):
            xmin, ymin = env.tuple
            xmax, ymax = xmin, ymin
        else:
            xmin, ymin = env[0][0]
            xmax, ymax = env[0][2]
        return (xmin, ymin, xmax, ymax)
    @property
    def length(self):
        """
        Returns the length of this Geometry (e.g., 0 for point, or the
        circumference of a Polygon).
        """
        return capi.geos_length(self.ptr, byref(c_double()))
    def clone(self):
        "Clones this Geometry."
        return GEOSGeometry(capi.geom_clone(self.ptr), srid=self.srid)
# Class mapping dictionary. Has to be at the end to avoid import
# conflicts with GEOSGeometry.
from django.contrib.gis.geos.linestring import LineString, LinearRing
from django.contrib.gis.geos.point import Point
from django.contrib.gis.geos.polygon import Polygon
from django.contrib.gis.geos.collections import GeometryCollection, MultiPoint, MultiLineString, MultiPolygon
GEOS_CLASSES = {0 : Point,
1 : LineString,
2 : LinearRing,
3 : Polygon,
4 : MultiPoint,
5 : MultiLineString,
6 : MultiPolygon,
7 : GeometryCollection,
}
# If supported, import the PreparedGeometry class.
if GEOS_PREPARE:
from django.contrib.gis.geos.prepared import PreparedGeometry | unknown | codeparrot/codeparrot-clean | ||
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package context defines the Context type, which carries deadlines,
// cancellation signals, and other request-scoped values across API boundaries
// and between processes.
//
// Incoming requests to a server should create a [Context], and outgoing
// calls to servers should accept a Context. The chain of function
// calls between them must propagate the Context, optionally replacing
// it with a derived Context created using [WithCancel], [WithDeadline],
// [WithTimeout], or [WithValue].
//
// A Context may be canceled to indicate that work done on its behalf should stop.
// A Context with a deadline is canceled after the deadline passes.
// When a Context is canceled, all Contexts derived from it are also canceled.
//
// The [WithCancel], [WithDeadline], and [WithTimeout] functions take a
// Context (the parent) and return a derived Context (the child) and a
// [CancelFunc]. Calling the CancelFunc directly cancels the child and its
// children, removes the parent's reference to the child, and stops
// any associated timers. Failing to call the CancelFunc leaks the
// child and its children until the parent is canceled. The go vet tool
// checks that CancelFuncs are used on all control-flow paths.
//
// The [WithCancelCause], [WithDeadlineCause], and [WithTimeoutCause] functions
// return a [CancelCauseFunc], which takes an error and records it as
// the cancellation cause. Calling [Cause] on the canceled context
// or any of its children retrieves the cause. If no cause is specified,
// Cause(ctx) returns the same value as ctx.Err().
//
// Programs that use Contexts should follow these rules to keep interfaces
// consistent across packages and enable static analysis tools to check context
// propagation:
//
// Do not store Contexts inside a struct type; instead, pass a Context
// explicitly to each function that needs it. This is discussed further in
// https://go.dev/blog/context-and-structs. The Context should be the first
// parameter, typically named ctx:
//
// func DoSomething(ctx context.Context, arg Arg) error {
// // ... use ctx ...
// }
//
// Do not pass a nil [Context], even if a function permits it. Pass [context.TODO]
// if you are unsure about which Context to use.
//
// Use context Values only for request-scoped data that transits processes and
// APIs, not for passing optional parameters to functions.
//
// The same Context may be passed to functions running in different goroutines;
// Contexts are safe for simultaneous use by multiple goroutines.
//
// See https://go.dev/blog/context for example code for a server that uses
// Contexts.
package context
import (
"errors"
"internal/reflectlite"
"sync"
"sync/atomic"
"time"
)
// A Context carries a deadline, a cancellation signal, and other values across
// API boundaries.
//
// Context's methods may be called by multiple goroutines simultaneously.
type Context interface {
	// Deadline returns the time when work done on behalf of this context
	// should be canceled. Deadline returns ok==false when no deadline is
	// set. Successive calls to Deadline return the same results.
	Deadline() (deadline time.Time, ok bool)

	// Done returns a channel that's closed when work done on behalf of this
	// context should be canceled. Done may return nil if this context can
	// never be canceled. Successive calls to Done return the same value.
	// The close of the Done channel may happen asynchronously,
	// after the cancel function returns.
	//
	// WithCancel arranges for Done to be closed when cancel is called;
	// WithDeadline arranges for Done to be closed when the deadline
	// expires; WithTimeout arranges for Done to be closed when the timeout
	// elapses.
	//
	// Done is provided for use in select statements:
	//
	//  // Stream generates values with DoSomething and sends them to out
	//  // until DoSomething returns an error or ctx.Done is closed.
	//  func Stream(ctx context.Context, out chan<- Value) error {
	//  	for {
	//  		v, err := DoSomething(ctx)
	//  		if err != nil {
	//  			return err
	//  		}
	//  		select {
	//  		case <-ctx.Done():
	//  			return ctx.Err()
	//  		case out <- v:
	//  		}
	//  	}
	//  }
	//
	// See https://go.dev/blog/pipelines for more examples of how to use
	// a Done channel for cancellation.
	Done() <-chan struct{}

	// Err explains why this context was canceled, if it was.
	//
	// If Done is not yet closed, Err returns nil.
	// If Done is closed, Err returns a non-nil error explaining why:
	// DeadlineExceeded if the context's deadline passed,
	// or Canceled if the context was canceled for some other reason.
	// After Err returns a non-nil error, successive calls to Err return the same error.
	Err() error

	// Value returns the value associated with this context for key, or nil
	// if no value is associated with key. Successive calls to Value with
	// the same key returns the same result.
	//
	// Use context values only for request-scoped data that transits
	// processes and API boundaries, not for passing optional parameters to
	// functions.
	//
	// A key identifies a specific value in a Context. Functions that wish
	// to store values in Context typically allocate a key in a global
	// variable then use that key as the argument to context.WithValue and
	// Context.Value. A key can be any type that supports equality;
	// packages should define keys as an unexported type to avoid
	// collisions.
	//
	// Packages that define a Context key should provide type-safe accessors
	// for the values stored using that key:
	//
	// 	// Package user defines a User type that's stored in Contexts.
	// 	package user
	//
	// 	import "context"
	//
	// 	// User is the type of value stored in the Contexts.
	// 	type User struct {...}
	//
	// 	// key is an unexported type for keys defined in this package.
	// 	// This prevents collisions with keys defined in other packages.
	// 	type key int
	//
	// 	// userKey is the key for user.User values in Contexts. It is
	// 	// unexported; clients use user.NewContext and user.FromContext
	// 	// instead of using this key directly.
	// 	var userKey key
	//
	// 	// NewContext returns a new Context that carries value u.
	// 	func NewContext(ctx context.Context, u *User) context.Context {
	// 		return context.WithValue(ctx, userKey, u)
	// 	}
	//
	// 	// FromContext returns the User value stored in ctx, if any.
	// 	func FromContext(ctx context.Context) (*User, bool) {
	// 		u, ok := ctx.Value(userKey).(*User)
	// 		return u, ok
	// 	}
	Value(key any) any
}
// Canceled is the error returned by [Context.Err] when the context is canceled
// for some reason other than its deadline passing.
var Canceled = errors.New("context canceled")

// DeadlineExceeded is the error returned by [Context.Err] when the context is canceled
// due to its deadline passing.
var DeadlineExceeded error = deadlineExceededError{}

// deadlineExceededError is the concrete type behind DeadlineExceeded.
// Its Timeout and Temporary methods both report true, so callers that probe
// errors for a Timeout()/Temporary() method treat a deadline expiry as a timeout.
type deadlineExceededError struct{}

func (deadlineExceededError) Error() string   { return "context deadline exceeded" }
func (deadlineExceededError) Timeout() bool   { return true }
func (deadlineExceededError) Temporary() bool { return true }
// An emptyCtx is never canceled, has no values, and has no deadline.
// It is the common base of backgroundCtx and todoCtx.
type emptyCtx struct{}

func (emptyCtx) Deadline() (deadline time.Time, ok bool) {
	// No deadline is ever set: the zero time and ok == false.
	return time.Time{}, false
}

func (emptyCtx) Done() <-chan struct{} {
	// A nil Done channel means this context can never be canceled.
	return nil
}

func (emptyCtx) Err() error {
	// Never canceled, so there is never an error to report.
	return nil
}

func (emptyCtx) Value(key any) any {
	// An emptyCtx carries no values for any key.
	return nil
}

// backgroundCtx is the concrete type returned by Background.
// It differs from todoCtx only in its String result.
type backgroundCtx struct{ emptyCtx }

func (backgroundCtx) String() string {
	return "context.Background"
}

// todoCtx is the concrete type returned by TODO.
type todoCtx struct{ emptyCtx }

func (todoCtx) String() string {
	return "context.TODO"
}
// Background returns a non-nil, empty [Context]. It is never canceled, has no
// values, and has no deadline. It is typically used by the main function,
// initialization, and tests, and as the top-level Context for incoming
// requests.
func Background() Context {
	return backgroundCtx{}
}

// TODO returns a non-nil, empty [Context]. Code should use context.TODO when
// it's unclear which Context to use or it is not yet available (because the
// surrounding function has not yet been extended to accept a Context
// parameter).
//
// Background and TODO behave identically at runtime (both wrap emptyCtx);
// they differ only in intent and in their String output.
func TODO() Context {
	return todoCtx{}
}

// A CancelFunc tells an operation to abandon its work.
// A CancelFunc does not wait for the work to stop.
// A CancelFunc may be called by multiple goroutines simultaneously.
// After the first call, subsequent calls to a CancelFunc do nothing.
type CancelFunc func()
// WithCancel returns a derived context that points to the parent context
// but has a new Done channel. The returned context's Done channel is closed
// when the returned cancel function is called or when the parent context's
// Done channel is closed, whichever happens first.
//
// Canceling this context releases resources associated with it, so code should
// call cancel as soon as the operations running in this [Context] complete.
func WithCancel(parent Context) (ctx Context, cancel CancelFunc) {
	c := withCancel(parent)
	// removeFromParent=true: detach from the parent so it no longer holds
	// a reference to c. The cause is nil, so Cause falls back to Canceled.
	return c, func() { c.cancel(true, Canceled, nil) }
}

// A CancelCauseFunc behaves like a [CancelFunc] but additionally sets the cancellation cause.
// This cause can be retrieved by calling [Cause] on the canceled Context or on
// any of its derived Contexts.
//
// If the context has already been canceled, CancelCauseFunc does not set the cause.
// For example, if childContext is derived from parentContext:
//   - if parentContext is canceled with cause1 before childContext is canceled with cause2,
//     then Cause(parentContext) == Cause(childContext) == cause1
//   - if childContext is canceled with cause2 before parentContext is canceled with cause1,
//     then Cause(parentContext) == cause1 and Cause(childContext) == cause2
type CancelCauseFunc func(cause error)

// WithCancelCause behaves like [WithCancel] but returns a [CancelCauseFunc] instead of a [CancelFunc].
// Calling cancel with a non-nil error (the "cause") records that error in ctx;
// it can then be retrieved using Cause(ctx).
// Calling cancel with nil sets the cause to Canceled.
//
// Example use:
//
//	ctx, cancel := context.WithCancelCause(parent)
//	cancel(myError)
//	ctx.Err() // returns context.Canceled
//	context.Cause(ctx) // returns myError
func WithCancelCause(parent Context) (ctx Context, cancel CancelCauseFunc) {
	c := withCancel(parent)
	// Err() still reports Canceled; only the cause recorded for Cause()
	// differs from plain WithCancel.
	return c, func(cause error) { c.cancel(true, Canceled, cause) }
}

// withCancel is the shared constructor for WithCancel and WithCancelCause:
// it allocates the cancelCtx and registers it for cancellation when parent is
// canceled. It panics on a nil parent.
func withCancel(parent Context) *cancelCtx {
	if parent == nil {
		panic("cannot create context from nil parent")
	}
	c := &cancelCtx{}
	c.propagateCancel(parent, c)
	return c
}
// Cause returns a non-nil error explaining why c was canceled.
// The first cancellation of c or one of its parents sets the cause.
// If that cancellation happened via a call to CancelCauseFunc(err),
// then [Cause] returns err.
// Otherwise Cause(c) returns the same value as c.Err().
// Cause returns nil if c has not been canceled yet.
func Cause(c Context) error {
	err := c.Err()
	if err == nil {
		// Not canceled: by contract, no cause either.
		return nil
	}
	// Look up the innermost cancelCtx; it records the cause for its subtree.
	if cc, ok := c.Value(&cancelCtxKey).(*cancelCtx); ok {
		cc.mu.Lock()
		cause := cc.cause
		cc.mu.Unlock()
		if cause != nil {
			return cause
		}
		// The parent cancelCtx doesn't have a cause,
		// so c must have been canceled in some custom context implementation.
	}
	// We don't have a cause to return from a parent cancelCtx,
	// so return the context's error.
	return err
}
// AfterFunc arranges to call f in its own goroutine after ctx is canceled.
// If ctx is already canceled, AfterFunc calls f immediately in its own goroutine.
//
// Multiple calls to AfterFunc on a context operate independently;
// one does not replace another.
//
// Calling the returned stop function stops the association of ctx with f.
// It returns true if the call stopped f from being run.
// If stop returns false,
// either the context is canceled and f has been started in its own goroutine;
// or f was already stopped.
// The stop function does not wait for f to complete before returning.
// If the caller needs to know whether f is completed,
// it must coordinate with f explicitly.
//
// If ctx has a "AfterFunc(func()) func() bool" method,
// AfterFunc will use it to schedule the call.
func AfterFunc(ctx Context, f func()) (stop func() bool) {
	a := &afterFuncCtx{
		f: f,
	}
	a.cancelCtx.propagateCancel(ctx, a)
	return func() bool {
		// a.once is shared with afterFuncCtx.cancel: whichever of stop or
		// cancel wins the once decides whether f ever runs. If stop wins,
		// stopped becomes true here and f is suppressed forever; if cancel
		// already won, the Do is a no-op and stopped stays false.
		stopped := false
		a.once.Do(func() {
			stopped = true
		})
		if stopped {
			// We won the race; cancel a so it detaches from its parent and
			// releases its resources. f was never (and will never be) run.
			a.cancel(true, Canceled, nil)
		}
		return stopped
	}
}

// afterFuncer is the optional interface a custom Context can implement to
// let AfterFunc delegate scheduling to it instead of spawning a goroutine.
type afterFuncer interface {
	AfterFunc(func()) func() bool
}

// afterFuncCtx is the context implementation backing AfterFunc.
type afterFuncCtx struct {
	cancelCtx
	once sync.Once // either starts running f or stops f from running
	f    func()
}

// cancel cancels the embedded cancelCtx and, if it wins the once race
// against the stop function, launches f in its own goroutine.
func (a *afterFuncCtx) cancel(removeFromParent bool, err, cause error) {
	a.cancelCtx.cancel(false, err, cause)
	if removeFromParent {
		removeChild(a.Context, a)
	}
	a.once.Do(func() {
		go a.f()
	})
}
// A stopCtx is used as the parent context of a cancelCtx when
// an AfterFunc has been registered with the parent.
// It holds the stop function used to unregister the AfterFunc.
type stopCtx struct {
	Context
	stop func() bool
}

// goroutines counts the number of goroutines ever created; for testing.
var goroutines atomic.Int32

// &cancelCtxKey is the key that a cancelCtx returns itself for.
// Only its address matters; the int value is never used.
var cancelCtxKey int
// parentCancelCtx returns the underlying *cancelCtx for parent.
// It does this by looking up parent.Value(&cancelCtxKey) to find
// the innermost enclosing *cancelCtx and then checking whether
// parent.Done() matches that *cancelCtx. (If not, the *cancelCtx
// has been wrapped in a custom implementation providing a
// different done channel, in which case we should not bypass it.)
func parentCancelCtx(parent Context) (*cancelCtx, bool) {
	done := parent.Done()
	if done == closedchan || done == nil {
		// Already canceled (closedchan) or never cancelable (nil):
		// nothing to attach to.
		return nil, false
	}
	p, ok := parent.Value(&cancelCtxKey).(*cancelCtx)
	if !ok {
		return nil, false
	}
	// Confirm the found cancelCtx really owns parent's done channel;
	// otherwise a wrapper between them changes cancellation semantics.
	pdone, _ := p.done.Load().(chan struct{})
	if pdone != done {
		return nil, false
	}
	return p, true
}

// removeChild removes a context from its parent.
func removeChild(parent Context, child canceler) {
	if s, ok := parent.(stopCtx); ok {
		// Parent is an AfterFunc registration; unregister it instead of
		// touching a children map.
		s.stop()
		return
	}
	p, ok := parentCancelCtx(parent)
	if !ok {
		return
	}
	p.mu.Lock()
	if p.children != nil {
		delete(p.children, child)
	}
	p.mu.Unlock()
}

// A canceler is a context type that can be canceled directly. The
// implementations are *cancelCtx and *timerCtx.
type canceler interface {
	cancel(removeFromParent bool, err, cause error)
	Done() <-chan struct{}
}
// closedchan is a reusable closed channel, returned by Done when a context
// is canceled before its done channel was ever created.
var closedchan = make(chan struct{})

func init() {
	close(closedchan)
}

// A cancelCtx can be canceled. When canceled, it also cancels any children
// that implement canceler.
type cancelCtx struct {
	Context

	mu       sync.Mutex            // protects following fields
	done     atomic.Value          // of chan struct{}, created lazily, closed by first cancel call
	children map[canceler]struct{} // set to nil by the first cancel call
	err      atomic.Value          // set to non-nil by the first cancel call
	cause    error                 // set to non-nil by the first cancel call
}

// Value returns c itself for the internal &cancelCtxKey, allowing
// parentCancelCtx to find the innermost cancelCtx; all other keys are
// delegated up the chain via value.
func (c *cancelCtx) Value(key any) any {
	if key == &cancelCtxKey {
		return c
	}
	return value(c.Context, key)
}

// Done lazily creates the done channel using the double-checked pattern:
// a fast atomic load first, then a re-check under the mutex before storing.
func (c *cancelCtx) Done() <-chan struct{} {
	d := c.done.Load()
	if d != nil {
		return d.(chan struct{})
	}
	c.mu.Lock()
	defer c.mu.Unlock()
	// Re-check: another goroutine may have created (or cancel may have
	// stored closedchan into) done while we waited for the lock.
	d = c.done.Load()
	if d == nil {
		d = make(chan struct{})
		c.done.Store(d)
	}
	return d.(chan struct{})
}

func (c *cancelCtx) Err() error {
	// An atomic load is ~5x faster than a mutex, which can matter in tight loops.
	if err := c.err.Load(); err != nil {
		// Ensure the done channel has been closed before returning a non-nil error.
		<-c.Done()
		return err.(error)
	}
	return nil
}
// propagateCancel arranges for child to be canceled when parent is.
// It sets the parent context of cancelCtx.
//
// It tries, in order: no-op for never-canceled parents, immediate cancel for
// already-canceled parents, direct registration in a parent *cancelCtx's
// children map, delegation to a parent's AfterFunc method, and finally a
// dedicated watcher goroutine.
func (c *cancelCtx) propagateCancel(parent Context, child canceler) {
	c.Context = parent

	done := parent.Done()
	if done == nil {
		return // parent is never canceled
	}

	select {
	case <-done:
		// parent is already canceled
		child.cancel(false, parent.Err(), Cause(parent))
		return
	default:
	}

	if p, ok := parentCancelCtx(parent); ok {
		// parent is a *cancelCtx, or derives from one.
		p.mu.Lock()
		if err := p.err.Load(); err != nil {
			// parent has already been canceled
			child.cancel(false, err.(error), p.cause)
		} else {
			if p.children == nil {
				p.children = make(map[canceler]struct{})
			}
			p.children[child] = struct{}{}
		}
		p.mu.Unlock()
		return
	}

	if a, ok := parent.(afterFuncer); ok {
		// parent implements an AfterFunc method.
		c.mu.Lock()
		stop := a.AfterFunc(func() {
			child.cancel(false, parent.Err(), Cause(parent))
		})
		// Wrap parent in a stopCtx so removeChild can later unregister
		// the AfterFunc via stop.
		c.Context = stopCtx{
			Context: parent,
			stop:    stop,
		}
		c.mu.Unlock()
		return
	}

	// Last resort: a goroutine that waits for either side to be canceled.
	goroutines.Add(1)
	go func() {
		select {
		case <-parent.Done():
			child.cancel(false, parent.Err(), Cause(parent))
		case <-child.Done():
			// child canceled first; the goroutine just exits.
		}
	}()
}

// stringer matches anything with a String method, without importing fmt.
type stringer interface {
	String() string
}

// contextName returns ctx's own String result if it has one, falling back
// to its reflected type name.
func contextName(c Context) string {
	if s, ok := c.(stringer); ok {
		return s.String()
	}
	return reflectlite.TypeOf(c).String()
}

func (c *cancelCtx) String() string {
	return contextName(c.Context) + ".WithCancel"
}
// cancel closes c.done, cancels each of c's children, and, if
// removeFromParent is true, removes c from its parent's children.
// cancel sets c.cause to cause if this is the first time c is canceled.
func (c *cancelCtx) cancel(removeFromParent bool, err, cause error) {
	if err == nil {
		panic("context: internal error: missing cancel error")
	}
	if cause == nil {
		// No explicit cause: the cancellation error doubles as the cause,
		// so Cause(ctx) == ctx.Err().
		cause = err
	}
	c.mu.Lock()
	if c.err.Load() != nil {
		c.mu.Unlock()
		return // already canceled
	}
	// Store err before closing done: Err() relies on err being visible once
	// the done channel is observed closed.
	c.err.Store(err)
	c.cause = cause
	d, _ := c.done.Load().(chan struct{})
	if d == nil {
		// done was never created; reuse the shared pre-closed channel.
		c.done.Store(closedchan)
	} else {
		close(d)
	}
	for child := range c.children {
		// NOTE: acquiring the child's lock while holding parent's lock.
		child.cancel(false, err, cause)
	}
	c.children = nil
	c.mu.Unlock()

	// Detach from the parent outside c.mu to avoid holding two locks here.
	if removeFromParent {
		removeChild(c.Context, c)
	}
}
// WithoutCancel returns a derived context that points to the parent context
// and is not canceled when parent is canceled.
// The returned context returns no Deadline or Err, and its Done channel is nil.
// Calling [Cause] on the returned context returns nil.
func WithoutCancel(parent Context) Context {
	if parent == nil {
		panic("cannot create context from nil parent")
	}
	return withoutCancelCtx{parent}
}

// withoutCancelCtx keeps the parent's values reachable while severing its
// cancellation and deadline: Deadline, Done, and Err all report "never".
type withoutCancelCtx struct {
	c Context
}

func (withoutCancelCtx) Deadline() (deadline time.Time, ok bool) {
	return
}

func (withoutCancelCtx) Done() <-chan struct{} {
	return nil
}

func (withoutCancelCtx) Err() error {
	return nil
}

// Value passes c (not c.c) to value so its withoutCancelCtx case can
// intercept &cancelCtxKey and make Cause return nil.
func (c withoutCancelCtx) Value(key any) any {
	return value(c, key)
}

func (c withoutCancelCtx) String() string {
	return contextName(c.c) + ".WithoutCancel"
}
// WithDeadline returns a derived context that points to the parent context
// but has the deadline adjusted to be no later than d. If the parent's
// deadline is already earlier than d, WithDeadline(parent, d) is semantically
// equivalent to parent. The returned [Context.Done] channel is closed when
// the deadline expires, when the returned cancel function is called,
// or when the parent context's Done channel is closed, whichever happens first.
//
// Canceling this context releases resources associated with it, so code should
// call cancel as soon as the operations running in this [Context] complete.
func WithDeadline(parent Context, d time.Time) (Context, CancelFunc) {
	return WithDeadlineCause(parent, d, nil)
}

// WithDeadlineCause behaves like [WithDeadline] but also sets the cause of the
// returned Context when the deadline is exceeded. The returned [CancelFunc] does
// not set the cause.
func WithDeadlineCause(parent Context, d time.Time, cause error) (Context, CancelFunc) {
	if parent == nil {
		panic("cannot create context from nil parent")
	}
	if cur, ok := parent.Deadline(); ok && cur.Before(d) {
		// The current deadline is already sooner than the new one.
		return WithCancel(parent)
	}
	c := &timerCtx{
		deadline: d,
	}
	c.cancelCtx.propagateCancel(parent, c)
	dur := time.Until(d)
	if dur <= 0 {
		c.cancel(true, DeadlineExceeded, cause) // deadline has already passed
		// removeFromParent=false here: the cancel above already detached c.
		return c, func() { c.cancel(false, Canceled, nil) }
	}
	// Only arm the timer if c wasn't canceled while registering with the
	// parent above (propagateCancel can cancel immediately). The lock keeps
	// timer creation and that check atomic with respect to timerCtx.cancel.
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.err.Load() == nil {
		c.timer = time.AfterFunc(dur, func() {
			c.cancel(true, DeadlineExceeded, cause)
		})
	}
	return c, func() { c.cancel(true, Canceled, nil) }
}
// A timerCtx carries a timer and a deadline. It embeds a cancelCtx to
// implement Done and Err. It implements cancel by stopping its timer then
// delegating to cancelCtx.cancel.
type timerCtx struct {
	cancelCtx
	timer *time.Timer // Under cancelCtx.mu.

	deadline time.Time
}

func (c *timerCtx) Deadline() (deadline time.Time, ok bool) {
	return c.deadline, true
}

func (c *timerCtx) String() string {
	return contextName(c.cancelCtx.Context) + ".WithDeadline(" +
		c.deadline.String() + " [" +
		time.Until(c.deadline).String() + "])"
}

// cancel cancels the embedded cancelCtx, optionally detaches from the
// parent, then stops and clears the timer under the mutex so the deadline
// callback cannot fire after cancellation.
func (c *timerCtx) cancel(removeFromParent bool, err, cause error) {
	c.cancelCtx.cancel(false, err, cause)
	if removeFromParent {
		// Remove this timerCtx from its parent cancelCtx's children.
		removeChild(c.cancelCtx.Context, c)
	}
	c.mu.Lock()
	if c.timer != nil {
		c.timer.Stop()
		c.timer = nil
	}
	c.mu.Unlock()
}
// WithTimeout returns WithDeadline(parent, time.Now().Add(timeout)).
//
// Canceling this context releases resources associated with it, so code should
// call cancel as soon as the operations running in this [Context] complete:
//
//	func slowOperationWithTimeout(ctx context.Context) (Result, error) {
//		ctx, cancel := context.WithTimeout(ctx, 100*time.Millisecond)
//		defer cancel() // releases resources if slowOperation completes before timeout elapses
//		return slowOperation(ctx)
//	}
func WithTimeout(parent Context, timeout time.Duration) (Context, CancelFunc) {
	return WithDeadline(parent, time.Now().Add(timeout))
}

// WithTimeoutCause behaves like [WithTimeout] but also sets the cause of the
// returned Context when the timeout expires. The returned [CancelFunc] does
// not set the cause.
func WithTimeoutCause(parent Context, timeout time.Duration, cause error) (Context, CancelFunc) {
	return WithDeadlineCause(parent, time.Now().Add(timeout), cause)
}
// WithValue returns a derived context that points to the parent Context.
// In the derived context, the value associated with key is val.
//
// Use context Values only for request-scoped data that transits processes and
// APIs, not for passing optional parameters to functions.
//
// The provided key must be comparable and should not be of type
// string or any other built-in type to avoid collisions between
// packages using context. Users of WithValue should define their own
// types for keys. To avoid allocating when assigning to an
// interface{}, context keys often have concrete type
// struct{}. Alternatively, exported context key variables' static
// type should be a pointer or interface.
//
// Panics on a nil parent, a nil key, or a key whose type is not comparable
// (comparing such a key inside Value would otherwise panic later).
func WithValue(parent Context, key, val any) Context {
	if parent == nil {
		panic("cannot create context from nil parent")
	}
	if key == nil {
		panic("nil key")
	}
	if !reflectlite.TypeOf(key).Comparable() {
		panic("key is not comparable")
	}
	return &valueCtx{parent, key, val}
}

// A valueCtx carries a key-value pair. It implements Value for that key and
// delegates all other calls to the embedded Context.
type valueCtx struct {
	Context
	key, val any
}
// stringify produces a best-effort textual form of v without using fmt,
// since we don't want context depending on the unicode tables. This is
// only used by *valueCtx.String().
func stringify(v any) string {
	if v == nil {
		return "<nil>"
	}
	if s, ok := v.(stringer); ok {
		return s.String()
	}
	if s, ok := v.(string); ok {
		return s
	}
	return reflectlite.TypeOf(v).String()
}
func (c *valueCtx) String() string {
	return contextName(c.Context) + ".WithValue(" +
		stringify(c.key) + ", " +
		stringify(c.val) + ")"
}

func (c *valueCtx) Value(key any) any {
	if c.key == key {
		return c.val
	}
	return value(c.Context, key)
}

// value walks up the context chain looking for key. It special-cases the
// concrete context types in this package to avoid one recursive Value call
// per link, falling back to the Value method for custom implementations.
func value(c Context, key any) any {
	for {
		switch ctx := c.(type) {
		case *valueCtx:
			if key == ctx.key {
				return ctx.val
			}
			c = ctx.Context
		case *cancelCtx:
			if key == &cancelCtxKey {
				// Return c (the cancelCtx itself), mirroring cancelCtx.Value.
				return c
			}
			c = ctx.Context
		case withoutCancelCtx:
			if key == &cancelCtxKey {
				// This implements Cause(ctx) == nil
				// when ctx is created using WithoutCancel.
				return nil
			}
			c = ctx.c
		case *timerCtx:
			if key == &cancelCtxKey {
				return &ctx.cancelCtx
			}
			c = ctx.Context
		case backgroundCtx, todoCtx:
			// Root of the chain; no value anywhere.
			return nil
		default:
			// Unknown implementation: defer to its own Value method.
			return c.Value(key)
		}
	}
}
from optparse import OptionParser
option_parser = OptionParser(usage='usage: %prog [options] file1 [file2...]')
option_parser.add_option('-v', '--verbose', action='store_true', dest='verbose', default=False,
help='display debug output')
option_parser.add_option('-p', '--properties', dest='properties', action='store_true', default=False,
help='Display properties that can be guessed.')
option_parser.add_option('-l', '--values', dest='values', action='store_true', default=False,
help='Display property values that can be guessed.')
option_parser.add_option('-s', '--transformers', dest='transformers', action='store_true', default=False,
help='Display transformers that can be used.')
option_parser.add_option('-i', '--info', dest='info', default='filename',
help='the desired information type: filename, hash_mpc or a hash from python\'s '
'hashlib module, such as hash_md5, hash_sha1, ...; or a list of any of '
'them, comma-separated')
option_parser.add_option('-n', '--name-only', dest='name_only', action='store_true', default=False,
help='Parse files as name only. Disable folder parsing, extension parsing, and file content analysis.')
option_parser.add_option('-t', '--type', dest='type', default=None,
help='the suggested file type: movie, episode. If undefined, type will be guessed.')
option_parser.add_option('-a', '--advanced', dest='advanced', action='store_true', default=False,
help='display advanced information for filename guesses, as json output')
option_parser.add_option('-y', '--yaml', dest='yaml', action='store_true', default=False,
help='display information for filename guesses as yaml output (like unit-test)')
option_parser.add_option('-d', '--demo', action='store_true', dest='demo', default=False,
help='run a few builtin tests instead of analyzing a file') | unknown | codeparrot/codeparrot-clean | ||
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Switzerland - BVR/ESR Bank statement Completion',
'version': '1.0',
'author': "Camptocamp,Odoo Community Association (OCA)",
'maintainer': 'Camptocamp',
'license': 'AGPL-3',
'category': 'Hidden',
'depends': ['l10n_ch_payment_slip',
'account_statement_base_completion'],
'description': """
Swiss BVR/ESR Bank statement Completion
=======================================
Link module between the Swiss localization BVR/ESR module
(l10n_ch_payment_slip) and the module adding a transaction ID
field in the bank statement (account_statement_base_completion).
It adds a completion rule to search the partner from the invoice
using the BVR/ESR reference.
When importing a BVR/ESR, the transaction ID is also copied to the
transaction id field of the bank statement.
This module is needed if you use the Swiss localization module and the
bank-statement-reconcile project in the banking addons
(https://launchpad.net/banking-addons).
""",
'website': 'http://www.camptocamp.com',
'data': ['data.xml'],
'tests': [],
'installable': False,
'auto_install': True,
} | unknown | codeparrot/codeparrot-clean | ||
// Copyright 2017 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// The file provides the IsStrictlyBaseOfAndConvertibleToSTLContainer type
// trait metafunction to assist in working with the _GLIBCXX_DEBUG debug
// wrappers of STL containers.
//
// DO NOT INCLUDE THIS FILE DIRECTLY. Use this file by including
// absl/strings/str_split.h.
//
// IWYU pragma: private, include "absl/strings/str_split.h"
#ifndef ABSL_STRINGS_INTERNAL_STL_TYPE_TRAITS_H_
#define ABSL_STRINGS_INTERNAL_STL_TYPE_TRAITS_H_
#include <array>
#include <bitset>
#include <deque>
#include <forward_list>
#include <list>
#include <map>
#include <set>
#include <type_traits>
#include <unordered_map>
#include <unordered_set>
#include <vector>
#include "absl/meta/type_traits.h"
namespace absl {
ABSL_NAMESPACE_BEGIN
namespace strings_internal {
template <typename C, template <typename...> class T>
struct IsSpecializationImpl : std::false_type {};
template <template <typename...> class T, typename... Args>
struct IsSpecializationImpl<T<Args...>, T> : std::true_type {};
template <typename C, template <typename...> class T>
using IsSpecialization = IsSpecializationImpl<absl::decay_t<C>, T>;
template <typename C>
struct IsArrayImpl : std::false_type {};
template <template <typename, size_t> class A, typename T, size_t N>
struct IsArrayImpl<A<T, N>> : std::is_same<A<T, N>, std::array<T, N>> {};
template <typename C>
using IsArray = IsArrayImpl<absl::decay_t<C>>;
template <typename C>
struct IsBitsetImpl : std::false_type {};
template <template <size_t> class B, size_t N>
struct IsBitsetImpl<B<N>> : std::is_same<B<N>, std::bitset<N>> {};
template <typename C>
using IsBitset = IsBitsetImpl<absl::decay_t<C>>;
template <typename C>
struct IsSTLContainer
: absl::disjunction<
IsArray<C>, IsBitset<C>, IsSpecialization<C, std::deque>,
IsSpecialization<C, std::forward_list>,
IsSpecialization<C, std::list>, IsSpecialization<C, std::map>,
IsSpecialization<C, std::multimap>, IsSpecialization<C, std::set>,
IsSpecialization<C, std::multiset>,
IsSpecialization<C, std::unordered_map>,
IsSpecialization<C, std::unordered_multimap>,
IsSpecialization<C, std::unordered_set>,
IsSpecialization<C, std::unordered_multiset>,
IsSpecialization<C, std::vector>> {};
// Checks whether C (decayed) is a base class of the STL template T
// instantiated with C's own nested typedefs (value_type, key_type, etc.).
template <typename C, template <typename...> class T, typename = void>
struct IsBaseOfSpecializationImpl : std::false_type {};
// IsBaseOfSpecializationImpl needs multiple partial specializations to SFINAE
// on the existence of container dependent types and plug them into the STL
// template.
// Two template args: sequence containers, T<value_type, allocator_type>.
template <typename C, template <typename, typename> class T>
struct IsBaseOfSpecializationImpl<
    C, T, absl::void_t<typename C::value_type, typename C::allocator_type>>
    : std::is_base_of<C,
                      T<typename C::value_type, typename C::allocator_type>> {};
// Three args: ordered set-like containers, T<key_type, key_compare,
// allocator_type>.
template <typename C, template <typename, typename, typename> class T>
struct IsBaseOfSpecializationImpl<
    C, T,
    absl::void_t<typename C::key_type, typename C::key_compare,
                 typename C::allocator_type>>
    : std::is_base_of<C, T<typename C::key_type, typename C::key_compare,
                           typename C::allocator_type>> {};
// Four args with mapped_type: ordered map-like containers.
template <typename C, template <typename, typename, typename, typename> class T>
struct IsBaseOfSpecializationImpl<
    C, T,
    absl::void_t<typename C::key_type, typename C::mapped_type,
                 typename C::key_compare, typename C::allocator_type>>
    : std::is_base_of<C,
                      T<typename C::key_type, typename C::mapped_type,
                        typename C::key_compare, typename C::allocator_type>> {
};
// Four args with hasher/key_equal: unordered set-like containers.
template <typename C, template <typename, typename, typename, typename> class T>
struct IsBaseOfSpecializationImpl<
    C, T,
    absl::void_t<typename C::key_type, typename C::hasher,
                 typename C::key_equal, typename C::allocator_type>>
    : std::is_base_of<C, T<typename C::key_type, typename C::hasher,
                           typename C::key_equal, typename C::allocator_type>> {
};
// Five args: unordered map-like containers.
template <typename C,
          template <typename, typename, typename, typename, typename> class T>
struct IsBaseOfSpecializationImpl<
    C, T,
    absl::void_t<typename C::key_type, typename C::mapped_type,
                 typename C::hasher, typename C::key_equal,
                 typename C::allocator_type>>
    : std::is_base_of<C, T<typename C::key_type, typename C::mapped_type,
                           typename C::hasher, typename C::key_equal,
                           typename C::allocator_type>> {};
template <typename C, template <typename...> class T>
using IsBaseOfSpecialization = IsBaseOfSpecializationImpl<absl::decay_t<C>, T>;
// Checks whether the decayed C, matched as some A<T, N>, is a base class of
// std::array<T, N> (std::is_base_of is also true when the two types are the
// same class).
template <typename C>
struct IsBaseOfArrayImpl : std::false_type {};
template <template <typename, size_t> class A, typename T, size_t N>
struct IsBaseOfArrayImpl<A<T, N>> : std::is_base_of<A<T, N>, std::array<T, N>> {
};
template <typename C>
using IsBaseOfArray = IsBaseOfArrayImpl<absl::decay_t<C>>;
// Checks whether the decayed C, matched as some B<N>, is a base class of
// std::bitset<N> (true in particular when C is std::bitset<N> itself).
template <typename C>
struct IsBaseOfBitsetImpl : std::false_type {};
template <template <size_t> class B, size_t N>
struct IsBaseOfBitsetImpl<B<N>> : std::is_base_of<B<N>, std::bitset<N>> {};
template <typename C>
using IsBaseOfBitset = IsBaseOfBitsetImpl<absl::decay_t<C>>;
// True iff any of the IsBaseOf* checks above succeeds for C against one of
// the supported STL container templates.
template <typename C>
struct IsBaseOfSTLContainer
    : absl::disjunction<IsBaseOfArray<C>, IsBaseOfBitset<C>,
                        IsBaseOfSpecialization<C, std::deque>,
                        IsBaseOfSpecialization<C, std::forward_list>,
                        IsBaseOfSpecialization<C, std::list>,
                        IsBaseOfSpecialization<C, std::map>,
                        IsBaseOfSpecialization<C, std::multimap>,
                        IsBaseOfSpecialization<C, std::set>,
                        IsBaseOfSpecialization<C, std::multiset>,
                        IsBaseOfSpecialization<C, std::unordered_map>,
                        IsBaseOfSpecialization<C, std::unordered_multimap>,
                        IsBaseOfSpecialization<C, std::unordered_set>,
                        IsBaseOfSpecialization<C, std::unordered_multiset>,
                        IsBaseOfSpecialization<C, std::vector>> {};
// Checks whether C (decayed) is implicitly convertible to the STL template T
// instantiated with C's own nested typedefs. Mirrors the structure of
// IsBaseOfSpecializationImpl above, with std::is_convertible instead of
// std::is_base_of.
template <typename C, template <typename...> class T, typename = void>
struct IsConvertibleToSpecializationImpl : std::false_type {};
// IsConvertibleToSpecializationImpl needs multiple partial specializations to
// SFINAE on the existence of container dependent types and plug them into the
// STL template.
// Two template args: sequence containers.
template <typename C, template <typename, typename> class T>
struct IsConvertibleToSpecializationImpl<
    C, T, absl::void_t<typename C::value_type, typename C::allocator_type>>
    : std::is_convertible<
          C, T<typename C::value_type, typename C::allocator_type>> {};
// Three args: ordered set-like containers.
template <typename C, template <typename, typename, typename> class T>
struct IsConvertibleToSpecializationImpl<
    C, T,
    absl::void_t<typename C::key_type, typename C::key_compare,
                 typename C::allocator_type>>
    : std::is_convertible<C, T<typename C::key_type, typename C::key_compare,
                               typename C::allocator_type>> {};
// Four args with mapped_type: ordered map-like containers.
template <typename C, template <typename, typename, typename, typename> class T>
struct IsConvertibleToSpecializationImpl<
    C, T,
    absl::void_t<typename C::key_type, typename C::mapped_type,
                 typename C::key_compare, typename C::allocator_type>>
    : std::is_convertible<
          C, T<typename C::key_type, typename C::mapped_type,
               typename C::key_compare, typename C::allocator_type>> {};
// Four args with hasher/key_equal: unordered set-like containers.
template <typename C, template <typename, typename, typename, typename> class T>
struct IsConvertibleToSpecializationImpl<
    C, T,
    absl::void_t<typename C::key_type, typename C::hasher,
                 typename C::key_equal, typename C::allocator_type>>
    : std::is_convertible<
          C, T<typename C::key_type, typename C::hasher, typename C::key_equal,
               typename C::allocator_type>> {};
// Five args: unordered map-like containers.
template <typename C,
          template <typename, typename, typename, typename, typename> class T>
struct IsConvertibleToSpecializationImpl<
    C, T,
    absl::void_t<typename C::key_type, typename C::mapped_type,
                 typename C::hasher, typename C::key_equal,
                 typename C::allocator_type>>
    : std::is_convertible<C, T<typename C::key_type, typename C::mapped_type,
                               typename C::hasher, typename C::key_equal,
                               typename C::allocator_type>> {};
template <typename C, template <typename...> class T>
using IsConvertibleToSpecialization =
    IsConvertibleToSpecializationImpl<absl::decay_t<C>, T>;
// Checks whether the decayed C, matched as some A<T, N>, is implicitly
// convertible to std::array<T, N>.
template <typename C>
struct IsConvertibleToArrayImpl : std::false_type {};
template <template <typename, size_t> class A, typename T, size_t N>
struct IsConvertibleToArrayImpl<A<T, N>>
    : std::is_convertible<A<T, N>, std::array<T, N>> {};
template <typename C>
using IsConvertibleToArray = IsConvertibleToArrayImpl<absl::decay_t<C>>;
// Checks whether the decayed C, matched as some B<N>, is implicitly
// convertible to std::bitset<N>.
template <typename C>
struct IsConvertibleToBitsetImpl : std::false_type {};
template <template <size_t> class B, size_t N>
struct IsConvertibleToBitsetImpl<B<N>>
    : std::is_convertible<B<N>, std::bitset<N>> {};
template <typename C>
using IsConvertibleToBitset = IsConvertibleToBitsetImpl<absl::decay_t<C>>;
// True iff any of the IsConvertibleTo* checks above succeeds for C against
// one of the supported STL container templates.
template <typename C>
struct IsConvertibleToSTLContainer
    : absl::disjunction<
          IsConvertibleToArray<C>, IsConvertibleToBitset<C>,
          IsConvertibleToSpecialization<C, std::deque>,
          IsConvertibleToSpecialization<C, std::forward_list>,
          IsConvertibleToSpecialization<C, std::list>,
          IsConvertibleToSpecialization<C, std::map>,
          IsConvertibleToSpecialization<C, std::multimap>,
          IsConvertibleToSpecialization<C, std::set>,
          IsConvertibleToSpecialization<C, std::multiset>,
          IsConvertibleToSpecialization<C, std::unordered_map>,
          IsConvertibleToSpecialization<C, std::unordered_multimap>,
          IsConvertibleToSpecialization<C, std::unordered_set>,
          IsConvertibleToSpecialization<C, std::unordered_multiset>,
          IsConvertibleToSpecialization<C, std::vector>> {};
// True iff C is *not* itself one of the supported STL containers, yet is both
// a base of and convertible to the STL container built from its own nested
// typedefs (per the three traits combined below).
template <typename C>
struct IsStrictlyBaseOfAndConvertibleToSTLContainer
    : absl::conjunction<absl::negation<IsSTLContainer<C>>,
                        IsBaseOfSTLContainer<C>,
                        IsConvertibleToSTLContainer<C>> {};
} // namespace strings_internal
ABSL_NAMESPACE_END
} // namespace absl
#endif  // ABSL_STRINGS_INTERNAL_STL_TYPE_TRAITS_H_
# -*- test-case-name: twisted.internet.test.test_sigchld -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This module is used to integrate child process termination into a
reactor event loop. This is a challenging feature to provide because
most platforms indicate process termination via SIGCHLD and do not
provide a way to wait for that signal and arbitrary I/O events at the
same time. The naive implementation involves installing a Python
SIGCHLD handler; unfortunately this leads to other syscalls being
interrupted (whenever SIGCHLD is received) and failing with EINTR
(which almost no one is prepared to handle). This interruption can be
disabled via siginterrupt(2) (or one of the equivalent mechanisms);
however, if the SIGCHLD is delivered by the platform to a non-main
thread (not a common occurrence, but difficult to prove impossible),
the main thread (waiting on select() or another event notification
API) may not wake up leading to an arbitrary delay before the child
termination is noticed.
The basic solution to all these issues involves enabling SA_RESTART
(ie, disabling system call interruption) and registering a C signal
handler which writes a byte to a pipe. The other end of the pipe is
registered with the event loop, allowing it to wake up shortly after
SIGCHLD is received. See L{twisted.internet.posixbase._SIGCHLDWaker}
for the implementation of the event loop side of this solution. The
use of a pipe this way is known as the U{self-pipe
trick<http://cr.yp.to/docs/selfpipe.html>}.
From Python version 2.6, C{signal.siginterrupt} and C{signal.set_wakeup_fd}
provide the necessary C signal handler which writes to the pipe to be
registered with C{SA_RESTART}.
"""
from __future__ import division, absolute_import
import signal
def installHandler(fd):
    """
    Install a signal handler which will write a byte to C{fd} when
    I{SIGCHLD} is received.

    This is implemented by installing a SIGCHLD handler that does nothing,
    setting the I{SIGCHLD} handler as not allowed to interrupt system calls,
    and using L{signal.set_wakeup_fd} to do the actual writing.

    @param fd: The file descriptor to which to write when I{SIGCHLD} is
        received.
    @type fd: C{int}

    @return: The file descriptor previously registered with
        C{signal.set_wakeup_fd}, or C{None} when C{fd} is -1 (uninstall).
    """
    # BUG FIX: a stray ``return;`` at the top of this function made the
    # entire body unreachable, so no handler was ever installed (the
    # opposite of what the docstring promises).  It has been removed.
    if fd == -1:
        # Uninstall: restore the platform-default SIGCHLD disposition.
        signal.signal(signal.SIGCHLD, signal.SIG_DFL)
    else:
        def noopSignalHandler(*args):
            # The Python-level handler does nothing; set_wakeup_fd()
            # performs the actual byte write from C.
            pass
        signal.signal(signal.SIGCHLD, noopSignalHandler)
        # SA_RESTART semantics: do not let SIGCHLD interrupt blocked
        # system calls with EINTR (see module docstring).
        signal.siginterrupt(signal.SIGCHLD, False)
        return signal.set_wakeup_fd(fd)
def isDefaultHandler():
    """
    Determine whether the I{SIGCHLD} handler is the default or not.
    """
    current = signal.getsignal(signal.SIGCHLD)
    return current == signal.SIG_DFL
# Copyright 2014 - Mirantis, Inc.
# Copyright 2015 - Huawei Technologies Co. Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from mistral.db.v2 import api as db_api
from mistral import exceptions as exc
from mistral import utils
def _compare_parameters(expected_input, actual_input):
"""Compares the expected parameters with the actual parameters.
:param expected_input: Expected dict of parameters.
:param actual_input: Actual dict of parameters.
:return: Tuple {missing parameter names, unexpected parameter names}
"""
missing_params = []
unexpected_params = copy.deepcopy(list((actual_input or {}).keys()))
for p_name, p_value in expected_input.items():
if p_value is utils.NotDefined and p_name not in unexpected_params:
missing_params.append(str(p_name))
if p_name in unexpected_params:
unexpected_params.remove(p_name)
return missing_params, unexpected_params
def validate_input(expected_input, actual_input, obj_name, obj_class):
    """Check provided parameters against the expected ones.

    :param expected_input: Expected dict of parameters.
    :param actual_input: Actually provided dict of parameters (or None).
    :param obj_name: Name of the object being validated (for the error).
    :param obj_class: Class of the object being validated (for the error).
    :raises exc.InputException: If parameters are missing or unexpected.
    """
    missing, unexpected = _compare_parameters(
        expected_input,
        actual_input or {}
    )

    if not (missing or unexpected):
        return

    # Assemble the error message from its optional segments.
    segments = ['Invalid input [name=%s, class=%s']
    values = [obj_name, obj_class]

    if missing:
        segments.append(', missing=%s')
        values.append(missing)

    if unexpected:
        segments.append(', unexpected=%s')
        values.append(unexpected)

    segments.append(']')

    raise exc.InputException(''.join(segments) % tuple(values))
def resolve_workflow_definition(parent_wf_name, parent_wf_spec_name,
                                namespace, wf_spec_name):
    """Find the workflow definition for a child workflow.

    :param parent_wf_name: Full name of the parent workflow (may include a
        workbook prefix, i.e. "<workbook>.<workflow>").
    :param parent_wf_spec_name: Name of the parent workflow as declared in
        its spec (no workbook prefix).
    :param namespace: Namespace to look the workflow up in.
    :param wf_spec_name: Spec name of the child workflow to resolve.
    :return: Workflow definition object.
    :raises exc.WorkflowException: If no definition can be found.
    """
    wf_def = None

    if parent_wf_name != parent_wf_spec_name:
        # If parent workflow belongs to a workbook then
        # check child workflow within the same workbook
        # (to be able to use short names within workbooks).
        # If it doesn't exist then use a name from spec
        # to find a workflow in DB.
        #
        # BUG FIX: the previous code used
        # parent_wf_name.rstrip(parent_wf_spec_name)[:-1], but str.rstrip()
        # removes a *character set*, not a suffix, so workbook names ending
        # in characters that also occur in the workflow name got corrupted
        # (e.g. "wbf.wf".rstrip("wf") -> "wb."). Slice the ".<spec name>"
        # suffix off explicitly instead.
        suffix = '.' + parent_wf_spec_name

        if parent_wf_name.endswith(suffix):
            wb_name = parent_wf_name[:-len(suffix)]
        else:
            wb_name = parent_wf_name

        wf_full_name = "%s.%s" % (wb_name, wf_spec_name)

        wf_def = db_api.load_workflow_definition(wf_full_name, namespace)

    if not wf_def:
        wf_def = db_api.load_workflow_definition(wf_spec_name, namespace)

    if not wf_def:
        raise exc.WorkflowException(
            "Failed to find workflow [name=%s] [namespace=%s]" %
            (wf_spec_name, namespace)
        )

    return wf_def
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import os
from importlib.metadata import PackageNotFoundError, metadata
from unittest import mock
import pytest
from sqlalchemy import func, select
from sqlalchemy.orm import Session
from airflow.api_fastapi.core_api.datamodels.common import BulkActionResponse, BulkBody
from airflow.api_fastapi.core_api.datamodels.connections import ConnectionBody
from airflow.api_fastapi.core_api.services.public.connections import BulkConnectionService
from airflow.models import Connection
from airflow.secrets.environment_variables import CONN_ENV_PREFIX
from airflow.utils.session import NEW_SESSION, provide_session
from tests_common.test_utils.api_fastapi import _check_last_log
from tests_common.test_utils.asserts import assert_queries_count
from tests_common.test_utils.db import clear_db_connections, clear_db_logs, clear_test_connections
from tests_common.test_utils.markers import skip_if_force_lowest_dependencies_marker
# Every test in this module requires metadata-database access.
pytestmark = pytest.mark.db_test
# Field values for the primary fixture connection.
TEST_CONN_ID = "test_connection_id"
TEST_CONN_TYPE = "test_type"
TEST_CONN_DESCRIPTION = "some_description_a"
TEST_CONN_HOST = "some_host_a"
TEST_CONN_PORT = 8080
TEST_CONN_LOGIN = "some_login"
TEST_CONN_SCHEMA = "https"
TEST_CONN_EXTRA = '{"extra_key": "extra_value"}'
# Field values for the secondary fixture connection (list/sort tests).
TEST_CONN_ID_2 = "test_connection_id_2"
TEST_CONN_TYPE_2 = "test_type_2"
TEST_CONN_DESCRIPTION_2 = "some_description_b"
TEST_CONN_HOST_2 = "some_host_b"
TEST_CONN_PORT_2 = 8081
TEST_CONN_LOGIN_2 = "some_login_b"
# Identifiers used by bulk-operation tests.
TEST_CONN_ID_3 = "test_connection_id_3"
TEST_CONN_TYPE_3 = "test_type_3"
@provide_session
def _create_connection(team_name: str | None = None, session: Session = NEW_SESSION) -> None:
    """Persist the primary fixture connection, optionally owned by a team."""
    fields = {
        "conn_id": TEST_CONN_ID,
        "conn_type": TEST_CONN_TYPE,
        "description": TEST_CONN_DESCRIPTION,
        "host": TEST_CONN_HOST,
        "port": TEST_CONN_PORT,
        "login": TEST_CONN_LOGIN,
        "team_name": team_name,
    }
    session.add(Connection(**fields))
@provide_session
def _create_connections(session: Session = NEW_SESSION) -> None:
    """Persist both fixture connections within the same session."""
    _create_connection(session=session)
    session.add(
        Connection(
            conn_id=TEST_CONN_ID_2,
            conn_type=TEST_CONN_TYPE_2,
            description=TEST_CONN_DESCRIPTION_2,
            host=TEST_CONN_HOST_2,
            port=TEST_CONN_PORT_2,
            login=TEST_CONN_LOGIN_2,
        )
    )
class TestConnectionEndpoint:
    """Base class: per-test DB cleanup and fixture-creation helpers."""
    @pytest.fixture(autouse=True)
    def setup(self) -> None:
        # Start each test from a clean slate: pooled test connections,
        # stored connections and audit-log rows are all dropped.
        clear_test_connections(False)
        clear_db_connections(False)
        clear_db_logs()
    def teardown_method(self) -> None:
        clear_db_connections()
    def create_connection(self, team_name: str | None = None):
        # Persist the single primary fixture connection.
        _create_connection(team_name=team_name)
    def create_connections(self):
        # Persist both fixture connections (used by list-endpoint tests).
        _create_connections()
class TestDeleteConnection(TestConnectionEndpoint):
    """Tests for DELETE /connections/{connection_id}."""
    def test_delete_should_respond_204(self, test_client, session):
        # Arrange: exactly one stored connection.
        self.create_connection()
        conns = session.scalars(select(Connection)).all()
        assert len(conns) == 1
        response = test_client.delete(f"/connections/{TEST_CONN_ID}")
        assert response.status_code == 204
        # The row must be gone and the deletion must be audit-logged.
        connection = session.scalars(select(Connection)).all()
        assert len(connection) == 0
        _check_last_log(session, dag_id=None, event="delete_connection", logical_date=None)
    def test_should_respond_401(self, unauthenticated_test_client):
        response = unauthenticated_test_client.delete(f"/connections/{TEST_CONN_ID}")
        assert response.status_code == 401
    def test_should_respond_403(self, unauthorized_test_client):
        response = unauthorized_test_client.delete(f"/connections/{TEST_CONN_ID}")
        assert response.status_code == 403
    def test_delete_should_respond_404(self, test_client):
        # Deleting a non-existent connection yields 404 with a clear detail.
        response = test_client.delete(f"/connections/{TEST_CONN_ID}")
        assert response.status_code == 404
        body = response.json()
        assert f"The Connection with connection_id: `{TEST_CONN_ID}` was not found" == body["detail"]
class TestGetConnection(TestConnectionEndpoint):
    """Tests for GET /connections/{connection_id}."""
    def test_get_should_respond_200(self, test_client, testing_team, session):
        self.create_connection(team_name=testing_team.name)
        response = test_client.get(f"/connections/{TEST_CONN_ID}")
        assert response.status_code == 200
        body = response.json()
        assert body["connection_id"] == TEST_CONN_ID
        assert body["conn_type"] == TEST_CONN_TYPE
        assert body["team_name"] == testing_team.name
    def test_should_respond_401(self, unauthenticated_test_client):
        response = unauthenticated_test_client.get(f"/connections/{TEST_CONN_ID}")
        assert response.status_code == 401
    def test_should_respond_403(self, unauthorized_test_client):
        response = unauthorized_test_client.get(f"/connections/{TEST_CONN_ID}")
        assert response.status_code == 403
    def test_get_should_respond_404(self, test_client):
        response = test_client.get(f"/connections/{TEST_CONN_ID}")
        assert response.status_code == 404
        body = response.json()
        assert f"The Connection with connection_id: `{TEST_CONN_ID}` was not found" == body["detail"]
    def test_get_should_respond_200_with_extra(self, test_client, session):
        # Non-sensitive ``extra`` content is returned verbatim.
        self.create_connection()
        connection = session.scalars(select(Connection)).first()
        connection.extra = '{"extra_key": "extra_value"}'
        session.commit()
        response = test_client.get(f"/connections/{TEST_CONN_ID}")
        assert response.status_code == 200
        body = response.json()
        assert body["connection_id"] == TEST_CONN_ID
        assert body["conn_type"] == TEST_CONN_TYPE
        assert body["extra"] == '{"extra_key": "extra_value"}'
    @pytest.mark.enable_redact
    def test_get_should_respond_200_with_extra_redacted(self, test_client, session):
        # Sensitive keys (e.g. "password") inside ``extra`` must be masked.
        self.create_connection()
        connection = session.scalars(select(Connection)).first()
        connection.extra = '{"password": "test-password"}'
        session.commit()
        response = test_client.get(f"/connections/{TEST_CONN_ID}")
        assert response.status_code == 200
        body = response.json()
        assert body["connection_id"] == TEST_CONN_ID
        assert body["conn_type"] == TEST_CONN_TYPE
        assert body["extra"] == '{"password": "***"}'
    @pytest.mark.enable_redact
    def test_get_should_not_overmask_short_password_value_in_extra(self, test_client, session):
        # A one-character password must not cause unrelated values that
        # happen to contain that character to be masked as well.
        connection = Connection(
            conn_id=TEST_CONN_ID, conn_type="generic", login="a", password="a", extra='{"key": "value"}'
        )
        session.add(connection)
        session.commit()
        response = test_client.get(f"/connections/{TEST_CONN_ID}")
        assert response.status_code == 200
        body = response.json()
        assert body["connection_id"] == TEST_CONN_ID
        assert body["conn_type"] == "generic"
        assert body["login"] == "a"
        assert body["extra"] == '{"key": "value"}'
class TestGetConnections(TestConnectionEndpoint):
    """Tests for GET /connections (listing: paging, sorting, searching)."""
    @pytest.mark.parametrize(
        ("query_params", "expected_total_entries", "expected_ids"),
        [
            # Filters
            ({}, 2, [TEST_CONN_ID, TEST_CONN_ID_2]),
            ({"limit": 1}, 2, [TEST_CONN_ID]),
            ({"limit": 1, "offset": 1}, 2, [TEST_CONN_ID_2]),
            # Sort
            ({"order_by": "-connection_id"}, 2, [TEST_CONN_ID_2, TEST_CONN_ID]),
            ({"order_by": "conn_type"}, 2, [TEST_CONN_ID, TEST_CONN_ID_2]),
            ({"order_by": "-conn_type"}, 2, [TEST_CONN_ID_2, TEST_CONN_ID]),
            ({"order_by": "description"}, 2, [TEST_CONN_ID, TEST_CONN_ID_2]),
            ({"order_by": "-description"}, 2, [TEST_CONN_ID_2, TEST_CONN_ID]),
            ({"order_by": "host"}, 2, [TEST_CONN_ID, TEST_CONN_ID_2]),
            ({"order_by": "-host"}, 2, [TEST_CONN_ID_2, TEST_CONN_ID]),
            ({"order_by": "port"}, 2, [TEST_CONN_ID, TEST_CONN_ID_2]),
            ({"order_by": "-port"}, 2, [TEST_CONN_ID_2, TEST_CONN_ID]),
            ({"order_by": "id"}, 2, [TEST_CONN_ID, TEST_CONN_ID_2]),
            ({"order_by": "-id"}, 2, [TEST_CONN_ID_2, TEST_CONN_ID]),
            # Search
            ({"connection_id_pattern": "n_id_2"}, 1, [TEST_CONN_ID_2]),
        ],
    )
    def test_should_respond_200(
        self, test_client, session, query_params, expected_total_entries, expected_ids
    ):
        self.create_connections()
        # Fixed query budget guards against N+1 regressions in the endpoint.
        with assert_queries_count(3):
            response = test_client.get("/connections", params=query_params)
        assert response.status_code == 200
        body = response.json()
        assert body["total_entries"] == expected_total_entries
        assert [connection["connection_id"] for connection in body["connections"]] == expected_ids
    def test_should_respond_401(self, unauthenticated_test_client):
        response = unauthenticated_test_client.get("/connections", params={})
        assert response.status_code == 401
    def test_should_respond_403(self, unauthorized_test_client):
        response = unauthorized_test_client.get("/connections", params={})
        assert response.status_code == 403
    @mock.patch(
        "airflow.api_fastapi.auth.managers.base_auth_manager.BaseAuthManager.get_authorized_connections"
    )
    def test_should_call_get_authorized_connections(self, mock_get_authorized_connections, test_client):
        # Listing must be restricted to what the auth manager authorizes.
        self.create_connections()
        mock_get_authorized_connections.return_value = {TEST_CONN_ID}
        response = test_client.get("/connections")
        mock_get_authorized_connections.assert_called_once_with(user=mock.ANY, method="GET")
        assert response.status_code == 200
        body = response.json()
        assert body["total_entries"] == 1
        assert [connection["connection_id"] for connection in body["connections"]] == [TEST_CONN_ID]
class TestPostConnection(TestConnectionEndpoint):
    """Tests for POST /connections (creation, validation, redaction)."""
    @pytest.mark.parametrize(
        "body",
        [
            {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE},
            {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "extra": None},
            {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "extra": "{}"},
            {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "extra": '{"key": "value"}'},
            {
                "connection_id": TEST_CONN_ID,
                "conn_type": TEST_CONN_TYPE,
                "description": "test_description",
                "host": "test_host",
                "login": "test_login",
                "schema": "test_schema",
                "port": 8080,
                "extra": '{"key": "value"}',
            },
        ],
    )
    def test_post_should_respond_201(self, test_client, session, body):
        # Each valid payload variant creates exactly one row and is logged.
        response = test_client.post("/connections", json=body)
        assert response.status_code == 201
        connection = session.scalars(select(Connection)).all()
        assert len(connection) == 1
        _check_last_log(session, dag_id=None, event="post_connection", logical_date=None)
    def test_post_should_respond_201_with_team(self, test_client, session, testing_team):
        # The optional team_name is persisted and echoed back.
        response = test_client.post(
            "/connections",
            json={"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "team_name": testing_team.name},
        )
        assert response.status_code == 201
        assert response.json() == {
            "connection_id": TEST_CONN_ID,
            "conn_type": TEST_CONN_TYPE,
            "description": None,
            "extra": None,
            "host": None,
            "login": None,
            "password": None,
            "port": None,
            "schema": None,
            "team_name": testing_team.name,
        }
    def test_should_respond_401(self, unauthenticated_test_client):
        response = unauthenticated_test_client.post("/connections", json={})
        assert response.status_code == 401
    def test_should_respond_403(self, unauthorized_test_client):
        response = unauthorized_test_client.post("/connections", json={})
        assert response.status_code == 403
    @pytest.mark.parametrize(
        "body",
        [
            {"connection_id": "****", "conn_type": TEST_CONN_TYPE},
            {"connection_id": "test()", "conn_type": TEST_CONN_TYPE},
            {"connection_id": "this_^$#is_invalid", "conn_type": TEST_CONN_TYPE},
            {"connection_id": "iam_not@#$_connection_id", "conn_type": TEST_CONN_TYPE},
        ],
    )
    def test_post_should_respond_422_for_invalid_conn_id(self, test_client, body):
        # Connection ids outside the allowed character set are rejected.
        response = test_client.post("/connections", json=body)
        assert response.status_code == 422
        # This regex is used for validation in ConnectionBody
        assert response.json() == {
            "detail": [
                {
                    "ctx": {"pattern": r"^[\w.-]+$"},
                    "input": f"{body['connection_id']}",
                    "loc": ["body", "connection_id"],
                    "msg": "String should match pattern '^[\\w.-]+$'",
                    "type": "string_pattern_mismatch",
                }
            ]
        }
    @pytest.mark.parametrize(
        "body",
        [
            {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE},
        ],
    )
    def test_post_should_respond_already_exist(self, test_client, body):
        # Re-posting the same connection id yields a structured 409 conflict.
        response = test_client.post("/connections", json=body)
        assert response.status_code == 201
        # Another request
        response = test_client.post("/connections", json=body)
        assert response.status_code == 409
        response_json = response.json()
        assert "detail" in response_json
        assert list(response_json["detail"].keys()) == ["reason", "statement", "orig_error", "message"]
    @pytest.mark.enable_redact
    @pytest.mark.parametrize(
        ("body", "expected_response"),
        [
            (
                {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "password": "test-password"},
                {
                    "connection_id": TEST_CONN_ID,
                    "conn_type": TEST_CONN_TYPE,
                    "description": None,
                    "extra": None,
                    "host": None,
                    "login": None,
                    "password": "***",
                    "port": None,
                    "schema": None,
                    "team_name": None,
                },
            ),
            (
                {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "password": "?>@#+!_%()#"},
                {
                    "connection_id": TEST_CONN_ID,
                    "conn_type": TEST_CONN_TYPE,
                    "description": None,
                    "extra": None,
                    "host": None,
                    "login": None,
                    "password": "***",
                    "port": None,
                    "schema": None,
                    "team_name": None,
                },
            ),
            (
                {
                    "connection_id": TEST_CONN_ID,
                    "conn_type": TEST_CONN_TYPE,
                    "password": "A!rF|0wi$aw3s0m3",
                    "extra": '{"password": "test-password"}',
                },
                {
                    "connection_id": TEST_CONN_ID,
                    "conn_type": TEST_CONN_TYPE,
                    "description": None,
                    "extra": '{"password": "***"}',
                    "host": None,
                    "login": None,
                    "password": "***",
                    "port": None,
                    "schema": None,
                    "team_name": None,
                },
            ),
        ],
    )
    def test_post_should_response_201_redacted_password(self, test_client, body, expected_response, session):
        # Passwords (both the field and inside ``extra``) are masked in the
        # response and in the audit log.
        response = test_client.post("/connections", json=body)
        assert response.status_code == 201
        assert response.json() == expected_response
        _check_last_log(session, dag_id=None, event="post_connection", logical_date=None, check_masked=True)
class TestPatchConnection(TestConnectionEndpoint):
@pytest.mark.parametrize(
("body", "expected_result"),
[
(
{"connection_id": TEST_CONN_ID, "conn_type": "new_type", "extra": '{"key": "var"}'},
{
"conn_type": "new_type",
"connection_id": TEST_CONN_ID,
"description": TEST_CONN_DESCRIPTION,
"extra": '{"key": "var"}',
"host": TEST_CONN_HOST,
"login": TEST_CONN_LOGIN,
"password": None,
"port": TEST_CONN_PORT,
"schema": None,
"team_name": None,
},
),
(
{"connection_id": TEST_CONN_ID, "conn_type": "type_patch", "host": "test_host_patch"},
{
"conn_type": "type_patch",
"connection_id": TEST_CONN_ID,
"description": TEST_CONN_DESCRIPTION,
"extra": None,
"host": "test_host_patch",
"login": TEST_CONN_LOGIN,
"password": None,
"port": TEST_CONN_PORT,
"schema": None,
"team_name": None,
},
),
(
{
"connection_id": TEST_CONN_ID,
"conn_type": "surprise",
"host": "test_host_patch",
"port": 80,
},
{
"conn_type": "surprise",
"connection_id": TEST_CONN_ID,
"description": TEST_CONN_DESCRIPTION,
"extra": None,
"host": "test_host_patch",
"login": TEST_CONN_LOGIN,
"password": None,
"port": 80,
"schema": None,
"team_name": None,
},
),
(
{"connection_id": TEST_CONN_ID, "conn_type": "really_new_type", "login": "test_login_patch"},
{
"conn_type": "really_new_type",
"connection_id": TEST_CONN_ID,
"description": TEST_CONN_DESCRIPTION,
"extra": None,
"host": TEST_CONN_HOST,
"login": "test_login_patch",
"password": None,
"port": TEST_CONN_PORT,
"schema": None,
"team_name": None,
},
),
(
{"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "port": 80},
{
"conn_type": TEST_CONN_TYPE,
"connection_id": TEST_CONN_ID,
"description": TEST_CONN_DESCRIPTION,
"extra": None,
"host": TEST_CONN_HOST,
"login": TEST_CONN_LOGIN,
"password": None,
"port": 80,
"schema": None,
"team_name": None,
},
),
(
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"port": 80,
"login": "test_login_patch",
"password": "test_password_patch",
},
{
"conn_type": TEST_CONN_TYPE,
"connection_id": TEST_CONN_ID,
"description": TEST_CONN_DESCRIPTION,
"extra": None,
"host": TEST_CONN_HOST,
"login": "test_login_patch",
"password": "test_password_patch",
"port": 80,
"schema": None,
"team_name": None,
},
),
(
# Sensitive "***" should be ignored.
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"port": 80,
"login": "test_login_patch",
"password": "***",
},
{
"conn_type": TEST_CONN_TYPE,
"connection_id": TEST_CONN_ID,
"description": TEST_CONN_DESCRIPTION,
"extra": None,
"host": TEST_CONN_HOST,
"login": "test_login_patch",
"password": None,
"port": 80,
"schema": None,
"team_name": None,
},
),
(
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"schema": "http_patch",
"extra": '{"extra_key_patch": "extra_value_patch"}',
},
{
"conn_type": TEST_CONN_TYPE,
"connection_id": TEST_CONN_ID,
"description": TEST_CONN_DESCRIPTION,
"extra": '{"extra_key_patch": "extra_value_patch"}',
"host": TEST_CONN_HOST,
"login": TEST_CONN_LOGIN,
"password": None,
"port": TEST_CONN_PORT,
"schema": "http_patch",
"team_name": None,
},
),
(
{ # Explicitly test that None is applied compared to if not provided
"conn_type": TEST_CONN_TYPE,
"connection_id": TEST_CONN_ID,
"description": None,
"extra": None,
"host": None,
"login": None,
"password": None,
"port": None,
"schema": None,
},
{
"conn_type": TEST_CONN_TYPE,
"connection_id": TEST_CONN_ID,
"description": None,
"extra": None,
"host": None,
"login": None,
"password": None,
"port": None,
"schema": None,
"team_name": None,
},
),
],
)
def test_patch_should_respond_200(
self, test_client, body: dict[str, str], expected_result: dict[str, str], session
):
self.create_connection()
response = test_client.patch(f"/connections/{TEST_CONN_ID}", json=body)
assert response.status_code == 200
_check_last_log(session, dag_id=None, event="patch_connection", logical_date=None)
assert response.json() == expected_result
def test_patch_with_team_should_respond_200(self, test_client, testing_team, session):
self.create_connection()
response = test_client.patch(
f"/connections/{TEST_CONN_ID}",
json={"connection_id": TEST_CONN_ID, "conn_type": "new_type", "team_name": testing_team.name},
)
assert response.status_code == 200
_check_last_log(session, dag_id=None, event="patch_connection", logical_date=None)
assert response.json() == {
"conn_type": "new_type",
"connection_id": TEST_CONN_ID,
"description": TEST_CONN_DESCRIPTION,
"extra": None,
"host": TEST_CONN_HOST,
"login": TEST_CONN_LOGIN,
"password": None,
"port": TEST_CONN_PORT,
"schema": None,
"team_name": testing_team.name,
}
def test_should_respond_401(self, unauthenticated_test_client):
response = unauthenticated_test_client.patch(f"/connections/{TEST_CONN_ID}", json={})
assert response.status_code == 401
def test_should_respond_403(self, unauthorized_test_client):
response = unauthorized_test_client.patch(f"/connections/{TEST_CONN_ID}", json={})
assert response.status_code == 403
@pytest.mark.parametrize(
("body", "updated_connection", "update_mask"),
[
(
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"extra": '{"key": "var"}',
"login": TEST_CONN_LOGIN,
"port": TEST_CONN_PORT,
},
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"extra": None,
"host": TEST_CONN_HOST,
"login": TEST_CONN_LOGIN,
"port": TEST_CONN_PORT,
"schema": None,
"password": None,
"description": TEST_CONN_DESCRIPTION,
"team_name": None,
},
{"update_mask": ["login", "port"]},
),
(
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"extra": '{"key": "var"}',
"login": None,
"port": None,
},
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"extra": None,
"host": TEST_CONN_HOST,
"login": None,
"port": None,
"schema": None,
"password": None,
"description": TEST_CONN_DESCRIPTION,
"team_name": None,
},
{"update_mask": ["login", "port"]},
),
(
{"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "host": "test_host_patch"},
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"extra": None,
"host": "test_host_patch",
"login": TEST_CONN_LOGIN,
"port": TEST_CONN_PORT,
"schema": None,
"password": None,
"description": TEST_CONN_DESCRIPTION,
"team_name": None,
},
{"update_mask": ["host"]},
),
(
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"host": "test_host_patch",
"port": 80,
},
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"extra": None,
"host": "test_host_patch",
"login": TEST_CONN_LOGIN,
"port": 80,
"schema": None,
"password": None,
"description": TEST_CONN_DESCRIPTION,
"team_name": None,
},
{"update_mask": ["host", "port"]},
),
(
{"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "login": "test_login_patch"},
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"extra": None,
"host": TEST_CONN_HOST,
"login": "test_login_patch",
"port": TEST_CONN_PORT,
"schema": None,
"password": None,
"description": TEST_CONN_DESCRIPTION,
"team_name": None,
},
{"update_mask": ["login"]},
),
(
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"host": TEST_CONN_HOST,
"port": 80,
},
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"extra": None,
"host": TEST_CONN_HOST,
"login": TEST_CONN_LOGIN,
"port": TEST_CONN_PORT,
"password": None,
"schema": None,
"description": TEST_CONN_DESCRIPTION,
"team_name": None,
},
{"update_mask": ["host"]},
),
(
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"extra": '{"new_extra_key": "new_extra_value"}',
"host": TEST_CONN_HOST,
"schema": "new_schema",
"port": 80,
},
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"extra": '{"new_extra_key": "new_extra_value"}',
"host": TEST_CONN_HOST,
"login": TEST_CONN_LOGIN,
"port": TEST_CONN_PORT,
"password": None,
"schema": "new_schema",
"description": TEST_CONN_DESCRIPTION,
"team_name": None,
},
{"update_mask": ["schema", "extra"]},
),
],
)
def test_patch_should_respond_200_with_update_mask(
self, test_client, session, body, updated_connection, update_mask
):
self.create_connection()
response = test_client.patch(f"/connections/{TEST_CONN_ID}", json=body, params=update_mask)
assert response.status_code == 200
connection = session.scalars(select(Connection).where(Connection.conn_id == TEST_CONN_ID)).first()
assert connection.password is None
assert response.json() == updated_connection
@pytest.mark.parametrize(
"body",
[
{
"connection_id": "i_am_not_a_connection",
"conn_type": TEST_CONN_TYPE,
"extra": '{"key": "var"}',
},
{
"connection_id": "i_am_not_a_connection",
"conn_type": TEST_CONN_TYPE,
"host": "test_host_patch",
},
{
"connection_id": "i_am_not_a_connection",
"conn_type": TEST_CONN_TYPE,
"host": "test_host_patch",
"port": 80,
},
{
"connection_id": "i_am_not_a_connection",
"conn_type": TEST_CONN_TYPE,
"login": "test_login_patch",
},
{"connection_id": "i_am_not_a_connection", "conn_type": TEST_CONN_TYPE, "port": 80},
{
"connection_id": "i_am_not_a_connection",
"conn_type": TEST_CONN_TYPE,
"port": 80,
"login": "test_login_patch",
},
],
)
def test_patch_should_respond_400(self, test_client, body):
self.create_connection()
response = test_client.patch(f"/connections/{TEST_CONN_ID}", json=body)
assert response.status_code == 400
assert response.json() == {
"detail": "The connection_id in the request body does not match the URL parameter",
}
@pytest.mark.parametrize(
"body",
[
{
"connection_id": "i_am_not_a_connection",
"conn_type": TEST_CONN_TYPE,
"extra": '{"key": "var"}',
},
{
"connection_id": "i_am_not_a_connection",
"conn_type": TEST_CONN_TYPE,
"host": "test_host_patch",
},
{
"connection_id": "i_am_not_a_connection",
"conn_type": TEST_CONN_TYPE,
"host": "test_host_patch",
"port": 80,
},
{
"connection_id": "i_am_not_a_connection",
"conn_type": TEST_CONN_TYPE,
"login": "test_login_patch",
},
{"connection_id": "i_am_not_a_connection", "conn_type": TEST_CONN_TYPE, "port": 80},
{
"connection_id": "i_am_not_a_connection",
"conn_type": TEST_CONN_TYPE,
"port": 80,
"login": "test_login_patch",
},
],
)
def test_patch_should_respond_404(self, test_client, body):
response = test_client.patch(f"/connections/{body['connection_id']}", json=body)
assert response.status_code == 404
assert response.json() == {
"detail": f"The Connection with connection_id: `{body['connection_id']}` was not found",
}
@pytest.mark.enable_redact
@pytest.mark.parametrize(
("body", "expected_response", "update_mask"),
[
(
{"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "password": "test-password"},
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"description": "some_description_a",
"extra": None,
"host": "some_host_a",
"login": "some_login",
"password": "***",
"port": 8080,
"schema": None,
"team_name": None,
},
{"update_mask": ["password"]},
),
(
{"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "password": "?>@#+!_%()#"},
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"description": "some_description_a",
"extra": None,
"host": "some_host_a",
"login": "some_login",
"password": "***",
"port": 8080,
"schema": None,
"team_name": None,
},
{"update_mask": ["password"]},
),
(
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"password": "A!rF|0wi$aw3s0m3",
"extra": '{"password": "test-password"}',
},
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"description": "some_description_a",
"extra": '{"password": "***"}',
"host": "some_host_a",
"login": "some_login",
"password": "***",
"port": 8080,
"schema": None,
"team_name": None,
},
{"update_mask": ["password", "extra"]},
),
],
)
def test_patch_should_response_200_redacted_password(
self, test_client, session, body, expected_response, update_mask
):
self.create_connections()
response = test_client.patch(f"/connections/{TEST_CONN_ID}", json=body, params=update_mask)
assert response.status_code == 200
assert response.json() == expected_response
_check_last_log(session, dag_id=None, event="patch_connection", logical_date=None, check_masked=True)
class TestConnection(TestConnectionEndpoint):
def setup_method(self):
try:
metadata("apache-airflow-providers-sqlite")
except PackageNotFoundError:
pytest.skip("The SQlite distribution package is not installed.")
@mock.patch.dict(os.environ, {"AIRFLOW__CORE__TEST_CONNECTION": "Enabled"})
@pytest.mark.parametrize(
("body", "message"),
[
({"connection_id": TEST_CONN_ID, "conn_type": "sqlite"}, "Connection successfully tested"),
(
{"connection_id": TEST_CONN_ID, "conn_type": "fs", "extra": '{"path": "/"}'},
"Path / is existing.",
),
],
)
def test_should_respond_200(self, test_client, body, message):
response = test_client.post("/connections/test", json=body)
assert response.status_code == 200
assert response.json() == {
"status": True,
"message": message,
}
def test_should_respond_401(self, unauthenticated_test_client):
response = unauthenticated_test_client.post(
"/connections/test", json={"connection_id": TEST_CONN_ID, "conn_type": "sqlite"}
)
assert response.status_code == 401
def test_should_respond_403(self, unauthorized_test_client):
response = unauthorized_test_client.post(
"/connections/test", json={"connection_id": TEST_CONN_ID, "conn_type": "sqlite"}
)
assert response.status_code == 403
@skip_if_force_lowest_dependencies_marker
@mock.patch.dict(os.environ, {"AIRFLOW__CORE__TEST_CONNECTION": "Enabled"})
@pytest.mark.parametrize(
"body",
[
{"connection_id": TEST_CONN_ID, "conn_type": "sqlite"},
{"connection_id": TEST_CONN_ID, "conn_type": "ftp"},
],
)
def test_connection_env_is_cleaned_after_run(self, test_client, body):
test_client.post("/connections/test", json=body)
assert not any([key.startswith(CONN_ENV_PREFIX) for key in os.environ.keys()])
@pytest.mark.parametrize(
"body",
[
{"connection_id": TEST_CONN_ID, "conn_type": "sqlite"},
{"connection_id": TEST_CONN_ID, "conn_type": "ftp"},
],
)
def test_should_respond_403_by_default(self, test_client, body):
response = test_client.post("/connections/test", json=body)
assert response.status_code == 403
assert response.json() == {
"detail": "Testing connections is disabled in Airflow configuration. "
"Contact your deployment admin to enable it."
}
@mock.patch.dict(os.environ, {"AIRFLOW__CORE__TEST_CONNECTION": "Enabled"})
def test_should_merge_password_with_existing_connection(self, test_client, session):
connection = Connection(
conn_id=TEST_CONN_ID,
conn_type="sqlite",
password="existing_password",
)
session.add(connection)
session.commit()
initial_count = session.scalar(select(func.count()).select_from(Connection))
captured_value = {}
def mock_test_connection(self):
captured_value["password"] = self.password
captured_value["conn_type"] = self.conn_type
return True, "mocked"
body = {
"connection_id": TEST_CONN_ID,
"conn_type": "new_sqlite",
"password": "***",
}
with mock.patch.object(Connection, "test_connection", mock_test_connection):
response = test_client.post("/connections/test", json=body)
assert response.status_code == 200
assert response.json()["status"] is True
# Verify that the existing password was used, not "***"
assert captured_value["password"] == "existing_password"
# Verify that payload info were used for other fields
assert captured_value["conn_type"] == "new_sqlite"
# Verify DB was not mutated
session.expire_all()
db_conn = session.scalar(select(Connection).filter_by(conn_id=TEST_CONN_ID))
assert db_conn.password == "existing_password"
assert session.scalar(select(func.count()).select_from(Connection)) == initial_count
@mock.patch.dict(os.environ, {"AIRFLOW__CORE__TEST_CONNECTION": "Enabled"})
def test_should_merge_extra_with_existing_connection(self, test_client, session):
connection = Connection(
conn_id=TEST_CONN_ID,
conn_type="fs",
extra='{"path": "/", "existing_key": "existing_value"}',
)
session.add(connection)
session.commit()
initial_count = session.scalar(select(func.count()).select_from(Connection))
captured_extra = {}
def mock_test_connection(self):
captured_extra["value"] = self.extra
return True, "mocked"
body = {
"connection_id": TEST_CONN_ID,
"conn_type": "fs",
"extra": '{"path": "/", "new_key": "new_value"}',
}
with mock.patch.object(Connection, "test_connection", mock_test_connection):
response = test_client.post("/connections/test", json=body)
assert response.status_code == 200
assert response.json()["status"] is True
# Verify that new_key is reflected in the merged extra
merged_extra = json.loads(captured_extra["value"])
assert merged_extra["new_key"] == "new_value"
assert merged_extra["path"] == "/"
# Verify DB was not mutated
session.expire_all()
db_conn = session.scalar(select(Connection).filter_by(conn_id=TEST_CONN_ID))
assert json.loads(db_conn.extra) == {"path": "/", "existing_key": "existing_value"}
assert session.scalar(select(func.count()).select_from(Connection)) == initial_count
@mock.patch.dict(os.environ, {"AIRFLOW__CORE__TEST_CONNECTION": "Enabled"})
def test_should_merge_both_password_and_extra(self, test_client, session):
connection = Connection(
conn_id=TEST_CONN_ID,
conn_type="fs",
password="existing_password",
extra='{"path": "/", "existing_key": "existing_value"}',
)
session.add(connection)
session.commit()
initial_count = session.scalar(select(func.count()).select_from(Connection))
captured_values = {}
def mock_test_connection(self):
captured_values["password"] = self.password
captured_values["extra"] = self.extra
return True, "mocked"
body = {
"connection_id": TEST_CONN_ID,
"conn_type": "fs",
"password": "***",
"extra": '{"path": "/", "new_key": "new_value"}',
}
with mock.patch.object(Connection, "test_connection", mock_test_connection):
response = test_client.post("/connections/test", json=body)
assert response.status_code == 200
assert response.json()["status"] is True
# Verify that the existing password was used, not "***"
assert captured_values["password"] == "existing_password"
# Verify that new_key is reflected in the merged extra
merged_extra = json.loads(captured_values["extra"])
assert merged_extra["new_key"] == "new_value"
assert merged_extra["path"] == "/"
# Verify DB was not mutated
session.expire_all()
db_conn = session.scalar(select(Connection).filter_by(conn_id=TEST_CONN_ID))
assert db_conn.password == "existing_password"
assert json.loads(db_conn.extra) == {"path": "/", "existing_key": "existing_value"}
assert session.scalar(select(func.count()).select_from(Connection)) == initial_count
@mock.patch.dict(os.environ, {"AIRFLOW__CORE__TEST_CONNECTION": "Enabled"})
def test_should_test_new_connection_without_existing(self, test_client):
body = {
"connection_id": "non_existent_conn",
"conn_type": "sqlite",
}
response = test_client.post("/connections/test", json=body)
assert response.status_code == 200
assert response.json()["status"] is True
class TestCreateDefaultConnections(TestConnectionEndpoint):
def test_should_respond_204(self, test_client, session):
response = test_client.post("/connections/defaults")
assert response.status_code == 204
assert response.content == b""
_check_last_log(session, dag_id=None, event="create_default_connections", logical_date=None)
def test_should_respond_401(self, unauthenticated_test_client):
response = unauthenticated_test_client.post("/connections/defaults")
assert response.status_code == 401
def test_should_respond_403(self, unauthorized_test_client):
response = unauthorized_test_client.post("/connections/defaults")
assert response.status_code == 403
@mock.patch("airflow.api_fastapi.core_api.routes.public.connections.db_create_default_connections")
def test_should_call_db_create_default_connections(self, mock_db_create_default_connections, test_client):
response = test_client.post("/connections/defaults")
assert response.status_code == 204
mock_db_create_default_connections.assert_called_once()
class TestBulkConnections(TestConnectionEndpoint):
@pytest.mark.parametrize(
("actions", "expected_results"),
[
pytest.param(
{
"actions": [
{
"action": "create",
"entities": [
{
"connection_id": "NOT_EXISTING_CONN_ID",
"conn_type": "NOT_EXISTING_CONN_TYPE",
}
],
"action_on_existence": "skip",
}
]
},
{
"create": {
"success": ["NOT_EXISTING_CONN_ID"],
"errors": [],
}
},
id="test_successful_create",
),
pytest.param(
{
"actions": [
{
"action": "create",
"entities": [
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
},
{
"connection_id": "NOT_EXISTING_CONN_ID",
"conn_type": "NOT_EXISTING_CONN_TYPE",
},
],
"action_on_existence": "skip",
}
]
},
{
"create": {
"success": ["NOT_EXISTING_CONN_ID"],
"errors": [],
}
},
id="test_successful_create_with_skip",
),
pytest.param(
{
"actions": [
{
"action": "create",
"entities": [
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"description": "new_description",
}
],
"action_on_existence": "overwrite",
}
]
},
{
"create": {
"success": [TEST_CONN_ID],
"errors": [],
}
},
id="test_create_with_overwrite",
),
pytest.param(
{
"actions": [
{
"action": "create",
"entities": [
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"description": TEST_CONN_DESCRIPTION,
"host": TEST_CONN_HOST,
"port": TEST_CONN_PORT,
"login": TEST_CONN_LOGIN,
},
],
"action_on_existence": "fail",
}
]
},
{
"create": {
"success": [],
"errors": [
{
"error": "The connections with these connection_ids: {'test_connection_id'} already exist.",
"status_code": 409,
},
],
}
},
id="test_create_conflict",
),
pytest.param(
{
"actions": [
{
"action": "update",
"entities": [
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"description": "new_description",
}
],
"action_on_non_existence": "skip",
}
]
},
{
"update": {
"success": [TEST_CONN_ID],
"errors": [],
}
},
id="test_successful_update",
),
pytest.param(
{
"actions": [
{
"action": "update",
"entities": [
{
"connection_id": "NOT_EXISTING_CONN_ID",
"conn_type": "NOT_EXISTING_CONN_TYPE",
}
],
"action_on_non_existence": "skip",
}
]
},
{
"update": {
"success": [],
"errors": [],
}
},
id="test_update_with_skip",
),
pytest.param(
{
"actions": [
{
"action": "update",
"entities": [
{
"connection_id": "NOT_EXISTING_CONN_ID",
"conn_type": "NOT_EXISTING_CONN_TYPE",
}
],
"action_on_non_existence": "fail",
}
]
},
{
"update": {
"success": [],
"errors": [
{
"error": "The connections with these connection_ids: {'NOT_EXISTING_CONN_ID'} were not found.",
"status_code": 404,
}
],
}
},
id="test_update_with_fail",
),
pytest.param(
{
"actions": [
{
"action": "update",
"entities": [
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"description": "updated_description",
}
],
"update_mask": ["description"],
"action_on_non_existence": "fail",
}
]
},
{"update": {"success": [TEST_CONN_ID], "errors": []}},
id="test_connection_update_with_valid_update_mask",
),
pytest.param(
{
"actions": [
{
"action": "delete",
"entities": [TEST_CONN_ID],
}
]
},
{
"delete": {
"success": [TEST_CONN_ID],
"errors": [],
}
},
id="test_successful_delete",
),
pytest.param(
{
"actions": [
{
"action": "delete",
"entities": ["NOT_EXISTING_CONN_ID"],
"action_on_non_existence": "skip",
}
]
},
{
"delete": {
"success": [],
"errors": [],
}
},
id="test_delete_with_skip",
),
pytest.param(
{
"actions": [
{
"action": "delete",
"entities": ["NOT_EXISTING_CONN_ID"],
"action_on_non_existence": "fail",
}
]
},
{
"delete": {
"success": [],
"errors": [
{
"error": "The connections with these connection_ids: {'NOT_EXISTING_CONN_ID'} were not found.",
"status_code": 404,
}
],
}
},
id="test_delete_not_found",
),
pytest.param(
{
"actions": [
{
"action": "create",
"entities": [
{
"connection_id": "NOT_EXISTING_CONN_ID",
"conn_type": "NOT_EXISTING_CONN_TYPE",
}
],
"action_on_existence": "skip",
},
{
"action": "update",
"entities": [
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"description": "new_description",
}
],
"action_on_non_existence": "skip",
},
{
"action": "delete",
"entities": [TEST_CONN_ID],
"action_on_non_existence": "skip",
},
]
},
{
"create": {
"success": ["NOT_EXISTING_CONN_ID"],
"errors": [],
},
"update": {
"success": [TEST_CONN_ID],
"errors": [],
},
"delete": {
"success": [TEST_CONN_ID],
"errors": [],
},
},
id="test_create_update_delete",
),
pytest.param(
{
"actions": [
{
"action": "update",
"entities": [
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"description": "updated_description",
}
],
"update_mask": ["description"],
"action_on_non_existence": "fail",
},
{
"action": "delete",
"entities": [TEST_CONN_ID],
"action_on_non_existence": "fail",
},
]
},
{
"update": {"success": [TEST_CONN_ID], "errors": []},
"delete": {"success": [TEST_CONN_ID], "errors": []},
},
id="test_connection_create_update_delete_with_update_mask",
),
],
)
def test_bulk_connections(self, test_client, actions, expected_results, session):
self.create_connections()
response = test_client.patch("/connections", json=actions)
response_data = response.json()
for connection_id, value in expected_results.items():
assert response_data[connection_id] == value
_check_last_log(session, dag_id=None, event="bulk_connections", logical_date=None)
def test_should_respond_401(self, unauthenticated_test_client):
response = unauthenticated_test_client.patch("/connections", json={})
assert response.status_code == 401
def test_should_respond_403(self, unauthorized_test_client):
response = unauthorized_test_client.patch(
"/connections",
json={
"actions": [
{
"action": "create",
"entities": [
{"connection_id": "test1", "conn_type": "test1"},
],
},
]
},
)
assert response.status_code == 403
def test_bulk_update_avoids_n_plus_one_queries(self, session):
self.create_connections()
session.expire_all()
request = BulkBody[ConnectionBody].model_validate(
{
"actions": [
{
"action": "update",
"entities": [
{
"connection_id": TEST_CONN_ID,
"conn_type": TEST_CONN_TYPE,
"description": "updated_description",
},
{
"connection_id": TEST_CONN_ID_2,
"conn_type": TEST_CONN_TYPE_2,
"description": "updated_description_2",
},
],
"update_mask": ["description"],
"action_on_non_existence": "fail",
}
]
}
)
service = BulkConnectionService(session=session, request=request)
results = BulkActionResponse()
with assert_queries_count(1, session=session):
service.handle_bulk_update(request.actions[0], results)
assert sorted(results.success) == [TEST_CONN_ID, TEST_CONN_ID_2]
def test_bulk_delete_avoids_n_plus_one_queries(self, session):
self.create_connections()
session.expire_all()
request = BulkBody[ConnectionBody].model_validate(
{
"actions": [
{
"action": "delete",
"entities": [TEST_CONN_ID, TEST_CONN_ID_2],
"action_on_non_existence": "fail",
}
]
}
)
service = BulkConnectionService(session=session, request=request)
results = BulkActionResponse()
with assert_queries_count(1, session=session):
service.handle_bulk_delete(request.actions[0], results)
assert sorted(results.success) == [TEST_CONN_ID, TEST_CONN_ID_2]
class TestPostConnectionExtraBackwardCompatibility(TestConnectionEndpoint):
def test_post_should_accept_empty_string_as_extra(self, test_client, session):
body = {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "extra": ""}
response = test_client.post("/connections", json=body)
assert response.status_code == 201
connection = session.scalars(select(Connection).where(Connection.conn_id == TEST_CONN_ID)).first()
assert connection is not None
assert connection.extra == "{}" # Backward compatibility: treat "" as empty JSON object
@pytest.mark.parametrize(
("extra", "expected_error_message"),
[
("[1,2,3]", "Expected JSON object in `extra` field, got non-dict JSON"),
("some_string", "Encountered non-JSON in `extra` field"),
],
)
def test_post_should_fail_with_non_json_object_as_extra(
self, test_client, extra, expected_error_message, session
):
"""JSON primitives are a valid JSON and should raise 422 validation error."""
body = {"connection_id": TEST_CONN_ID, "conn_type": TEST_CONN_TYPE, "extra": extra}
response = test_client.post("/connections", json=body)
assert response.status_code == 422
assert (
"Value error, The `extra` field must be a valid JSON object (e.g., {'key': 'value'})"
in response.json()["detail"][0]["msg"]
)
_check_last_log(
session,
dag_id=None,
event="post_connection",
logical_date=None,
expected_extra={
"connection_id": "test_connection_id",
"conn_type": "test_type",
"extra": expected_error_message,
"method": "POST",
},
) | python | github | https://github.com/apache/airflow | airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_connections.py |
#include "foobar.h"
int x = foo();
// RUN: clang-include-cleaner -print=changes %s -- -I%S/Inputs/ | FileCheck --check-prefix=CHANGE %s
// CHANGE: - "foobar.h"
// CHANGE-NEXT: + "foo.h"
// RUN: clang-include-cleaner -disable-remove -print=changes %s -- -I%S/Inputs/ | FileCheck --check-prefix=INSERT %s
// INSERT-NOT: - "foobar.h"
// INSERT: + "foo.h"
// RUN: clang-include-cleaner -disable-insert -print=changes %s -- -I%S/Inputs/ | FileCheck --check-prefix=REMOVE %s
// REMOVE: - "foobar.h"
// REMOVE-NOT: + "foo.h"
// RUN: clang-include-cleaner -print=changes %s --ignore-headers="foobar\.h,foo\.h" -- -I%S/Inputs/ | FileCheck --match-full-lines --allow-empty --check-prefix=IGNORE %s
// IGNORE-NOT: - "foobar.h"
// IGNORE-NOT: + "foo.h"
// RUN: clang-include-cleaner -print=changes %s --ignore-headers="foobar.*\.h" -- -I%S/Inputs/ | FileCheck --match-full-lines --allow-empty --check-prefix=IGNORE2 %s
// IGNORE2-NOT: - "foobar.h"
// IGNORE2: + "foo.h"
// RUN: clang-include-cleaner -print=changes %s --ignore-headers= -- -I%S/Inputs/ | FileCheck --allow-empty --check-prefix=IGNORE3 %s
// IGNORE3: - "foobar.h"
// IGNORE3: + "foo.h"
// RUN: clang-include-cleaner -print=changes %s --only-headers="foo\.h" -- -I%S/Inputs/ | FileCheck --match-full-lines --allow-empty --check-prefix=ONLY %s
// ONLY-NOT: - "foobar.h"
// ONLY: + "foo.h"
// RUN: clang-include-cleaner -print=changes %s --only-headers= -- -I%S/Inputs/ | FileCheck --allow-empty --check-prefix=ONLY2 %s
// ONLY2: - "foobar.h"
// ONLY2: + "foo.h"
// RUN: clang-include-cleaner -print %s -- -I%S/Inputs/ | FileCheck --match-full-lines --check-prefix=PRINT %s
// PRINT: #include "foo.h"
// PRINT-NOT: {{^}}#include "foobar.h"{{$}}
// RUN: cp %s %t.cpp
// RUN: clang-include-cleaner -edit %t.cpp -- -I%S/Inputs/
// RUN: FileCheck --match-full-lines --check-prefix=EDIT %s < %t.cpp
// EDIT: #include "foo.h"
// EDIT-NOT: {{^}}#include "foobar.h"{{$}}
// RUN: cp %s %t.cpp
// RUN: clang-include-cleaner -edit --ignore-headers="foobar\.h,foo\.h" %t.cpp -- -I%S/Inputs/
// RUN: FileCheck --match-full-lines --check-prefix=EDIT2 %s < %t.cpp
// EDIT2-NOT: {{^}}#include "foo.h"{{$}}
// RUN: rm -rf %t.dir && mkdir -p %t.dir
// RUN: cp %s %t.cpp
// RUN: echo "[{\"directory\":\"%t.dir\",\"file\":\"../%{t:stem}.tmp.cpp\",\"command\":\":clang++ -I%S/Inputs/ ../%{t:stem}.tmp.cpp\"}]" | sed -e 's/\\/\\\\/g' > %t.dir/compile_commands.json
// RUN: pushd %t.dir
// RUN: clang-include-cleaner -p %{t:stem}.tmp.dir -edit ../%{t:stem}.tmp.cpp
// RUN: popd
// RUN: FileCheck --match-full-lines --check-prefix=EDIT3 %s < %t.cpp
// EDIT3: #include "foo.h"
// EDIT3-NOT: {{^}}#include "foobar.h"{{$}}
// RUN: clang-include-cleaner -insert=false -print=changes %s -- -I%S/Inputs/ 2>&1 | \
// RUN: FileCheck --check-prefix=DEPRECATED-INSERT %s
// DEPRECATED-INSERT: warning: '-insert=0' is deprecated in favor of '-disable-insert'. The old flag was confusing since it suggested that inserts were disabled by default, when they were actually enabled.
// RUN: clang-include-cleaner -remove=false -print=changes %s -- -I%S/Inputs/ 2>&1 | \
// RUN: FileCheck --check-prefix=DEPRECATED-REMOVE %s
// DEPRECATED-REMOVE: warning: '-remove=0' is deprecated in favor of '-disable-remove'. The old flag was confusing since it suggested that removes were disabled by default, when they were actually enabled.
// RUN: clang-include-cleaner -insert=false -remove=false -print=changes %s -- -I%S/Inputs/ 2>&1 | \
// RUN: FileCheck --check-prefix=DEPRECATED-BOTH %s
// DEPRECATED-BOTH: warning: '-insert=0' is deprecated in favor of '-disable-insert'. The old flag was confusing since it suggested that inserts were disabled by default, when they were actually enabled.
// DEPRECATED-BOTH: warning: '-remove=0' is deprecated in favor of '-disable-remove'. The old flag was confusing since it suggested that removes were disabled by default, when they were actually enabled. | cpp | github | https://github.com/llvm/llvm-project | clang-tools-extra/include-cleaner/test/tool.cpp |
"""
This module implements the FormRequest class which is a more covenient class
(than Request) to generate Requests based on form data.
See documentation in docs/topics/request-response.rst
"""
import urllib
import lxml.html
from scrapy.http.request import Request
from scrapy.utils.python import unicode_to_str
class FormRequest(Request):
def __init__(self, *args, **kwargs):
formdata = kwargs.pop('formdata', None)
if formdata and kwargs.get('method') is None:
kwargs['method'] = 'POST'
super(FormRequest, self).__init__(*args, **kwargs)
if formdata:
items = formdata.iteritems() if isinstance(formdata, dict) else formdata
querystr = _urlencode(items, self.encoding)
if self.method == 'POST':
self.headers.setdefault('Content-Type', 'application/x-www-form-urlencoded')
self._set_body(querystr)
else:
self._set_url(self.url + ('&' if '?' in self.url else '?') + querystr)
@classmethod
def from_response(cls, response, formname=None, formnumber=0, formdata=None,
clickdata=None, dont_click=False, formxpath=None, **kwargs):
kwargs.setdefault('encoding', response.encoding)
form = _get_form(response, formname, formnumber, formxpath)
formdata = _get_inputs(form, formdata, dont_click, clickdata, response)
url = form.action or form.base_url
method = kwargs.pop('method', form.method)
return cls(url, method=method, formdata=formdata, **kwargs)
def _urlencode(seq, enc):
values = [(unicode_to_str(k, enc), unicode_to_str(v, enc))
for k, vs in seq
for v in (vs if hasattr(vs, '__iter__') else [vs])]
return urllib.urlencode(values, doseq=1)
def _get_form(response, formname, formnumber, formxpath):
"""Find the form element """
from scrapy.selector.lxmldocument import LxmlDocument
root = LxmlDocument(response, lxml.html.HTMLParser)
forms = root.xpath('//form')
if not forms:
raise ValueError("No <form> element found in %s" % response)
if formname is not None:
f = root.xpath('//form[@name="%s"]' % formname)
if f:
return f[0]
# Get form element from xpath, if not found, go up
if formxpath is not None:
nodes = root.xpath(formxpath)
if nodes:
el = nodes[0]
while True:
if el.tag == 'form':
return el
el = el.getparent()
if el is None:
break
raise ValueError('No <form> element found with %s' % formxpath)
# If we get here, it means that either formname was None
# or invalid
if formnumber is not None:
try:
form = forms[formnumber]
except IndexError:
raise IndexError("Form number %d not found in %s" %
(formnumber, response))
else:
return form
def _get_inputs(form, formdata, dont_click, clickdata, response):
try:
formdata = dict(formdata or ())
except (ValueError, TypeError):
raise ValueError('formdata should be a dict or iterable of tuples')
inputs = form.xpath('descendant::textarea'
'|descendant::select'
'|descendant::input[@type!="submit" and @type!="image" and @type!="reset"'
'and ((@type!="checkbox" and @type!="radio") or @checked)]')
values = [(k, u'' if v is None else v) \
for k, v in (_value(e) for e in inputs) \
if k and k not in formdata]
if not dont_click:
clickable = _get_clickable(clickdata, form)
if clickable and clickable[0] not in formdata and not clickable[0] is None:
values.append(clickable)
values.extend(formdata.iteritems())
return values
def _value(ele):
n = ele.name
v = ele.value
if ele.tag == 'select':
return _select_value(ele, n, v)
return n, v
def _select_value(ele, n, v):
multiple = ele.multiple
if v is None and not multiple:
# Match browser behaviour on simple select tag without options selected
# And for select tags wihout options
o = ele.value_options
return (n, o[0]) if o else (None, None)
elif v is not None and multiple:
# This is a workround to bug in lxml fixed 2.3.1
# fix https://github.com/lxml/lxml/commit/57f49eed82068a20da3db8f1b18ae00c1bab8b12#L1L1139
selected_options = ele.xpath('.//option[@selected]')
v = [(o.get('value') or o.text or u'').strip() for o in selected_options]
return n, v
def _get_clickable(clickdata, form):
"""
Returns the clickable element specified in clickdata,
if the latter is given. If not, it returns the first
clickable element found
"""
clickables = [el for el in form.xpath('.//input[@type="submit"]')]
if not clickables:
return
# If we don't have clickdata, we just use the first clickable element
if clickdata is None:
el = clickables[0]
return (el.name, el.value)
# If clickdata is given, we compare it to the clickable elements to find a
# match. We first look to see if the number is specified in clickdata,
# because that uniquely identifies the element
nr = clickdata.get('nr', None)
if nr is not None:
try:
el = list(form.inputs)[nr]
except IndexError:
pass
else:
return (el.name, el.value)
# We didn't find it, so now we build an XPath expression out of the other
# arguments, because they can be used as such
xpath = u'.//*' + \
u''.join(u'[@%s="%s"]' % c for c in clickdata.iteritems())
el = form.xpath(xpath)
if len(el) == 1:
return (el[0].name, el[0].value)
elif len(el) > 1:
raise ValueError("Multiple elements found (%r) matching the criteria "
"in clickdata: %r" % (el, clickdata))
else:
raise ValueError('No clickable element matching clickdata: %r' % (clickdata,)) | unknown | codeparrot/codeparrot-clean | ||
{
"RESET": {
"summary": "Resets the connection.",
"complexity": "O(1)",
"group": "connection",
"since": "6.2.0",
"arity": 1,
"function": "resetCommand",
"command_flags": [
"NOSCRIPT",
"LOADING",
"STALE",
"FAST",
"NO_AUTH",
"ALLOW_BUSY"
],
"acl_categories": [
"CONNECTION"
],
"reply_schema": {
"const": "RESET"
}
}
} | json | github | https://github.com/redis/redis | src/commands/reset.json |
#ifndef BOOST_BIND_PROTECT_HPP_INCLUDED
#define BOOST_BIND_PROTECT_HPP_INCLUDED
//
// protect.hpp
//
// Copyright 2002, 2020 Peter Dimov
// Copyright 2009 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
#include <utility>
namespace boost
{
namespace _bi
{
template<class T> struct protect_make_void
{
typedef void type;
};
template<class F, class E = void> struct protect_result_type
{
};
template<class F> struct protect_result_type< F, typename protect_make_void<typename F::result_type>::type >
{
typedef typename F::result_type result_type;
};
template<class F> class protected_bind_t: public protect_result_type<F>
{
private:
F f_;
public:
explicit protected_bind_t( F f ): f_( f )
{
}
template<class... A> auto operator()( A&&... a ) -> decltype( f_( std::forward<A>(a)... ) )
{
return f_( std::forward<A>(a)... );
}
template<class... A> auto operator()( A&&... a ) const -> decltype( f_( std::forward<A>(a)... ) )
{
return f_( std::forward<A>(a)... );
}
};
} // namespace _bi
template<class F> _bi::protected_bind_t<F> protect(F f)
{
return _bi::protected_bind_t<F>(f);
}
} // namespace boost
#endif // #ifndef BOOST_BIND_PROTECT_HPP_INCLUDED | unknown | github | https://github.com/mysql/mysql-server | extra/boost/boost_1_87_0/boost/bind/protect.hpp |
from mitmproxy.net.http import http1
from mitmproxy import exceptions
from mitmproxy import ctx
from mitmproxy.utils import human
class StreamBodies:
def __init__(self):
self.max_size = None
def configure(self, updated):
if "stream_large_bodies" in updated and ctx.options.stream_large_bodies:
try:
self.max_size = human.parse_size(ctx.options.stream_large_bodies)
except ValueError as e:
raise exceptions.OptionsError(e)
def run(self, f, is_request):
if self.max_size:
r = f.request if is_request else f.response
try:
expected_size = http1.expected_http_body_size(
f.request, f.response if not is_request else None
)
except exceptions.HttpException:
f.reply.kill()
return
if expected_size and not r.raw_content and not (0 <= expected_size <= self.max_size):
# r.stream may already be a callable, which we want to preserve.
r.stream = r.stream or True
ctx.log.info("Streaming {} {}".format("response from" if not is_request else "request to", f.request.host))
def requestheaders(self, f):
self.run(f, True)
def responseheaders(self, f):
self.run(f, False)
def websocket_start(self, f):
if ctx.options.stream_websockets:
f.stream = True
ctx.log.info("Streaming WebSocket messages between {client} and {server}".format(
client=human.format_address(f.client_conn.address),
server=human.format_address(f.server_conn.address))
) | unknown | codeparrot/codeparrot-clean | ||
#!/usr/bin/env python
import os, signal, time, re
import unittest
import psutil
from subprocess import PIPE
class GpsshTestCase(unittest.TestCase):
# return count of stranded ssh processes
def searchForProcessOrChildren(self):
euid = os.getuid()
count = 0
for p in psutil.process_iter():
if p.uids().effective != euid:
continue
if not re.search('ssh', ' '.join(p.cmdline())):
continue
if p.ppid() != 1:
continue
count += 1
return count
def test00_gpssh_sighup(self):
"""Verify that gppsh handles sighup
and terminates cleanly.
"""
before_count = self.searchForProcessOrChildren()
p = psutil.Popen("gpssh -h localhost", shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
pid = p.pid
time.sleep(3)
try:
os.kill(int(pid), signal.SIGHUP)
except Exception:
pass
max_attempts = 6
for i in range(max_attempts):
after_count = self.searchForProcessOrChildren()
error_count = after_count - before_count
if error_count:
if (i + 1) == max_attempts:
self.fail("Found %d new stranded gpssh processes after issuing sig HUP" % error_count)
time.sleep(.5)
if __name__ == "__main__":
unittest.main() | unknown | codeparrot/codeparrot-clean | ||
@file:Suppress("NAMED_ARGUMENTS_NOT_ALLOWED", "DEPRECATION") // KT-21913
package kotlinx.coroutines
import kotlinx.coroutines.testing.*
import kotlin.test.*
class DelayTest : TestBase() {
@Test
fun testCancellation() = runTest(expected = {it is CancellationException }) {
runAndCancel(1000)
}
@Test
fun testMaxLongValue()= runTest(expected = {it is CancellationException }) {
runAndCancel(Long.MAX_VALUE)
}
@Test
fun testMaxIntValue()= runTest(expected = {it is CancellationException }) {
runAndCancel(Int.MAX_VALUE.toLong())
}
@Test
fun testRegularDelay() = runTest {
val deferred = async {
expect(2)
delay(1)
expect(3)
}
expect(1)
yield()
deferred.await()
finish(4)
}
private suspend fun runAndCancel(time: Long) = coroutineScope {
expect(1)
val deferred = async {
expect(2)
delay(time)
expectUnreached()
}
yield()
expect(3)
require(deferred.isActive)
deferred.cancel()
finish(4)
deferred.await()
}
} | kotlin | github | https://github.com/Kotlin/kotlinx.coroutines | kotlinx-coroutines-core/common/test/DelayTest.kt |
import copy
import numpy as np
TEST_CYCLES = 100
"""
GOAL: Take a 100 x 100 grid and square all values to the power of 2 and Test
several difference approaches to completing this task, with python lists and
numpy arrays.
"""
def make_2d_grid(x,y):
list_based_2d_grid = []
for i in range(x):
list_based_2d_grid.append(range(y))
return list_based_2d_grid
default_array = np.array(make_2d_grid(100,100))
PYTHON_GRID = make_2d_grid(100,100)
def square_2d_list(input_grid=PYTHON_GRID):
""" This will raise each element in a 2d grid to the power of 2.
Input:
input_grid <list> List of lists whos elements will be squared to a power of 2.
Return:
power_grid <list> The input list elements square to the power of 2.
"""
# Must do this since Lists are mutable
power_grid = copy.deepcopy(input_grid)
for ix, x in enumerate(input_grid):
for iy, y in enumerate(x):
# Final assignment should be to the copied grid
power_grid[ix][iy] = pow(y, 2)
return power_grid
def square_2d_list_w_map(input_grid=PYTHON_GRID):
""" This will square each element in a 2d grid to the power of 2.
Input:
input_grid <list> List of lists whos elements will be squared to a power of 2.
Return:
power_grid <list> The input list elements square to the power of 2.
"""
# Must do this since Lists are mutable
power_grid = copy.deepcopy(input_grid)
for ix, x in enumerate(power_grid):
power_grid[ix] = map(lambda element: pow(element, 2), x)
return power_grid
def square_2d_list_w_list_comp(input_grid=PYTHON_GRID):
""" This will square each element in a 2d grid to the power of 2.
Input:
input_grid <list> List of lists whos elements will be squared to a power of 2.
Return:
power_grid <list> The input list elements square to the power of 2.
"""
# Must do this since Lists are mutable
power_grid = copy.deepcopy(input_grid)
grid = [[pow(y,2) for y in x] for x in power_grid]
return grid
def square_2d_array_slow_looping(input_array=default_array):
""" This will square each element in a 2d array to the power of 2, using a
typical pythonic FOR LOOP approach.
Input:
input_grid <numpy.ndarray> 2d Array whos elements will be squared to a power of two.
Return:
power_grid <numpy.ndarray> The input array elements square to the power of 2.
"""
power_grid = input_array.copy()
for ix, x in enumerate(input_array):
for iy, y in enumerate(x):
# Final assignment should be to the copied grid
power_grid[ix][iy] = y**2
return power_grid
def square_2d_array_vectorized(input_array=default_array):
""" This will square each element in a 2d array to the power of 2, using the
numpy vectorize approach.
Input:
input_grid <numpy.ndarray> 2d Array whos elements will be squared to a power of two.
Return:
power_grid <numpy.ndarray> The input array elements square to the power of 2.
"""
# Keeping the copy function in here to compare fairly
power_grid_copy = input_array.copy()
power_grid = input_array**2
return power_grid
if __name__ == '__main__':
import timeit
print("Python List Based Test")
print(timeit.timeit("square_2d_list()", setup="from __main__ import square_2d_list", number=TEST_CYCLES))
print("Python List Test Using map built-in function")
print(timeit.timeit("square_2d_list_w_map()", setup="from __main__ import square_2d_list_w_map", number=TEST_CYCLES))
print("Python List Test Using List Comprehension")
print(timeit.timeit("square_2d_list_w_list_comp()", setup="from __main__ import square_2d_list_w_list_comp", number=TEST_CYCLES))
print("Numpy Array using Pythonic Looping")
print(timeit.timeit("square_2d_array_slow_looping()", setup="from __main__ import square_2d_array_slow_looping", number=TEST_CYCLES))
print("Numpy Array using Numpy Vectorize")
print(timeit.timeit("square_2d_array_vectorized()", setup="from __main__ import square_2d_array_vectorized", number=TEST_CYCLES)) | unknown | codeparrot/codeparrot-clean | ||
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2005-2007 Carabos Coop. V. All rights reserved
# Copyright (C) 2008-2019 Vicent Mas. All rights reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Vicent Mas - vmas@vitables.org
"""
This module defines a data structure to be used for the model of the databases
tree. The data structure is equivalent to a (non root) group node in a
`PyTables` file.
"""
__docformat__ = 'restructuredtext'
import vitables.utils
from vitables.h5db import tnode_editor
from vitables.nodeprops import nodeinfo
from vitables.nodeprops import grouppropdlg
class GroupNode(object):
"""
A group node in the tree of databases model.
:Parameters:
- `parent`: the parent of the node.
- `name`: the name of the node
"""
def __init__(self, model, parent, name):
"""Create a group node for the tree of databases model.
A GroupNode represents a (non root) group of a `HDF5` file and has
a parent (another group node of the tree of databases model) and
a name.
"""
self.dbt_model = model
self.updated = False
self.children = []
self.parent = parent
self.node = parent.node._f_get_child(name)
self.node_kind = 'group'
self.has_view = False
# Attributes that the tree of databases view will use
# name --> DisplayRole
# nodepath --> ToolTipRole
# as_record --> StatusTipRole
# icon --> DecorationRole
self.name = name
parentpath = parent.nodepath
if parentpath.endswith('/'):
parentpath = parentpath[:-1]
self.nodepath = '{0}/{1}'.format(parentpath, name)
self.filepath = parent.filepath
self.as_record = '{0}->{1}'.format(self.filepath, self.nodepath)
icons = vitables.utils.getIcons()
self.closed_folder = icons['folder']
self.open_folder = icons['document-open-folder']
self.icon = icons['folder']
def __len__(self):
"""The number of children of this grup."""
return len(self.children)
def insertChild(self, child, position=0):
"""Insert a child in a group node.
:Parameters:
- `child`: the node being inserted
- `position`: the insertion position
"""
self.children.insert(position, child)
def childAtRow(self, row):
"""The row-th child of this node.
:Parameter row: the position of the retrieved child
"""
assert 0 <= row <= len(self.children)
return self.children[row]
def rowOfChild(self, child):
"""The row index of a given child.
:Parameter child: the child node whose position is being retrieved.
"""
for pos, node in enumerate(self.children):
if node == child:
return pos
return -1
def row(self):
"""The position of this node in the parent's list of children.
"""
if self.parent:
return self.parent.children.index(self)
return 0
def findChild(self, childname):
"""The child node with a given name.
:Parameter childname: the name of the wanted child node.
"""
for node in self.children:
if node.name == childname:
return node
return None
def editor(self):
"""Return an instance of `TNodeEditor`.
"""
return tnode_editor.TNodeEditor(self.dbt_model.getDBDoc(self.filepath))
def properties(self):
"""The Properties dialog for this node.
"""
info = nodeinfo.NodeInfo(self)
grouppropdlg.GroupPropDlg(info) | unknown | codeparrot/codeparrot-clean | ||
--
-- Regression Tests for Label Management
--
--
-- Setup
--
CREATE TABLE t1 (a int, b text);
INSERT INTO t1 VALUES (1, 'aaa'), (2, 'bbb'), (3, 'ccc');
CREATE TABLE t2 AS SELECT * FROM t1 WHERE a % 2 = 0;
CREATE FUNCTION f1 () RETURNS text
AS 'SELECT sepgsql_getcon()'
LANGUAGE sql;
CREATE FUNCTION f2 () RETURNS text
AS 'SELECT sepgsql_getcon()'
LANGUAGE sql;
SECURITY LABEL ON FUNCTION f2()
IS 'system_u:object_r:sepgsql_trusted_proc_exec_t:s0';
CREATE FUNCTION f3 () RETURNS text
AS 'BEGIN
RAISE EXCEPTION ''an exception from f3()'';
RETURN NULL;
END;' LANGUAGE plpgsql;
SECURITY LABEL ON FUNCTION f3()
IS 'system_u:object_r:sepgsql_trusted_proc_exec_t:s0';
CREATE FUNCTION f4 () RETURNS text
AS 'SELECT sepgsql_getcon()'
LANGUAGE sql;
SECURITY LABEL ON FUNCTION f4()
IS 'system_u:object_r:sepgsql_nosuch_trusted_proc_exec_t:s0';
CREATE FUNCTION f5 (text) RETURNS bool
AS 'SELECT sepgsql_setcon($1)'
LANGUAGE sql;
SECURITY LABEL ON FUNCTION f5(text)
IS 'system_u:object_r:sepgsql_regtest_trusted_proc_exec_t:s0';
CREATE TABLE auth_tbl(uname text, credential text, label text);
INSERT INTO auth_tbl
VALUES ('foo', 'acbd18db4cc2f85cedef654fccc4a4d8', 'sepgsql_regtest_foo_t:s0'),
('var', 'b2145aac704ce76dbe1ac7adac535b23', 'sepgsql_regtest_var_t:s0'),
('baz', 'b2145aac704ce76dbe1ac7adac535b23', 'sepgsql_regtest_baz_t:s0');
SECURITY LABEL ON TABLE auth_tbl
IS 'system_u:object_r:sepgsql_secret_table_t:s0';
CREATE FUNCTION auth_func(text, text) RETURNS bool
LANGUAGE sql
AS 'SELECT sepgsql_setcon(regexp_replace(sepgsql_getcon(), ''_r:.*$'', ''_r:'' || label))
FROM auth_tbl WHERE uname = $1 AND credential = $2';
SECURITY LABEL ON FUNCTION auth_func(text,text)
IS 'system_u:object_r:sepgsql_regtest_trusted_proc_exec_t:s0';
CREATE TABLE foo_tbl(a int, b text);
INSERT INTO foo_tbl VALUES (1, 'aaa'), (2,'bbb'), (3,'ccc'), (4,'ddd');
SECURITY LABEL ON TABLE foo_tbl
IS 'system_u:object_r:sepgsql_regtest_foo_table_t:s0';
CREATE TABLE var_tbl(x int, y text);
INSERT INTO var_tbl VALUES (2,'xxx'), (3,'yyy'), (4,'zzz'), (5,'xyz');
SECURITY LABEL ON TABLE var_tbl
IS 'system_u:object_r:sepgsql_regtest_var_table_t:s0';
CREATE TABLE foo_ptbl(o int, p text) PARTITION BY RANGE (o);
CREATE TABLE foo_ptbl_ones PARTITION OF foo_ptbl FOR VALUES FROM ('0') TO ('10');
CREATE TABLE foo_ptbl_tens PARTITION OF foo_ptbl FOR VALUES FROM ('10') TO ('100');
INSERT INTO foo_ptbl VALUES (0, 'aaa'), (9,'bbb'), (10,'ccc'), (99,'ddd');
SECURITY LABEL ON TABLE foo_ptbl
IS 'system_u:object_r:sepgsql_regtest_foo_table_t:s0';
CREATE TABLE var_ptbl(q int, r text) PARTITION BY RANGE (q);
CREATE TABLE var_ptbl_ones PARTITION OF var_ptbl FOR VALUES FROM ('0') TO ('10');
CREATE TABLE var_ptbl_tens PARTITION OF var_ptbl FOR VALUES FROM ('10') TO ('100');
INSERT INTO var_ptbl VALUES (0,'xxx'), (9,'yyy'), (10,'zzz'), (99,'xyz');
SECURITY LABEL ON TABLE var_ptbl
IS 'system_u:object_r:sepgsql_regtest_var_table_t:s0';
--
-- Tests for default labeling behavior
--
-- @SECURITY-CONTEXT=unconfined_u:unconfined_r:sepgsql_regtest_user_t:s0
CREATE TABLE t3 (s int, t text);
INSERT INTO t3 VALUES (1, 'sss'), (2, 'ttt'), (3, 'uuu');
-- @SECURITY-CONTEXT=unconfined_u:unconfined_r:sepgsql_regtest_dba_t:s0
CREATE TABLE t4 (m int, n text);
INSERT INTO t4 VALUES (1,'mmm'), (2,'nnn'), (3,'ooo');
-- @SECURITY-CONTEXT=unconfined_u:unconfined_r:sepgsql_regtest_user_t:s0
CREATE TABLE tpart (o int, p text) PARTITION BY RANGE (o);
CREATE TABLE tpart_ones PARTITION OF tpart FOR VALUES FROM ('0') TO ('10');
-- @SECURITY-CONTEXT=unconfined_u:unconfined_r:sepgsql_regtest_dba_t:s0
CREATE TABLE tpart_tens PARTITION OF tpart FOR VALUES FROM ('10') TO ('100');
INSERT INTO tpart VALUES (0, 'aaa');
INSERT INTO tpart VALUES (9, 'bbb');
INSERT INTO tpart VALUES (99, 'ccc');
SELECT objtype, objname, label FROM pg_seclabels
WHERE provider = 'selinux' AND objtype = 'table' AND objname in ('t1', 't2', 't3',
'tpart',
'tpart_ones',
'tpart_tens')
ORDER BY objname COLLATE "C" ASC;
SELECT objtype, objname, label FROM pg_seclabels
WHERE provider = 'selinux' AND objtype = 'column' AND (objname like 't3.%'
OR objname like 't4.%'
OR objname like 'tpart.%'
OR objname like 'tpart_ones.%'
OR objname like 'tpart_tens.%')
ORDER BY objname COLLATE "C" ASC;
--
-- Tests for SECURITY LABEL
--
-- @SECURITY-CONTEXT=unconfined_u:unconfined_r:sepgsql_regtest_dba_t:s0
SECURITY LABEL ON TABLE t1
IS 'system_u:object_r:sepgsql_ro_table_t:s0'; -- ok
SECURITY LABEL ON TABLE t2
IS 'invalid security context'; -- be failed
SECURITY LABEL ON COLUMN t2
IS 'system_u:object_r:sepgsql_ro_table_t:s0'; -- be failed
SECURITY LABEL ON COLUMN t2.b
IS 'system_u:object_r:sepgsql_ro_table_t:s0'; -- ok
SECURITY LABEL ON TABLE tpart
IS 'system_u:object_r:sepgsql_ro_table_t:s0'; -- ok
SECURITY LABEL ON TABLE tpart
IS 'invalid security context'; -- failed
SECURITY LABEL ON COLUMN tpart
IS 'system_u:object_r:sepgsql_ro_table_t:s0'; -- failed
SECURITY LABEL ON COLUMN tpart.o
IS 'system_u:object_r:sepgsql_ro_table_t:s0'; -- ok
--
-- Tests for Trusted Procedures
--
-- @SECURITY-CONTEXT=unconfined_u:unconfined_r:sepgsql_regtest_user_t:s0
SET sepgsql.debug_audit = true;
SET client_min_messages = log;
SELECT f1(); -- normal procedure
SELECT f2(); -- trusted procedure
SELECT f3(); -- trusted procedure that raises an error
SELECT f4(); -- failed on domain transition
SELECT sepgsql_getcon(); -- client's label must be restored
--
-- Test for Dynamic Domain Transition
--
-- validation of transaction aware dynamic-transition
-- @SECURITY-CONTEXT=unconfined_u:unconfined_r:sepgsql_regtest_superuser_t:s0:c0.c25
SELECT sepgsql_setcon('unconfined_u:unconfined_r:sepgsql_regtest_superuser_t:s0:c0.c15');
SELECT sepgsql_getcon();
SELECT sepgsql_setcon(NULL); -- failed to reset
SELECT sepgsql_getcon();
BEGIN;
SELECT sepgsql_setcon('unconfined_u:unconfined_r:sepgsql_regtest_superuser_t:s0:c0.c12');
SELECT sepgsql_getcon();
SAVEPOINT svpt_1;
SELECT sepgsql_setcon('unconfined_u:unconfined_r:sepgsql_regtest_superuser_t:s0:c0.c9');
SELECT sepgsql_getcon();
SAVEPOINT svpt_2;
SELECT sepgsql_setcon('unconfined_u:unconfined_r:sepgsql_regtest_superuser_t:s0:c0.c6');
SELECT sepgsql_getcon();
SAVEPOINT svpt_3;
SELECT sepgsql_setcon('unconfined_u:unconfined_r:sepgsql_regtest_superuser_t:s0:c0.c3');
SELECT sepgsql_getcon();
ROLLBACK TO SAVEPOINT svpt_2;
SELECT sepgsql_getcon(); -- should be 's0:c0.c9'
ROLLBACK TO SAVEPOINT svpt_1;
SELECT sepgsql_getcon(); -- should be 's0:c0.c12'
ABORT;
SELECT sepgsql_getcon(); -- should be 's0:c0.c15'
BEGIN;
SELECT sepgsql_setcon('unconfined_u:unconfined_r:sepgsql_regtest_superuser_t:s0:c0.c8');
SELECT sepgsql_getcon();
SAVEPOINT svpt_1;
SELECT sepgsql_setcon('unconfined_u:unconfined_r:sepgsql_regtest_superuser_t:s0:c0.c4');
SELECT sepgsql_getcon();
ROLLBACK TO SAVEPOINT svpt_1;
SELECT sepgsql_getcon(); -- should be 's0:c0.c8'
SELECT sepgsql_setcon('unconfined_u:unconfined_r:sepgsql_regtest_superuser_t:s0:c0.c6');
COMMIT;
SELECT sepgsql_getcon(); -- should be 's0:c0.c6'
-- sepgsql_regtest_user_t is not available dynamic-transition,
-- unless sepgsql_setcon() is called inside of trusted-procedure
-- @SECURITY-CONTEXT=unconfined_u:unconfined_r:sepgsql_regtest_user_t:s0:c0.c15
-- sepgsql_regtest_user_t has no permission to switch current label
SELECT sepgsql_setcon('unconfined_u:unconfined_r:sepgsql_regtest_user_t:s0'); -- failed
SELECT sepgsql_getcon();
-- trusted procedure allows to switch, but unavailable to override MCS rules
SELECT f5('unconfined_u:unconfined_r:sepgsql_regtest_user_t:s0:c0.c7'); -- OK
SELECT sepgsql_getcon();
SELECT f5('unconfined_u:unconfined_r:sepgsql_regtest_user_t:s0:c0.c31'); -- Failed
SELECT sepgsql_getcon();
SELECT f5(NULL); -- Failed
SELECT sepgsql_getcon();
BEGIN;
SELECT f5('unconfined_u:unconfined_r:sepgsql_regtest_user_t:s0:c0.c3'); -- OK
SELECT sepgsql_getcon();
ABORT;
SELECT sepgsql_getcon();
--
-- Test for simulation of typical connection pooling server
--
-- @SECURITY-CONTEXT=unconfined_u:unconfined_r:sepgsql_regtest_pool_t:s0
-- we shouldn't allow to switch client label without trusted procedure
SELECT sepgsql_setcon('unconfined_u:unconfined_r:sepgsql_regtest_foo_t:s0');
SELECT * FROM auth_tbl; -- failed, no permission to reference
-- switch to "foo"
SELECT auth_func('foo', 'acbd18db4cc2f85cedef654fccc4a4d8');
SELECT sepgsql_getcon();
SELECT * FROM foo_tbl; -- OK
SELECT * FROM foo_ptbl; -- OK
SELECT * FROM var_tbl; -- failed
SELECT * FROM var_ptbl; -- failed
SELECT * FROM auth_tbl; -- failed
SELECT sepgsql_setcon(NULL); -- end of session
SELECT sepgsql_getcon();
-- the pooler cannot touch these tables directly
SELECT * FROM foo_tbl; -- failed
SELECT * FROM foo_ptbl; -- failed
SELECT * FROM var_tbl; -- failed
SELECT * FROM var_ptbl; -- failed
-- switch to "var"
SELECT auth_func('var', 'b2145aac704ce76dbe1ac7adac535b23');
SELECT sepgsql_getcon();
SELECT * FROM foo_tbl; -- failed
SELECT * FROM foo_ptbl; -- failed
SELECT * FROM var_tbl; -- OK
SELECT * FROM var_ptbl; -- OK
SELECT * FROM auth_tbl; -- failed
SELECT sepgsql_setcon(NULL); -- end of session
-- misc checks
SELECT auth_func('var', 'invalid credential'); -- not works
SELECT sepgsql_getcon();
--
-- Clean up
--
-- @SECURITY-CONTEXT=unconfined_u:unconfined_r:sepgsql_regtest_superuser_t:s0-s0:c0.c255
DROP TABLE IF EXISTS t1 CASCADE;
DROP TABLE IF EXISTS t2 CASCADE;
DROP TABLE IF EXISTS t3 CASCADE;
DROP TABLE IF EXISTS t4 CASCADE;
DROP TABLE IF EXISTS tpart CASCADE;
DROP FUNCTION IF EXISTS f1() CASCADE;
DROP FUNCTION IF EXISTS f2() CASCADE;
DROP FUNCTION IF EXISTS f3() CASCADE;
DROP FUNCTION IF EXISTS f4() CASCADE;
DROP FUNCTION IF EXISTS f5(text) CASCADE; | sql | github | https://github.com/postgres/postgres | contrib/sepgsql/sql/label.sql |
/*
* Copyright 2010-2024 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.api.fir.test.cases.generated.cases.components.resolver;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.util.KtTestUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.analysis.api.fir.test.configurators.AnalysisApiFirTestConfiguratorFactory;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfiguratorFactoryData;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfigurator;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.TestModuleKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.FrontendKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisSessionMode;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiMode;
import org.jetbrains.kotlin.analysis.api.impl.base.test.cases.components.resolver.AbstractNonPhysicalResolveDanglingFileReferenceTest;
import org.jetbrains.kotlin.test.TestMetadata;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.util.regex.Pattern;
/** This class is generated by {@link org.jetbrains.kotlin.generators.tests.analysis.api.GenerateAnalysisApiTestsKt}. DO NOT MODIFY MANUALLY */
@SuppressWarnings("all")
@TestMetadata("analysis/analysis-api/testData/danglingFileReferenceResolve")
@TestDataPath("$PROJECT_ROOT")
public class FirIdeDependentAnalysisSourceModuleNonPhysicalResolveDanglingFileReferenceTestGenerated extends AbstractNonPhysicalResolveDanglingFileReferenceTest {
@NotNull
@Override
// Supplies the test configurator for this generated suite: FIR frontend,
// Source module kind, Dependent analysis-session mode, IDE analysis mode.
// These four values encode the suite variant in the class name
// ("FirIdeDependentAnalysisSourceModule...").
public AnalysisApiTestConfigurator getConfigurator() {
return AnalysisApiFirTestConfiguratorFactory.INSTANCE.createConfigurator(
new AnalysisApiTestConfiguratorFactoryData(
FrontendKind.Fir,
TestModuleKind.Source,
AnalysisSessionMode.Dependent,
AnalysisApiMode.Ide
)
);
}
@Test
// Guard test: fails if a *.kt file exists under the test-data directory
// without a corresponding generated test method in this class (i.e. the
// generator needs to be re-run).
public void testAllFilesPresentInDanglingFileReferenceResolve() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/danglingFileReferenceResolve"), Pattern.compile("^([^.]+)\\.kt$"), null, true);
}
@Test
@TestMetadata("classDifferentFile.kt")
public void testClassDifferentFile() {
// Delegates to the inherited runner with the checked-in test-data path.
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/classDifferentFile.kt");
}
@Test
@TestMetadata("classSameFile.kt")
public void testClassSameFile() {
// Delegates to the inherited runner with the checked-in test-data path.
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/classSameFile.kt");
}
@Test
@TestMetadata("functionDifferentFile.kt")
public void testFunctionDifferentFile() {
// Delegates to the inherited runner with the checked-in test-data path.
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/functionDifferentFile.kt");
}
@Test
@TestMetadata("functionSameFile.kt")
public void testFunctionSameFile() {
// Delegates to the inherited runner with the checked-in test-data path.
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/functionSameFile.kt");
}
// Generated sub-suite mirroring the "ignoreSelf" test-data subdirectory.
// Each method simply forwards one *.kt data file to the inherited runTest;
// the first method is the generator-sync guard for this subdirectory.
@Nested
@TestMetadata("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf")
@TestDataPath("$PROJECT_ROOT")
public class IgnoreSelf {
@Test
// Guard test: fails if a *.kt file under ignoreSelf/ has no generated method here.
public void testAllFilesPresentInIgnoreSelf() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf"), Pattern.compile("^([^.]+)\\.kt$"), null, true);
}
@Test
@TestMetadata("classPrivateConstructorParameter.kt")
public void testClassPrivateConstructorParameter() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/classPrivateConstructorParameter.kt");
}
@Test
@TestMetadata("classTypeParameter.kt")
public void testClassTypeParameter() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/classTypeParameter.kt");
}
@Test
@TestMetadata("companionObject.kt")
public void testCompanionObject() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/companionObject.kt");
}
@Test
@TestMetadata("constructorValueParameterFromPrecedingParameterDefaultValue.kt")
public void testConstructorValueParameterFromPrecedingParameterDefaultValue() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/constructorValueParameterFromPrecedingParameterDefaultValue.kt");
}
@Test
@TestMetadata("functionTypeParameter.kt")
public void testFunctionTypeParameter() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/functionTypeParameter.kt");
}
@Test
@TestMetadata("functionValueParameterFromPrecedingParameterDefaultValue.kt")
public void testFunctionValueParameterFromPrecedingParameterDefaultValue() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/functionValueParameterFromPrecedingParameterDefaultValue.kt");
}
@Test
@TestMetadata("localClass.kt")
public void testLocalClass() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/localClass.kt");
}
@Test
@TestMetadata("localVariable.kt")
public void testLocalVariable() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/localVariable.kt");
}
@Test
@TestMetadata("outerClassProperty.kt")
public void testOuterClassProperty() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/outerClassProperty.kt");
}
@Test
@TestMetadata("primaryConstructorParameter.kt")
public void testPrimaryConstructorParameter() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/primaryConstructorParameter.kt");
}
@Test
@TestMetadata("privateClass.kt")
public void testPrivateClass() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/privateClass.kt");
}
@Test
@TestMetadata("privateFunction.kt")
public void testPrivateFunction() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/privateFunction.kt");
}
@Test
@TestMetadata("privateProperty.kt")
public void testPrivateProperty() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/privateProperty.kt");
}
@Test
@TestMetadata("publicFunction.kt")
public void testPublicFunction() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/publicFunction.kt");
}
@Test
@TestMetadata("publicProperty.kt")
public void testPublicProperty() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/publicProperty.kt");
}
@Test
@TestMetadata("samConstructor.kt")
public void testSamConstructor() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/samConstructor.kt");
}
@Test
@TestMetadata("syntheticFieldVariable.kt")
public void testSyntheticFieldVariable() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/syntheticFieldVariable.kt");
}
@Test
@TestMetadata("topLevelFunction.kt")
public void testTopLevelFunction() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/topLevelFunction.kt");
}
@Test
@TestMetadata("topLevelPrivateDeclaration.kt")
public void testTopLevelPrivateDeclaration() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/topLevelPrivateDeclaration.kt");
}
@Test
@TestMetadata("userDataCopy.kt")
public void testUserDataCopy() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/userDataCopy.kt");
}
@Test
@TestMetadata("valueParameter.kt")
public void testValueParameter() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/ignoreSelf/valueParameter.kt");
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf")
@TestDataPath("$PROJECT_ROOT")
public class PreferSelf {
@Test
// Guard test: fails if a *.kt file under preferSelf/ has no generated method here.
public void testAllFilesPresentInPreferSelf() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf"), Pattern.compile("^([^.]+)\\.kt$"), null, true);
}
@Test
@TestMetadata("classTypeParameter.kt")
public void testClassTypeParameter() {
// Delegates to the inherited runner with the checked-in test-data path.
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/classTypeParameter.kt");
}
@Test
@TestMetadata("companionObject.kt")
public void testCompanionObject() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/companionObject.kt");
}
@Test
@TestMetadata("functionTypeParameter.kt")
public void testFunctionTypeParameter() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/functionTypeParameter.kt");
}
@Test
@TestMetadata("localClass.kt")
public void testLocalClass() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/localClass.kt");
}
@Test
@TestMetadata("localVariable.kt")
public void testLocalVariable() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/localVariable.kt");
}
@Test
@TestMetadata("outerClassProperty.kt")
public void testOuterClassProperty() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/outerClassProperty.kt");
}
@Test
@TestMetadata("primaryConstructorParameter.kt")
public void testPrimaryConstructorParameter() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/primaryConstructorParameter.kt");
}
@Test
@TestMetadata("privateClass.kt")
public void testPrivateClass() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/privateClass.kt");
}
@Test
@TestMetadata("privateFunction.kt")
public void testPrivateFunction() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/privateFunction.kt");
}
@Test
@TestMetadata("privateProperty.kt")
public void testPrivateProperty() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/privateProperty.kt");
}
@Test
@TestMetadata("publicFunction.kt")
public void testPublicFunction() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/publicFunction.kt");
}
@Test
@TestMetadata("publicProperty.kt")
public void testPublicProperty() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/publicProperty.kt");
}
@Test
@TestMetadata("syntheticFieldVariable.kt")
public void testSyntheticFieldVariable() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/syntheticFieldVariable.kt");
}
@Test
@TestMetadata("topLevelFunction.kt")
public void testTopLevelFunction() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/topLevelFunction.kt");
}
@Test
@TestMetadata("userDataCopy.kt")
public void testUserDataCopy() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/userDataCopy.kt");
}
@Test
@TestMetadata("valueParameter.kt")
public void testValueParameter() {
runTest("analysis/analysis-api/testData/danglingFileReferenceResolve/preferSelf/valueParameter.kt");
}
}
} | java | github | https://github.com/JetBrains/kotlin | analysis/analysis-api-fir/tests-gen/org/jetbrains/kotlin/analysis/api/fir/test/cases/generated/cases/components/resolver/FirIdeDependentAnalysisSourceModuleNonPhysicalResolveDanglingFileReferenceTestGenerated.java |
from models import *
from services.member_service import MemberService
from services.rate_service import RateService
from services.user_service import UserService
from services.milkcollection_service import MilkCollectionService
from configuration_manager import ConfigurationManager
from datetime import datetime
import random
def frange(start, stop, step=0.1):
    """Return an inclusive list of floats from start to stop.

    Values are ``start + round(i * step, 1)`` for i = 0..n, filtered to
    ``<= stop``.  Values are rounded to one decimal place, so steps much
    smaller than 0.1 collapse onto duplicate values.

    Fix: the iteration count used to be hard-coded as ``(stop-start)*10 + 1``,
    which assumed ``step == 0.1`` and produced too few values for smaller
    steps; it is now derived from ``step``.  Output for the default step is
    unchanged.
    """
    count = int(round((stop - start) / step)) + 1
    return [start + round(i * step, 1)
            for i in range(count)
            if start + round(i * step, 1) <= stop]
class DefaultDbData:
    """Seeds the milk-collection database with demo/default data.

    Helper for manual testing: creates sample members (each with one milk
    collection), default system settings, the collection rate tables and the
    default user accounts.  Written for Python 2 (print statements).
    """

    def __init__(self):
        pass

    def create_members(self):
        # Create 99 members with random names/mobiles, alternating cattle
        # type, and record one morning milk collection per member.
        mservice = MemberService()
        for i in range(1,100):
            name = "John" + str(random.randint(0, 9999))
            mobile = str(random.randint(1111111111, 9999999999))
            cattle_type = CattleType.BUFFALO
            if i%2 == 0:
                cattle_type = CattleType.COW
            member_id = mservice.add(name=name,
                                     cattle_type=cattle_type,
                                     mobile=mobile,
                                     created_by=1,
                                     created_at=datetime.now())
            member = mservice.get(member_id)
            # Collection record: fat/snf/qty randomized, the remaining
            # measurements (clr/aw/rate) are fixed sample values.
            collection = {}
            collection['member'] = member
            collection['shift'] = CollectionShift.MORNING
            collection['fat'] = random.randint(3, 7)
            collection['snf'] = random.randint(5, 12)
            collection['qty'] = random.randint(1, 5)
            collection['clr'] = 28.63
            collection['aw'] = 89.24
            collection['rate'] = 20.48
            collection['can_no'] = 1
            collection['total'] = collection['rate'] * collection['qty']
            collection['created_at'] = datetime.now()
            collection['created_by'] = 1
            collection['status'] = True
            colService = MilkCollectionService()
            col_id = colService.add(collection)

    def datetime_test(self):
        # Ad-hoc check that set_system_datetime really moves the system
        # clock: advance one day and print the clock before/after.
        # NOTE(review): d.day + 1 overflows past the last day of the month --
        # confirm this is acceptable for a throwaway manual test.
        from datetime import datetime
        from helpers.datetime_util import set_system_datetime
        d = datetime.now()
        print "BEFORE:", d
        time_tuple = (d.year, # Year
                      d.month, # Month
                      d.day + 1, # Day
                      d.hour, # Hour
                      d.minute, # Minute
                      d.second, # Second
                      d.microsecond, # Microsecond
                      )
        set_system_datetime(time_tuple)
        d = datetime.now()
        print "AFTER:", d

    def test_settings(self):
        # Persist a full set of default system settings (branding, input
        # modes, printing flags and serial-port assignments).
        configManager = ConfigurationManager()
        settings = {}
        settings[SystemSettings.LANGUAGE] = "en"
        settings[SystemSettings.SOCIETY_NAME] = "JEPPIAAR MILK COLLECTION CENTER"
        settings[SystemSettings.SOCIETY_ADDRESS] = "NO.6, Andiyur Post, Uthangarai Taluk, Krishnagiri - 635307."
        settings[SystemSettings.SOCIETY_ADDRESS1] = ""
        settings[SystemSettings.HEADER_LINE1] = "Milk center"
        settings[SystemSettings.HEADER_LINE2] = "address"
        settings[SystemSettings.HEADER_LINE3] = "phone"
        settings[SystemSettings.HEADER_LINE4] = ""
        settings[SystemSettings.FOOTER_LINE1] = ""
        settings[SystemSettings.FOOTER_LINE2] = "Thank you"
        settings[SystemSettings.SCALE_TYPE] = ScaleType.OPAL
        settings[SystemSettings.ANALYZER_TYPE] = AnalyzerType.ULTRA
        settings[SystemSettings.RATE_TYPE] = CollectionRateType.FAT
        settings[SystemSettings.BILL_OVERWRITE] = True
        settings[SystemSettings.MANUAL_FAT] = True
        settings[SystemSettings.MANUAL_SNF] = True
        settings[SystemSettings.MANUAL_QTY] = True
        settings[SystemSettings.PRINT_CLR] = False
        settings[SystemSettings.PRINT_WATER] = False
        settings[SystemSettings.PRINT_BILL] = True
        settings[SystemSettings.SEND_SMS] = False
        settings[SystemSettings.QUANTITY_2_DECIMAL] = True
        settings[SystemSettings.EXTERNAL_DISPLAY] = False
        settings[SystemSettings.COLLECTION_PRINTER_TYPE] = "Thermal"
        settings[SystemSettings.DATA_EXPORT_FORMAT] = "PDF"
        settings[SystemSettings.CAN_CAPACITY] = 38.0
        #sensor ports
        settings[SystemSettings.ANALYZER_PORT] = "/dev/ttyUSB0"
        settings[SystemSettings.WEIGH_SCALE_PORT] = "/dev/ttyUSB1"
        settings[SystemSettings.GSM_PORT] = "/dev/ttyUSB2"
        settings[SystemSettings.THERMAL_PRINTER_PORT] = "/dev/ttyUSB3"
        configManager.set_all_settings(settings)
        #settings = configManager.get_all_settings()
        #for k in settings.keys():
        #    print k, " = ", settings[k]

    def test_rate_setup(self):
        # Populate every rate table: FAT-only slabs, TS2 slabs, a FAT+SNF
        # grid (random rates around 20) and TS1 slabs -- for COW and BUFFALO.
        rate_service = RateService()
        rate_service.update_fat_collection_rate("COW",1,2.0,3.5,150)
        rate_service.update_fat_collection_rate("COW",2,3.6,4.5,170)
        rate_service.update_fat_collection_rate("COW",3,4.6,6.0,195)
        rate_service.update_fat_collection_rate("COW",4,6.1,22.0,205)
        rate_service.update_fat_collection_rate("BUFFALO",5,2.0,3.5,150)
        rate_service.update_fat_collection_rate("BUFFALO",6,3.6,4.5,170)
        rate_service.update_fat_collection_rate("BUFFALO",7,4.6,6.0,195)
        rate_service.update_fat_collection_rate("BUFFALO",8,6.1,22.0,205)
        rate_service.save_ts2_collection_rate("COW", 1, 5, 10, 150)
        rate_service.save_ts2_collection_rate("COW", 2, 10.1, 11, 185)
        rate_service.save_ts2_collection_rate("COW", 3, 11.1, 13.5, 200)
        rate_service.save_ts2_collection_rate("COW", 4, 13.6, 22, 205)
        rate_service.save_ts2_collection_rate("BUFFALO", 5, 5, 10, 150)
        rate_service.save_ts2_collection_rate("BUFFALO", 6, 10.1, 11, 185)
        rate_service.save_ts2_collection_rate("BUFFALO", 7, 11.1, 13.5, 200)
        rate_service.save_ts2_collection_rate("BUFFALO", 8, 13.6, 22, 205)
        # FAT x SNF grid in 0.1 increments (see frange).
        data = []
        for fat in frange(3, 12):
            for snf in frange(7,22):
                data.append({ "fat_value": fat,
                              "snf_value": snf,
                              "rate":20.0+random.random()})
        rate_service.set_fat_and_snf_collection_rate(cattle_type="COW",data=data)
        rate_service.set_fat_and_snf_collection_rate(cattle_type="BUFFALO",data=data)
        rate_service.save_ts1_collection_rate(id=None, cattle_type="COW", data={
            "min_fat": 2.5,"max_fat": 3.5,"fat_rate": 150,"min_snf": 6.5,"max_snf": 7.5,"snf_rate": 150})
        rate_service.save_ts1_collection_rate(id=None, cattle_type="COW", data={
            "min_fat": 3.6,"max_fat": 4.5,"fat_rate": 180,"min_snf": 7.6,"max_snf": 8.5,"snf_rate": 180})
        rate_service.save_ts1_collection_rate(id=None, cattle_type="COW", data={
            "min_fat": 4.6,"max_fat": 5.5,"fat_rate": 195,"min_snf": 8.6,"max_snf": 11.0,"snf_rate": 195})
        rate_service.save_ts1_collection_rate(id=None, cattle_type="COW", data={
            "min_fat": 5.6,"max_fat": 6.5,"fat_rate": 205,"min_snf": 11.0,"max_snf": 12,"snf_rate": 205})
        rate_service.save_ts1_collection_rate(id=None, cattle_type="BUFFALO", data={
            "min_fat": 2.5,"max_fat": 3.5,"fat_rate": 150,"min_snf": 6.5,"max_snf": 7.5,"snf_rate": 150})
        rate_service.save_ts1_collection_rate(id=None, cattle_type="BUFFALO", data={
            "min_fat": 3.6,"max_fat": 4.5,"fat_rate": 180,"min_snf": 7.6,"max_snf": 8.5,"snf_rate": 180})
        rate_service.save_ts1_collection_rate(id=None, cattle_type="BUFFALO", data={
            "min_fat": 4.6,"max_fat": 5.5,"fat_rate": 195,"min_snf": 8.6,"max_snf": 11.0,"snf_rate": 195})
        rate_service.save_ts1_collection_rate(id=None, cattle_type="BUFFALO", data={
            "min_fat": 5.6,"max_fat": 6.5,"fat_rate": 205,"min_snf": 11.0,"max_snf": 12,"snf_rate": 205})

    def create_default_users(self):
        # Create the four role accounts (basic/setup/data/admin).
        # NOTE(review): crypt-style password hashes are hard-coded in source
        # control -- consider rotating these outside of the codebase.
        user_service = UserService()
        created_by = 4
        created_at=datetime.now()
        user_service.add("basic", "$1$yWq10SD.$WQlvdj6kmHOY9KjHhuIGn1", "basic@milkpos.in", ["basic"], created_by, created_at)
        user_service.add("setup", "$1$Ii9Edtkd$cpxJMzTgpCmFxEhka2nKs/", "setup@milkpos.in", ["setup"], created_by, created_at)
        user_service.add("data", "$1$P/A0YAOn$O8SuzMiowBVJAorhfY239/", "data@milkpos.in", ["data"], created_by, created_at)
        user_service.add("admin", "$1$doG2/gED$vTLr/Iob7T9z0.nydnJxD1", "admin@milkpos.in", ["admin"], created_by, created_at)
# Copyright 2013 Embrane, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heleosapi import info as h_info
from oslo_log import log as logging
from neutron.common import constants
from neutron.db import models_v2
from neutron.i18n import _LI
LOG = logging.getLogger(__name__)
def set_db_item_state(context, neutron_item, new_state):
    """Persist ``new_state`` on the item inside a (sub)transaction.

    No-op when the item already carries that status.
    """
    with context.session.begin(subtransactions=True):
        if neutron_item["status"] == new_state:
            return
        neutron_item["status"] = new_state
        context.session.merge(neutron_item)
def retrieve_subnet(context, subnet_id):
    """Fetch the Subnet row with the given id (raises if not exactly one)."""
    subnet_query = context.session.query(models_v2.Subnet)
    return subnet_query.filter(models_v2.Subnet.id == subnet_id).one()
def retrieve_ip_allocation_info(context, neutron_port):
    """Retrieves ip allocation info for a specific port if any.

    Returns None (after logging) when the port carries no fixed IPs.
    """
    try:
        first_fixed_ip = neutron_port["fixed_ips"][0]
        subnet_id = first_fixed_ip["subnet_id"]
    except (KeyError, IndexError):
        LOG.info(_LI("No ip allocation set"))
        return None
    subnet = retrieve_subnet(context, subnet_id)
    is_gw_port = (neutron_port["device_owner"] ==
                  constants.DEVICE_OWNER_ROUTER_GW)
    return h_info.IpAllocationInfo(
        is_gw=is_gw_port,
        ip_version=subnet["ip_version"],
        prefix=subnet["cidr"].split("/")[1],
        ip_address=first_fixed_ip["ip_address"],
        port_id=neutron_port["id"],
        gateway_ip=subnet["gateway_ip"])
def retrieve_nat_info(context, fip, fixed_prefix, floating_prefix, router):
    """Build the NatInfo for a floating IP mapping.

    ``context`` and ``router`` are accepted for interface compatibility but
    are not used here.
    """
    return h_info.NatInfo(
        source_address=fip["floating_ip_address"],
        source_prefix=floating_prefix,
        destination_address=fip["fixed_ip_address"],
        destination_prefix=fixed_prefix,
        floating_ip_id=fip["id"],
        fixed_port_id=fip["port_id"])
An attempt was made to assign to a borrowed value.
Erroneous code example:
```compile_fail,E0506
struct FancyNum {
num: u8,
}
let mut fancy_num = FancyNum { num: 5 };
let fancy_ref = &fancy_num;
fancy_num = FancyNum { num: 6 };
// error: cannot assign to `fancy_num` because it is borrowed
println!("Num: {}, Ref: {}", fancy_num.num, fancy_ref.num);
```
Because `fancy_ref` still holds a reference to `fancy_num`, `fancy_num` can't
be assigned a new value, as doing so would invalidate the reference.
Alternatively, we can move the value out of `fancy_num` into a separate
binding (`moved_num`) before assigning a new value to `fancy_num`:
```
struct FancyNum {
num: u8,
}
let mut fancy_num = FancyNum { num: 5 };
let moved_num = fancy_num;
fancy_num = FancyNum { num: 6 };
println!("Num: {}, Moved num: {}", fancy_num.num, moved_num.num);
```
If the value has to be borrowed, try limiting the lifetime of the borrow using
a scoped block:
```
struct FancyNum {
num: u8,
}
let mut fancy_num = FancyNum { num: 5 };
{
let fancy_ref = &fancy_num;
println!("Ref: {}", fancy_ref.num);
}
// Works because `fancy_ref` is no longer in scope
fancy_num = FancyNum { num: 6 };
println!("Num: {}", fancy_num.num);
```
Or by moving the reference into a function:
```
struct FancyNum {
num: u8,
}
fn print_fancy_ref(fancy_ref: &FancyNum){
println!("Ref: {}", fancy_ref.num);
}
let mut fancy_num = FancyNum { num: 5 };
print_fancy_ref(&fancy_num);
// Works because function borrow has ended
fancy_num = FancyNum { num: 6 };
println!("Num: {}", fancy_num.num);
``` | unknown | github | https://github.com/rust-lang/rust | compiler/rustc_error_codes/src/error_codes/E0506.md |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010-2011 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance import context
from glance.openstack.common import local
from glance.tests.unit import utils as unit_utils
from glance.tests import utils
def _fake_image(owner, is_public):
return {
'id': None,
'owner': owner,
'is_public': is_public,
}
def _fake_membership(can_share=False):
return {'can_share': can_share}
class TestContext(utils.BaseTestCase):
    """Exercises RequestContext-driven image visibility and sharability
    rules against a fake DB API (unit_utils.FakeDB)."""

    def setUp(self):
        super(TestContext, self).setUp()
        # Fake DB API providing is_image_visible()/is_image_sharable().
        self.db_api = unit_utils.FakeDB()

    def do_visible(self, exp_res, img_owner, img_public, **kwargs):
        """
        Perform a context visibility test. Creates a (fake) image
        with the specified owner and is_public attributes, then
        creates a context with the given keyword arguments and expects
        exp_res as the result of an is_image_visible() call on the
        context.
        """
        img = _fake_image(img_owner, img_public)
        ctx = context.RequestContext(**kwargs)
        self.assertEqual(self.db_api.is_image_visible(ctx, img), exp_res)

    def do_sharable(self, exp_res, img_owner, membership=None, **kwargs):
        """
        Perform a context sharability test. Creates a (fake) image
        with the specified owner and is_public attributes, then
        creates a context with the given keyword arguments and expects
        exp_res as the result of an is_image_sharable() call on the
        context. If membership is not None, its value will be passed
        in as the 'membership' keyword argument of
        is_image_sharable().
        """
        img = _fake_image(img_owner, True)
        ctx = context.RequestContext(**kwargs)
        sharable_args = {}
        if membership is not None:
            sharable_args['membership'] = membership
        output = self.db_api.is_image_sharable(ctx, img, **sharable_args)
        self.assertEqual(exp_res, output)

    def test_empty_public(self):
        """
        Tests that an empty context (with is_admin set to True) can
        access an image with is_public set to True.
        """
        self.do_visible(True, None, True, is_admin=True)

    def test_empty_public_owned(self):
        """
        Tests that an empty context (with is_admin set to True) can
        access an owned image with is_public set to True.
        """
        self.do_visible(True, 'pattieblack', True, is_admin=True)

    def test_empty_private(self):
        """
        Tests that an empty context (with is_admin set to True) can
        access an image with is_public set to False.
        """
        self.do_visible(True, None, False, is_admin=True)

    def test_empty_private_owned(self):
        """
        Tests that an empty context (with is_admin set to True) can
        access an owned image with is_public set to False.
        """
        self.do_visible(True, 'pattieblack', False, is_admin=True)

    def test_empty_shared(self):
        """
        Tests that an empty context (with is_admin set to False) can
        not share an image, with or without membership.
        """
        self.do_sharable(False, 'pattieblack', None, is_admin=False)
        self.do_sharable(False, 'pattieblack', _fake_membership(True),
                         is_admin=False)

    def test_anon_public(self):
        """
        Tests that an anonymous context (with is_admin set to False)
        can access an image with is_public set to True.
        """
        self.do_visible(True, None, True)

    def test_anon_public_owned(self):
        """
        Tests that an anonymous context (with is_admin set to False)
        can access an owned image with is_public set to True.
        """
        self.do_visible(True, 'pattieblack', True)

    def test_anon_private(self):
        """
        Tests that an anonymous context (with is_admin set to False)
        can access an unowned image with is_public set to False.
        """
        self.do_visible(True, None, False)

    def test_anon_private_owned(self):
        """
        Tests that an anonymous context (with is_admin set to False)
        cannot access an owned image with is_public set to False.
        """
        self.do_visible(False, 'pattieblack', False)

    def test_anon_shared(self):
        """
        Tests that an anonymous context (with is_admin set to False)
        can not share an image, with or without membership.
        """
        self.do_sharable(False, 'pattieblack', None)
        self.do_sharable(False, 'pattieblack', _fake_membership(True))

    def test_auth_public(self):
        """
        Tests that an authenticated context (with is_admin set to
        False) can access an image with is_public set to True.
        """
        self.do_visible(True, None, True, tenant='froggy')

    def test_auth_public_unowned(self):
        """
        Tests that an authenticated context (with is_admin set to
        False) can access an image (which it does not own) with
        is_public set to True.
        """
        self.do_visible(True, 'pattieblack', True, tenant='froggy')

    def test_auth_public_owned(self):
        """
        Tests that an authenticated context (with is_admin set to
        False) can access an image (which it does own) with is_public
        set to True.
        """
        self.do_visible(True, 'pattieblack', True, tenant='pattieblack')

    def test_auth_private(self):
        """
        Tests that an authenticated context (with is_admin set to
        False) can access an image with is_public set to False.
        """
        self.do_visible(True, None, False, tenant='froggy')

    def test_auth_private_unowned(self):
        """
        Tests that an authenticated context (with is_admin set to
        False) cannot access an image (which it does not own) with
        is_public set to False.
        """
        self.do_visible(False, 'pattieblack', False, tenant='froggy')

    def test_auth_private_owned(self):
        """
        Tests that an authenticated context (with is_admin set to
        False) can access an image (which it does own) with is_public
        set to False.
        """
        self.do_visible(True, 'pattieblack', False, tenant='pattieblack')

    def test_auth_sharable(self):
        """
        Tests that an authenticated context (with is_admin set to
        False) cannot share an image it neither owns nor is shared
        with it.
        """
        self.do_sharable(False, 'pattieblack', None, tenant='froggy')

    def test_auth_sharable_admin(self):
        """
        Tests that an authenticated context (with is_admin set to
        True) can share an image it neither owns nor is shared with
        it.
        """
        self.do_sharable(True, 'pattieblack', None, tenant='froggy',
                         is_admin=True)

    def test_auth_sharable_owned(self):
        """
        Tests that an authenticated context (with is_admin set to
        False) can share an image it owns, even if it is not shared
        with it.
        """
        self.do_sharable(True, 'pattieblack', None, tenant='pattieblack')

    def test_auth_sharable_cannot_share(self):
        """
        Tests that an authenticated context (with is_admin set to
        False) cannot share an image it does not own even if it is
        shared with it, but with can_share = False.
        """
        self.do_sharable(False, 'pattieblack', _fake_membership(False),
                         tenant='froggy')

    def test_auth_sharable_can_share(self):
        """
        Tests that an authenticated context (with is_admin set to
        False) can share an image it does not own if it is shared with
        it with can_share = True.
        """
        self.do_sharable(True, 'pattieblack', _fake_membership(True),
                         tenant='froggy')

    def test_request_id(self):
        # Each new context must get a distinct request id.
        contexts = [context.RequestContext().request_id for _ in range(5)]
        # Check for uniqueness -- set() will normalize its argument
        self.assertEqual(5, len(set(contexts)))

    def test_service_catalog(self):
        # The service catalog passed in must be stored as-is.
        ctx = context.RequestContext(service_catalog=['foo'])
        self.assertEqual(['foo'], ctx.service_catalog)

    def test_context_local_store(self):
        # Creating a context must register it in the thread-local store.
        if hasattr(local.store, 'context'):
            del local.store.context
        ctx = context.RequestContext()
        self.assertTrue(hasattr(local.store, 'context'))
        self.assertEqual(ctx, local.store.context)
#!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
##---------------------------------------------------------------------------##
##
## Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
## Copyright (C) 2003 Mt. Hood Playing Card Co.
## Copyright (C) 2005-2009 Skomoroh
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
##---------------------------------------------------------------------------##
__all__ = ['ColorsDialog']
# imports
import Tkinter
from tkColorChooser import askcolor
# PySol imports
from pysollib.mfxutil import KwStruct
# Toolkit imports
from tkwidget import MfxDialog
# ************************************************************************
# *
# ************************************************************************
class ColorsDialog(MfxDialog):
    """Modal dialog for editing the table/highlight colors.

    Shows one row per configurable color with a live preview label and a
    "Change..." button that opens the system color chooser.  After the
    dialog closes, the selections are exposed as ``self.text_color``,
    ``self.piles_color``, ``self.cards_1_color``, ``self.cards_2_color``,
    ``self.samerank_1_color``, ``self.samerank_2_color``,
    ``self.hintarrow_color`` and ``self.not_matching_color``.
    """

    def __init__(self, parent, title, app, **kw):
        kw = self.initKw(kw)
        MfxDialog.__init__(self, parent, title, kw.resizable, kw.default)
        top_frame, bottom_frame = self.createFrames(kw)
        self.createBitmaps(top_frame, kw)
        frame = Tkinter.Frame(top_frame)
        frame.pack(expand=True, fill='both', padx=5, pady=10)
        frame.columnconfigure(0, weight=1)

        def _color_var(option_key):
            # One StringVar per color, initialized from the current options.
            var = Tkinter.StringVar()
            var.set(app.opt.colors[option_key])
            return var

        self.text_var = _color_var('text')
        self.piles_var = _color_var('piles')
        self.cards_1_var = _color_var('cards_1')
        self.cards_2_var = _color_var('cards_2')
        self.samerank_1_var = _color_var('samerank_1')
        self.samerank_2_var = _color_var('samerank_2')
        self.hintarrow_var = _color_var('hintarrow')
        self.not_matching_var = _color_var('not_matching')
        # Build one row (caption, preview, button) per color.
        # NOTE: the loop variable used to shadow the ``title`` parameter.
        row = 0
        for label_text, var in (
                (_('Text foreground:'), self.text_var),
                (_('Highlight piles:'), self.piles_var),
                (_('Highlight cards 1:'), self.cards_1_var),
                (_('Highlight cards 2:'), self.cards_2_var),
                (_('Highlight same rank 1:'), self.samerank_1_var),
                (_('Highlight same rank 2:'), self.samerank_2_var),
                (_('Hint arrow:'), self.hintarrow_var),
                (_('Highlight not matching:'), self.not_matching_var),
                ):
            Tkinter.Label(frame, text=label_text, anchor='w',
                          ).grid(row=row, column=0, sticky='we')
            preview = Tkinter.Label(frame, width=10, height=2,
                                    bg=var.get(), textvariable=var)
            preview.grid(row=row, column=1, padx=5)
            # Bind the row's label as a default argument so each button
            # edits its own row (avoids the late-binding closure pitfall).
            button = Tkinter.Button(frame, text=_('Change...'), width=10,
                                    command=lambda l=preview: self.selectColor(l))
            button.grid(row=row, column=2)
            row += 1

        focus = self.createButtons(bottom_frame, kw)
        self.mainloop(focus, kw.timeout)
        # Expose the final selections as plain color strings.
        self.text_color = self.text_var.get()
        self.piles_color = self.piles_var.get()
        self.cards_1_color = self.cards_1_var.get()
        self.cards_2_color = self.cards_2_var.get()
        self.samerank_1_color = self.samerank_1_var.get()
        self.samerank_2_color = self.samerank_2_var.get()
        self.hintarrow_color = self.hintarrow_var.get()
        self.not_matching_color = self.not_matching_var.get()

    def selectColor(self, label):
        # Open the chooser preset to the label's current background color.
        # Was a bare ``except:`` (which also swallowed SystemExit /
        # KeyboardInterrupt); narrowed to the Tk error the chooser raises.
        try:
            c = askcolor(parent=self.top, initialcolor=label.cget('bg'),
                         title=_("Select color"))
        except Tkinter.TclError:
            # Chooser failure (e.g. invalid initial color): keep old color.
            pass
        else:
            if c and c[1]:
                label.configure(bg=c[1])
                # configure(text=...) does not update the linked StringVar,
                # so write the variable directly to refresh the shown value.
                label.setvar(label.cget('textvariable'), c[1])

    def initKw(self, kw):
        """Fill in the default dialog options (OK/Cancel buttons)."""
        kw = KwStruct(kw,
                      strings=(_("&OK"), _("&Cancel")),
                      default=0,
                      )
        return MfxDialog.initKw(self, kw)
from django.contrib import admin
from django.contrib.auth.models import User as AuthUser
from django.contrib.contenttypes.models import ContentType
from django.core import checks, management
from django.db import DEFAULT_DB_ALIAS, models
from django.db.models import signals
from django.test import TestCase, override_settings
from django.test.utils import isolate_apps
from django.urls import reverse
from .admin import admin as force_admin_model_registration # NOQA
from .models import (
Abstract, BaseUser, Bug, Country, Improvement, Issue, LowerStatusPerson,
MultiUserProxy, MyPerson, MyPersonProxy, OtherPerson, Person, ProxyBug,
ProxyImprovement, ProxyProxyBug, ProxyTrackerUser, State, StateProxy,
StatusPerson, TrackerUser, User, UserProxy, UserProxyProxy,
)
class ProxyModelTests(TestCase):
def test_same_manager_queries(self):
"""
The MyPerson model should be generating the same database queries as
the Person model (when the same manager is used in each case).
"""
my_person_sql = MyPerson.other.all().query.get_compiler(
DEFAULT_DB_ALIAS).as_sql()
person_sql = Person.objects.order_by("name").query.get_compiler(
DEFAULT_DB_ALIAS).as_sql()
self.assertEqual(my_person_sql, person_sql)
def test_inheritance_new_table(self):
"""
The StatusPerson models should have its own table (it's using ORM-level
inheritance).
"""
sp_sql = StatusPerson.objects.all().query.get_compiler(
DEFAULT_DB_ALIAS).as_sql()
p_sql = Person.objects.all().query.get_compiler(
DEFAULT_DB_ALIAS).as_sql()
self.assertNotEqual(sp_sql, p_sql)
def test_basic_proxy(self):
"""
Creating a Person makes them accessible through the MyPerson proxy.
"""
person = Person.objects.create(name="Foo McBar")
self.assertEqual(len(Person.objects.all()), 1)
self.assertEqual(len(MyPerson.objects.all()), 1)
self.assertEqual(MyPerson.objects.get(name="Foo McBar").id, person.id)
self.assertFalse(MyPerson.objects.get(id=person.id).has_special_name())
def test_no_proxy(self):
"""
Person is not proxied by StatusPerson subclass.
"""
Person.objects.create(name="Foo McBar")
self.assertEqual(list(StatusPerson.objects.all()), [])
def test_basic_proxy_reverse(self):
"""
A new MyPerson also shows up as a standard Person.
"""
MyPerson.objects.create(name="Bazza del Frob")
self.assertEqual(len(MyPerson.objects.all()), 1)
self.assertEqual(len(Person.objects.all()), 1)
LowerStatusPerson.objects.create(status="low", name="homer")
lsps = [lsp.name for lsp in LowerStatusPerson.objects.all()]
self.assertEqual(lsps, ["homer"])
def test_correct_type_proxy_of_proxy(self):
"""
Correct type when querying a proxy of proxy
"""
Person.objects.create(name="Foo McBar")
MyPerson.objects.create(name="Bazza del Frob")
LowerStatusPerson.objects.create(status="low", name="homer")
pp = sorted(mpp.name for mpp in MyPersonProxy.objects.all())
self.assertEqual(pp, ['Bazza del Frob', 'Foo McBar', 'homer'])
    def test_proxy_included_in_ancestors(self):
        """
        Proxy models are included in the ancestors for a model's DoesNotExist
        and MultipleObjectsReturned
        """
        Person.objects.create(name="Foo McBar")
        MyPerson.objects.create(name="Bazza del Frob")
        LowerStatusPerson.objects.create(status="low", name="homer")
        max_id = Person.objects.aggregate(max_id=models.Max('id'))['max_id']
        # The proxy raises the *base* model's exception classes.
        with self.assertRaises(Person.DoesNotExist):
            MyPersonProxy.objects.get(name='Zathras')
        with self.assertRaises(Person.MultipleObjectsReturned):
            MyPersonProxy.objects.get(id__lt=max_id + 1)
        with self.assertRaises(Person.DoesNotExist):
            StatusPerson.objects.get(name='Zathras')
        StatusPerson.objects.create(name='Bazza Jr.')
        StatusPerson.objects.create(name='Foo Jr.')
        # Recompute after the extra rows so the filter matches several objects.
        max_id = Person.objects.aggregate(max_id=models.Max('id'))['max_id']
        with self.assertRaises(Person.MultipleObjectsReturned):
            StatusPerson.objects.get(id__lt=max_id + 1)
def test_abstract_base_with_model_fields(self):
msg = "Abstract base class containing model fields not permitted for proxy model 'NoAbstract'."
with self.assertRaisesMessage(TypeError, msg):
class NoAbstract(Abstract):
class Meta:
proxy = True
def test_too_many_concrete_classes(self):
msg = "Proxy model 'TooManyBases' has more than one non-abstract model base class."
with self.assertRaisesMessage(TypeError, msg):
class TooManyBases(User, Person):
class Meta:
proxy = True
def test_no_base_classes(self):
msg = "Proxy model 'NoBaseClasses' has no non-abstract model base class."
with self.assertRaisesMessage(TypeError, msg):
class NoBaseClasses(models.Model):
class Meta:
proxy = True
@isolate_apps('proxy_models')
def test_new_fields(self):
class NoNewFields(Person):
newfield = models.BooleanField()
class Meta:
proxy = True
errors = NoNewFields.check()
expected = [
checks.Error(
"Proxy model 'NoNewFields' contains model fields.",
id='models.E017',
)
]
self.assertEqual(errors, expected)
    @override_settings(TEST_SWAPPABLE_MODEL='proxy_models.AlternateModel')
    @isolate_apps('proxy_models')
    def test_swappable(self):
        # A model that has been swapped out via settings cannot be proxied.
        class SwappableModel(models.Model):
            class Meta:
                swappable = 'TEST_SWAPPABLE_MODEL'

        class AlternateModel(models.Model):
            pass

        # You can't proxy a swapped model
        with self.assertRaises(TypeError):
            class ProxyModel(SwappableModel):
                class Meta:
                    proxy = True
def test_myperson_manager(self):
Person.objects.create(name="fred")
Person.objects.create(name="wilma")
Person.objects.create(name="barney")
resp = [p.name for p in MyPerson.objects.all()]
self.assertEqual(resp, ['barney', 'fred'])
resp = [p.name for p in MyPerson._default_manager.all()]
self.assertEqual(resp, ['barney', 'fred'])
def test_otherperson_manager(self):
Person.objects.create(name="fred")
Person.objects.create(name="wilma")
Person.objects.create(name="barney")
resp = [p.name for p in OtherPerson.objects.all()]
self.assertEqual(resp, ['barney', 'wilma'])
resp = [p.name for p in OtherPerson.excluder.all()]
self.assertEqual(resp, ['barney', 'fred'])
resp = [p.name for p in OtherPerson._default_manager.all()]
self.assertEqual(resp, ['barney', 'wilma'])
def test_permissions_created(self):
from django.contrib.auth.models import Permission
Permission.objects.get(name="May display users information")
    def test_proxy_model_signals(self):
        """
        Test save signals for proxy models
        """
        output = []

        def make_handler(model, event):
            # Records "<model> <event> save" when the signal fires.
            def _handler(*args, **kwargs):
                output.append('%s %s save' % (model, event))
            return _handler

        h1 = make_handler('MyPerson', 'pre')
        h2 = make_handler('MyPerson', 'post')
        h3 = make_handler('Person', 'pre')
        h4 = make_handler('Person', 'post')
        signals.pre_save.connect(h1, sender=MyPerson)
        signals.post_save.connect(h2, sender=MyPerson)
        signals.pre_save.connect(h3, sender=Person)
        signals.post_save.connect(h4, sender=Person)
        # Saving through the proxy fires the proxy's handlers only, not the
        # concrete model's.
        MyPerson.objects.create(name="dino")
        self.assertEqual(output, [
            'MyPerson pre save',
            'MyPerson post save'
        ])
        output = []
        h5 = make_handler('MyPersonProxy', 'pre')
        h6 = make_handler('MyPersonProxy', 'post')
        signals.pre_save.connect(h5, sender=MyPersonProxy)
        signals.post_save.connect(h6, sender=MyPersonProxy)
        MyPersonProxy.objects.create(name="pebbles")
        self.assertEqual(output, [
            'MyPersonProxy pre save',
            'MyPersonProxy post save'
        ])
        # Disconnect everything so later tests are unaffected.
        signals.pre_save.disconnect(h1, sender=MyPerson)
        signals.post_save.disconnect(h2, sender=MyPerson)
        signals.pre_save.disconnect(h3, sender=Person)
        signals.post_save.disconnect(h4, sender=Person)
        signals.pre_save.disconnect(h5, sender=MyPersonProxy)
        signals.post_save.disconnect(h6, sender=MyPersonProxy)
def test_content_type(self):
ctype = ContentType.objects.get_for_model
self.assertIs(ctype(Person), ctype(OtherPerson))
def test_user_proxy_models(self):
User.objects.create(name='Bruce')
resp = [u.name for u in User.objects.all()]
self.assertEqual(resp, ['Bruce'])
resp = [u.name for u in UserProxy.objects.all()]
self.assertEqual(resp, ['Bruce'])
resp = [u.name for u in UserProxyProxy.objects.all()]
self.assertEqual(resp, ['Bruce'])
self.assertEqual([u.name for u in MultiUserProxy.objects.all()], ['Bruce'])
def test_proxy_for_model(self):
self.assertEqual(UserProxy, UserProxyProxy._meta.proxy_for_model)
def test_concrete_model(self):
self.assertEqual(User, UserProxyProxy._meta.concrete_model)
def test_proxy_delete(self):
"""
Proxy objects can be deleted
"""
User.objects.create(name='Bruce')
u2 = UserProxy.objects.create(name='George')
resp = [u.name for u in UserProxy.objects.all()]
self.assertEqual(resp, ['Bruce', 'George'])
u2.delete()
resp = [u.name for u in UserProxy.objects.all()]
self.assertEqual(resp, ['Bruce'])
def test_select_related(self):
"""
We can still use `select_related()` to include related models in our
querysets.
"""
country = Country.objects.create(name='Australia')
State.objects.create(name='New South Wales', country=country)
resp = [s.name for s in State.objects.select_related()]
self.assertEqual(resp, ['New South Wales'])
resp = [s.name for s in StateProxy.objects.select_related()]
self.assertEqual(resp, ['New South Wales'])
self.assertEqual(StateProxy.objects.get(name='New South Wales').name, 'New South Wales')
resp = StateProxy.objects.select_related().get(name='New South Wales')
self.assertEqual(resp.name, 'New South Wales')
def test_filter_proxy_relation_reverse(self):
tu = TrackerUser.objects.create(name='Contributor', status='contrib')
ptu = ProxyTrackerUser.objects.get()
issue = Issue.objects.create(assignee=tu)
self.assertEqual(tu.issues.get(), issue)
self.assertEqual(ptu.issues.get(), issue)
self.assertSequenceEqual(TrackerUser.objects.filter(issues=issue), [tu])
self.assertSequenceEqual(ProxyTrackerUser.objects.filter(issues=issue), [ptu])
    def test_proxy_bug(self):
        """
        Filtering, select_related() and repr() all work across proxy models,
        proxies of proxies, and related fields declared against proxies.
        """
        contributor = ProxyTrackerUser.objects.create(name='Contributor', status='contrib')
        someone = BaseUser.objects.create(name='Someone')
        Bug.objects.create(summary='fix this', version='1.1beta', assignee=contributor, reporter=someone)
        pcontributor = ProxyTrackerUser.objects.create(name='OtherContributor', status='proxy')
        Improvement.objects.create(
            summary='improve that', version='1.1beta',
            assignee=contributor, reporter=pcontributor,
            associated_bug=ProxyProxyBug.objects.all()[0],
        )
        # Related field filter on proxy
        resp = ProxyBug.objects.get(version__icontains='beta')
        self.assertEqual(repr(resp), '<ProxyBug: ProxyBug:fix this>')
        # Select related + filter on proxy
        resp = ProxyBug.objects.select_related().get(version__icontains='beta')
        self.assertEqual(repr(resp), '<ProxyBug: ProxyBug:fix this>')
        # Proxy of proxy, select_related + filter
        resp = ProxyProxyBug.objects.select_related().get(
            version__icontains='beta'
        )
        self.assertEqual(repr(resp), '<ProxyProxyBug: ProxyProxyBug:fix this>')
        # Select related + filter on a related proxy field
        resp = ProxyImprovement.objects.select_related().get(
            reporter__name__icontains='butor'
        )
        self.assertEqual(
            repr(resp),
            '<ProxyImprovement: ProxyImprovement:improve that>'
        )
        # Select related + filter on a related proxy of proxy field
        resp = ProxyImprovement.objects.select_related().get(
            associated_bug__summary__icontains='fix'
        )
        self.assertEqual(
            repr(resp),
            '<ProxyImprovement: ProxyImprovement:improve that>'
        )
def test_proxy_load_from_fixture(self):
management.call_command('loaddata', 'mypeople.json', verbosity=0)
p = MyPerson.objects.get(pk=100)
self.assertEqual(p.name, 'Elvis Presley')
def test_eq(self):
self.assertEqual(MyPerson(id=100), Person(id=100))
@override_settings(ROOT_URLCONF='proxy_models.urls')
class ProxyModelAdminTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        # Shared fixtures for the admin tests: a staff superuser for logged-in
        # requests, one proxy tracker user, and an issue assigned to them.
        cls.superuser = AuthUser.objects.create(is_superuser=True, is_staff=True)
        cls.tu1 = ProxyTrackerUser.objects.create(name='Django Pony', status='emperor')
        cls.i1 = Issue.objects.create(summary="Pony's Issue", assignee=cls.tu1)
    def test_cascade_delete_proxy_model_admin_warning(self):
        """
        Test if admin gives warning about cascade deleting models referenced
        to concrete model by deleting proxy object.
        """
        tracker_user = TrackerUser.objects.all()[0]
        base_user = BaseUser.objects.all()[0]
        issue = Issue.objects.all()[0]
        # Collecting the proxy queryset must stay at a fixed query count.
        with self.assertNumQueries(6):
            collector = admin.utils.NestedObjects('default')
            collector.collect(ProxyTrackerUser.objects.all())
        # Top level (key None) contains the user and its parent; the issue is
        # nested under the tracker user it is assigned to.
        self.assertIn(tracker_user, collector.edges.get(None, ()))
        self.assertIn(base_user, collector.edges.get(None, ()))
        self.assertIn(issue, collector.edges.get(tracker_user, ()))
    def test_delete_str_in_model_admin(self):
        """
        Test if the admin delete page shows the correct string representation
        for a proxy model.
        """
        user = TrackerUser.objects.get(name='Django Pony')
        proxy = ProxyTrackerUser.objects.get(name='Django Pony')
        # Expected summary strings: each entry links to the change page of its
        # own model class, with that class's verbose name as the label.
        user_str = 'Tracker user: <a href="%s">%s</a>' % (
            reverse('admin_proxy:proxy_models_trackeruser_change', args=(user.pk,)), user
        )
        proxy_str = 'Proxy tracker user: <a href="%s">%s</a>' % (
            reverse('admin_proxy:proxy_models_proxytrackeruser_change', args=(proxy.pk,)), proxy
        )
        self.client.force_login(self.superuser)
        # Concrete model's delete confirmation page.
        response = self.client.get(reverse('admin_proxy:proxy_models_trackeruser_delete', args=(user.pk,)))
        delete_str = response.context['deleted_objects'][0]
        self.assertEqual(delete_str, user_str)
        # Proxy model's delete confirmation page.
        response = self.client.get(reverse('admin_proxy:proxy_models_proxytrackeruser_delete', args=(proxy.pk,)))
        delete_str = response.context['deleted_objects'][0]
        self.assertEqual(delete_str, proxy_str)
# Really simple expression types to handle arithmetic expressions referring
# to other # defines
class MacroExp:
    """Marker base class for lazily-evaluated macro arithmetic expressions."""
    pass


class MacroRef(MacroExp):
    """A deferred reference to another macro constant, resolved by lookup()."""

    def __init__(self, name):
        self._name = name

    def eval(self):
        return lookup(self._name)

    def __add__(self, other):
        return MacroAdd(self, other)

    def __radd__(self, other):
        return MacroAdd(other, self)


class MacroAdd(MacroExp):
    """The sum of two operands, each a plain value or a nested MacroExp."""

    def __init__(self, l, r):
        self.l = l
        self.r = r

    def eval(self):
        # Evaluate each side only if it is itself a deferred expression.
        lhs = self.l.eval() if isinstance(self.l, MacroExp) else self.l
        rhs = self.r.eval() if isinstance(self.r, MacroExp) else self.r
        return lhs + rhs

    def __add__(self, other):
        return MacroAdd(self, other)

    def __radd__(self, other):
        return MacroAdd(other, self)
def lookup(n):
    """Resolve macro constant *n* from _consts, evaluating any deferred expression."""
    value = _consts[n]
    return value.eval() if isinstance(value, MacroExp) else value
# Macro constants
_consts = {
"EI_NIDENT": 16 , #
"EI_MAG0": 0 , # File identification byte 0 index
"ELFMAG0": 0x7f , # Magic number byte 0
"EI_MAG1": 1 , # File identification byte 1 index
"ELFMAG1": 'E' , # Magic number byte 1
"EI_MAG2": 2 , # File identification byte 2 index
"ELFMAG2": 'L' , # Magic number byte 2
"EI_MAG3": 3 , # File identification byte 3 index
"ELFMAG3": 'F' , # Magic number byte 3
"ELFMAG": "\177ELF" , #
"SELFMAG": 4 , #
"EI_CLASS": 4 , # File class byte index
"ELFCLASSNONE": 0 , # Invalid class
"ELFCLASS32": 1 , # 32-bit objects
"ELFCLASS64": 2 , # 64-bit objects
"ELFCLASSNUM": 3 , #
"EI_DATA": 5 , # Data encoding byte index
"ELFDATANONE": 0 , # Invalid data encoding
"ELFDATA2LSB": 1 , # 2's complement, little endian
"ELFDATA2MSB": 2 , # 2's complement, big endian
"ELFDATANUM": 3 , #
"EI_VERSION": 6 , # File version byte index
"EI_OSABI": 7 , # OS ABI identification
"ELFOSABI_NONE": 0 , # UNIX System V ABI
"ELFOSABI_SYSV": 0 , # Alias.
"ELFOSABI_HPUX": 1 , # HP-UX
"ELFOSABI_NETBSD": 2 , # NetBSD.
"ELFOSABI_GNU": 3 , # Object uses GNU ELF extensions.
"ELFOSABI_LINUX": MacroRef("ELFOSABI_GNU") , # Compatibility alias.
"ELFOSABI_SOLARIS": 6 , # Sun Solaris.
"ELFOSABI_AIX": 7 , # IBM AIX.
"ELFOSABI_IRIX": 8 , # SGI Irix.
"ELFOSABI_FREEBSD": 9 , # FreeBSD.
"ELFOSABI_TRU64": 10 , # Compaq TRU64 UNIX.
"ELFOSABI_MODESTO": 11 , # Novell Modesto.
"ELFOSABI_OPENBSD": 12 , # OpenBSD.
"ELFOSABI_ARM_AEABI": 64 , # ARM EABI
"ELFOSABI_ARM": 97 , # ARM
"ELFOSABI_STANDALONE": 255 , # Standalone (embedded) application
"EI_ABIVERSION": 8 , # ABI version
"EI_PAD": 9 , # Byte index of padding bytes
"ET_NONE": 0 , # No file type
"ET_REL": 1 , # Relocatable file
"ET_EXEC": 2 , # Executable file
"ET_DYN": 3 , # Shared object file
"ET_CORE": 4 , # Core file
"ET_NUM": 5 , # Number of defined types
"ET_LOOS": 0xfe00 , # OS-specific range start
"ET_HIOS": 0xfeff , # OS-specific range end
"ET_LOPROC": 0xff00 , # Processor-specific range start
"ET_HIPROC": 0xffff , # Processor-specific range end
"EM_NONE": 0 , # No machine
"EM_M32": 1 , # AT&T WE 32100
"EM_SPARC": 2 , # SUN SPARC
"EM_386": 3 , # Intel 80386
"EM_68K": 4 , # Motorola m68k family
"EM_88K": 5 , # Motorola m88k family
"EM_860": 7 , # Intel 80860
"EM_MIPS": 8 , # MIPS R3000 big-endian
"EM_S370": 9 , # IBM System/370
"EM_MIPS_RS3_LE": 10 , # MIPS R3000 little-endian
"EM_PARISC": 15 , # HPPA
"EM_VPP500": 17 , # Fujitsu VPP500
"EM_SPARC32PLUS": 18 , # Sun's "v8plus"
"EM_960": 19 , # Intel 80960
"EM_PPC": 20 , # PowerPC
"EM_PPC64": 21 , # PowerPC 64-bit
"EM_S390": 22 , # IBM S390
"EM_V800": 36 , # NEC V800 series
"EM_FR20": 37 , # Fujitsu FR20
"EM_RH32": 38 , # TRW RH-32
"EM_RCE": 39 , # Motorola RCE
"EM_ARM": 40 , # ARM
"EM_FAKE_ALPHA": 41 , # Digital Alpha
"EM_SH": 42 , # Hitachi SH
"EM_SPARCV9": 43 , # SPARC v9 64-bit
"EM_TRICORE": 44 , # Siemens Tricore
"EM_ARC": 45 , # Argonaut RISC Core
"EM_H8_300": 46 , # Hitachi H8/300
"EM_H8_300H": 47 , # Hitachi H8/300H
"EM_H8S": 48 , # Hitachi H8S
"EM_H8_500": 49 , # Hitachi H8/500
"EM_IA_64": 50 , # Intel Merced
"EM_MIPS_X": 51 , # Stanford MIPS-X
"EM_COLDFIRE": 52 , # Motorola Coldfire
"EM_68HC12": 53 , # Motorola M68HC12
"EM_MMA": 54 , # Fujitsu MMA Multimedia Accelerator
"EM_PCP": 55 , # Siemens PCP
"EM_NCPU": 56 , # Sony nCPU embeeded RISC
"EM_NDR1": 57 , # Denso NDR1 microprocessor
"EM_STARCORE": 58 , # Motorola Start*Core processor
"EM_ME16": 59 , # Toyota ME16 processor
"EM_ST100": 60 , # STMicroelectronic ST100 processor
"EM_TINYJ": 61 , # Advanced Logic Corp. Tinyj emb.fam
"EM_X86_64": 62 , # AMD x86-64 architecture
"EM_PDSP": 63 , # Sony DSP Processor
"EM_FX66": 66 , # Siemens FX66 microcontroller
"EM_ST9PLUS": 67 , # STMicroelectronics ST9+ 8/16 mc
"EM_ST7": 68 , # STmicroelectronics ST7 8 bit mc
"EM_68HC16": 69 , # Motorola MC68HC16 microcontroller
"EM_68HC11": 70 , # Motorola MC68HC11 microcontroller
"EM_68HC08": 71 , # Motorola MC68HC08 microcontroller
"EM_68HC05": 72 , # Motorola MC68HC05 microcontroller
"EM_SVX": 73 , # Silicon Graphics SVx
"EM_ST19": 74 , # STMicroelectronics ST19 8 bit mc
"EM_VAX": 75 , # Digital VAX
"EM_CRIS": 76 , # Axis Communications 32-bit embedded processor
"EM_JAVELIN": 77 , # Infineon Technologies 32-bit embedded processor
"EM_FIREPATH": 78 , # Element 14 64-bit DSP Processor
"EM_ZSP": 79 , # LSI Logic 16-bit DSP Processor
"EM_MMIX": 80 , # Donald Knuth's educational 64-bit processor
"EM_HUANY": 81 , # Harvard University machine-independent object files
"EM_PRISM": 82 , # SiTera Prism
"EM_AVR": 83 , # Atmel AVR 8-bit microcontroller
"EM_FR30": 84 , # Fujitsu FR30
"EM_D10V": 85 , # Mitsubishi D10V
"EM_D30V": 86 , # Mitsubishi D30V
"EM_V850": 87 , # NEC v850
"EM_M32R": 88 , # Mitsubishi M32R
"EM_MN10300": 89 , # Matsushita MN10300
"EM_MN10200": 90 , # Matsushita MN10200
"EM_PJ": 91 , # picoJava
"EM_OPENRISC": 92 , # OpenRISC 32-bit embedded processor
"EM_ARC_A5": 93 , # ARC Cores Tangent-A5
"EM_XTENSA": 94 , # Tensilica Xtensa Architecture
"EM_NUM": 95 , #
"EM_ALPHA": 0x9026 , #
"EV_NONE": 0 , # Invalid ELF version
"EV_CURRENT": 1 , # Current version
"EV_NUM": 2 , #
"SHN_UNDEF": 0 , # Undefined section
"SHN_LORESERVE": 0xff00 , # Start of reserved indices
"SHN_LOPROC": 0xff00 , # Start of processor-specific
"SHN_BEFORE": 0xff00 , # Order section before all others
"SHN_AFTER": 0xff01 , # Order section after all others
"SHN_HIPROC": 0xff1f , # End of processor-specific
"SHN_LOOS": 0xff20 , # Start of OS-specific
"SHN_HIOS": 0xff3f , # End of OS-specific
"SHN_ABS": 0xfff1 , # Associated symbol is absolute
"SHN_COMMON": 0xfff2 , # Associated symbol is common
"SHN_XINDEX": 0xffff , # Index is in extra table.
"SHN_HIRESERVE": 0xffff , # End of reserved indices
"SHT_NULL": 0 , # Section header table entry unused
"SHT_PROGBITS": 1 , # Program data
"SHT_SYMTAB": 2 , # Symbol table
"SHT_STRTAB": 3 , # String table
"SHT_RELA": 4 , # Relocation entries with addends
"SHT_HASH": 5 , # Symbol hash table
"SHT_DYNAMIC": 6 , # Dynamic linking information
"SHT_NOTE": 7 , # Notes
"SHT_NOBITS": 8 , # Program space with no data (bss)
"SHT_REL": 9 , # Relocation entries, no addends
"SHT_SHLIB": 10 , # Reserved
"SHT_DYNSYM": 11 , # Dynamic linker symbol table
"SHT_INIT_ARRAY": 14 , # Array of constructors
"SHT_FINI_ARRAY": 15 , # Array of destructors
"SHT_PREINIT_ARRAY": 16 , # Array of pre-constructors
"SHT_GROUP": 17 , # Section group
"SHT_SYMTAB_SHNDX": 18 , # Extended section indeces
"SHT_NUM": 19 , # Number of defined types.
"SHT_LOOS": 0x60000000 , # Start OS-specific.
"SHT_GNU_ATTRIBUTES": 0x6ffffff5 , # Object attributes.
"SHT_GNU_HASH": 0x6ffffff6 , # GNU-style hash table.
"SHT_GNU_LIBLIST": 0x6ffffff7 , # Prelink library list
"SHT_CHECKSUM": 0x6ffffff8 , # Checksum for DSO content.
"SHT_LOSUNW": 0x6ffffffa , # Sun-specific low bound.
"SHT_SUNW_move": 0x6ffffffa , #
"SHT_SUNW_COMDAT": 0x6ffffffb , #
"SHT_SUNW_syminfo": 0x6ffffffc , #
"SHT_GNU_verdef": 0x6ffffffd , # Version definition section.
"SHT_GNU_verneed": 0x6ffffffe , # Version needs section.
"SHT_GNU_versym": 0x6fffffff , # Version symbol table.
"SHT_HISUNW": 0x6fffffff , # Sun-specific high bound.
"SHT_HIOS": 0x6fffffff , # End OS-specific type
"SHT_LOPROC": 0x70000000 , # Start of processor-specific
"SHT_HIPROC": 0x7fffffff , # End of processor-specific
"SHT_LOUSER": 0x80000000 , # Start of application-specific
"SHT_HIUSER": 0x8fffffff , # End of application-specific
"SHF_MASKOS": 0x0ff00000 , # OS-specific.
"SHF_MASKPROC": 0xf0000000 , # Processor-specific
"SHF_ORDERED": (1 << 30) , # Special ordering requirement
"SHF_EXCLUDE": (1 << 31) , # Section is excluded unless
"GRP_COMDAT": 0x1 , # Mark group as COMDAT.
"SYMINFO_BT_SELF": 0xffff , # Symbol bound to self
"SYMINFO_BT_PARENT": 0xfffe , # Symbol bound to parent
"SYMINFO_BT_LOWRESERVE": 0xff00 , # Beginning of reserved entries
"SYMINFO_FLG_DIRECT": 0x0001 , # Direct bound symbol
"SYMINFO_FLG_PASSTHRU": 0x0002 , # Pass-thru symbol for translator
"SYMINFO_FLG_COPY": 0x0004 , # Symbol is a copy-reloc
"SYMINFO_FLG_LAZYLOAD": 0x0008 , # Symbol bound to object to be lazy
"SYMINFO_NONE": 0 , #
"SYMINFO_CURRENT": 1 , #
"SYMINFO_NUM": 2 , #
"STB_LOCAL": 0 , # Local symbol
"STB_GLOBAL": 1 , # Global symbol
"STB_WEAK": 2 , # Weak symbol
"STB_NUM": 3 , # Number of defined types.
"STB_LOOS": 10 , # Start of OS-specific
"STB_GNU_UNIQUE": 10 , # Unique symbol.
"STB_HIOS": 12 , # End of OS-specific
"STB_LOPROC": 13 , # Start of processor-specific
"STB_HIPROC": 15 , # End of processor-specific
"STT_NOTYPE": 0 , # Symbol type is unspecified
"STT_OBJECT": 1 , # Symbol is a data object
"STT_FUNC": 2 , # Symbol is a code object
"STT_SECTION": 3 , # Symbol associated with a section
"STT_FILE": 4 , # Symbol's name is file name
"STT_COMMON": 5 , # Symbol is a common data object
"STT_TLS": 6 , # Symbol is thread-local data object
"STT_NUM": 7 , # Number of defined types.
"STT_LOOS": 10 , # Start of OS-specific
"STT_GNU_IFUNC": 10 , # Symbol is indirect code object
"STT_HIOS": 12 , # End of OS-specific
"STT_LOPROC": 13 , # Start of processor-specific
"STT_HIPROC": 15 , # End of processor-specific
"STN_UNDEF": 0 , # End of a chain.
"STV_DEFAULT": 0 , # Default symbol visibility rules
"STV_INTERNAL": 1 , # Processor specific hidden class
"STV_HIDDEN": 2 , # Sym unavailable in other modules
"STV_PROTECTED": 3 , # Not preemptible, not exported
"PN_XNUM": 0xffff , #
"PT_NULL": 0 , # Program header table entry unused
"PT_LOAD": 1 , # Loadable program segment
"PT_DYNAMIC": 2 , # Dynamic linking information
"PT_INTERP": 3 , # Program interpreter
"PT_NOTE": 4 , # Auxiliary information
"PT_SHLIB": 5 , # Reserved
"PT_PHDR": 6 , # Entry for header table itself
"PT_TLS": 7 , # Thread-local storage segment
"PT_NUM": 8 , # Number of defined types
"PT_LOOS": 0x60000000 , # Start of OS-specific
"PT_GNU_EH_FRAME": 0x6474e550 , # GCC .eh_frame_hdr segment
"PT_GNU_STACK": 0x6474e551 , # Indicates stack executability
"PT_GNU_RELRO": 0x6474e552 , # Read-only after relocation
"PT_LOSUNW": 0x6ffffffa , #
"PT_SUNWBSS": 0x6ffffffa , # Sun Specific segment
"PT_SUNWSTACK": 0x6ffffffb , # Stack segment
"PT_HISUNW": 0x6fffffff , #
"PT_HIOS": 0x6fffffff , # End of OS-specific
"PT_LOPROC": 0x70000000 , # Start of processor-specific
"PT_HIPROC": 0x7fffffff , # End of processor-specific
"PF_X": (1 << 0) , # Segment is executable
"PF_W": (1 << 1) , # Segment is writable
"PF_R": (1 << 2) , # Segment is readable
"PF_MASKOS": 0x0ff00000 , # OS-specific
"PF_MASKPROC": 0xf0000000 , # Processor-specific
"NT_PRSTATUS": 1 , # Contains copy of prstatus struct
"NT_FPREGSET": 2 , # Contains copy of fpregset struct
"NT_PRPSINFO": 3 , # Contains copy of prpsinfo struct
"NT_PRXREG": 4 , # Contains copy of prxregset struct
"NT_TASKSTRUCT": 4 , # Contains copy of task structure
"NT_PLATFORM": 5 , # String from sysinfo(SI_PLATFORM)
"NT_AUXV": 6 , # Contains copy of auxv array
"NT_GWINDOWS": 7 , # Contains copy of gwindows struct
"NT_ASRS": 8 , # Contains copy of asrset struct
"NT_PSTATUS": 10 , # Contains copy of pstatus struct
"NT_PSINFO": 13 , # Contains copy of psinfo struct
"NT_PRCRED": 14 , # Contains copy of prcred struct
"NT_UTSNAME": 15 , # Contains copy of utsname struct
"NT_LWPSTATUS": 16 , # Contains copy of lwpstatus struct
"NT_LWPSINFO": 17 , # Contains copy of lwpinfo struct
"NT_PRFPXREG": 20 , # Contains copy of fprxregset struct
"NT_PRXFPREG": 0x46e62b7f , # Contains copy of user_fxsr_struct
"NT_PPC_VMX": 0x100 , # PowerPC Altivec/VMX registers
"NT_PPC_SPE": 0x101 , # PowerPC SPE/EVR registers
"NT_PPC_VSX": 0x102 , # PowerPC VSX registers
"NT_386_TLS": 0x200 , # i386 TLS slots (struct user_desc)
"NT_386_IOPERM": 0x201 , # x86 io permission bitmap (1=deny)
"NT_X86_XSTATE": 0x202 , # x86 extended state using xsave
"NT_VERSION": 1 , # Contains a version string.
"DT_NULL": 0 , # Marks end of dynamic section
"DT_NEEDED": 1 , # Name of needed library
"DT_PLTRELSZ": 2 , # Size in bytes of PLT relocs
"DT_PLTGOT": 3 , # Processor defined value
"DT_HASH": 4 , # Address of symbol hash table
"DT_STRTAB": 5 , # Address of string table
"DT_SYMTAB": 6 , # Address of symbol table
"DT_RELA": 7 , # Address of Rela relocs
"DT_RELASZ": 8 , # Total size of Rela relocs
"DT_RELAENT": 9 , # Size of one Rela reloc
"DT_STRSZ": 10 , # Size of string table
"DT_SYMENT": 11 , # Size of one symbol table entry
"DT_INIT": 12 , # Address of init function
"DT_FINI": 13 , # Address of termination function
"DT_SONAME": 14 , # Name of shared object
"DT_RPATH": 15 , # Library search path (deprecated)
"DT_SYMBOLIC": 16 , # Start symbol search here
"DT_REL": 17 , # Address of Rel relocs
"DT_RELSZ": 18 , # Total size of Rel relocs
"DT_RELENT": 19 , # Size of one Rel reloc
"DT_PLTREL": 20 , # Type of reloc in PLT
"DT_DEBUG": 21 , # For debugging; unspecified
"DT_TEXTREL": 22 , # Reloc might modify .text
"DT_JMPREL": 23 , # Address of PLT relocs
"DT_BIND_NOW": 24 , # Process relocations of object
"DT_INIT_ARRAY": 25 , # Array with addresses of init fct
"DT_FINI_ARRAY": 26 , # Array with addresses of fini fct
"DT_INIT_ARRAYSZ": 27 , # Size in bytes of DT_INIT_ARRAY
"DT_FINI_ARRAYSZ": 28 , # Size in bytes of DT_FINI_ARRAY
"DT_RUNPATH": 29 , # Library search path
"DT_FLAGS": 30 , # Flags for the object being loaded
"DT_ENCODING": 32 , # Start of encoded range
"DT_PREINIT_ARRAY": 32 , # Array with addresses of preinit fct
"DT_PREINIT_ARRAYSZ": 33 , # size in bytes of DT_PREINIT_ARRAY
"DT_NUM": 34 , # Number used
"DT_LOOS": 0x6000000d , # Start of OS-specific
"DT_HIOS": 0x6ffff000 , # End of OS-specific
"DT_LOPROC": 0x70000000 , # Start of processor-specific
"DT_HIPROC": 0x7fffffff , # End of processor-specific
"DT_PROCNUM": MacroRef("DT_MIPS_NUM") , # Most used by any processor
"DT_VALRNGLO": 0x6ffffd00 , #
"DT_GNU_PRELINKED": 0x6ffffdf5 , # Prelinking timestamp
"DT_GNU_CONFLICTSZ": 0x6ffffdf6 , # Size of conflict section
"DT_GNU_LIBLISTSZ": 0x6ffffdf7 , # Size of library list
"DT_CHECKSUM": 0x6ffffdf8 , #
"DT_PLTPADSZ": 0x6ffffdf9 , #
"DT_MOVEENT": 0x6ffffdfa , #
"DT_MOVESZ": 0x6ffffdfb , #
"DT_FEATURE_1": 0x6ffffdfc , # Feature selection (DTF_*).
"DT_POSFLAG_1": 0x6ffffdfd , # Flags for DT_* entries, effecting
"DT_SYMINSZ": 0x6ffffdfe , # Size of syminfo table (in bytes)
"DT_SYMINENT": 0x6ffffdff , # Entry size of syminfo
"DT_VALRNGHI": 0x6ffffdff , #
"DT_VALNUM": 12 , #
"DT_ADDRRNGLO": 0x6ffffe00 , #
"DT_GNU_HASH": 0x6ffffef5 , # GNU-style hash table.
"DT_TLSDESC_PLT": 0x6ffffef6 , #
"DT_TLSDESC_GOT": 0x6ffffef7 , #
"DT_GNU_CONFLICT": 0x6ffffef8 , # Start of conflict section
"DT_GNU_LIBLIST": 0x6ffffef9 , # Library list
"DT_CONFIG": 0x6ffffefa , # Configuration information.
"DT_DEPAUDIT": 0x6ffffefb , # Dependency auditing.
"DT_AUDIT": 0x6ffffefc , # Object auditing.
"DT_PLTPAD": 0x6ffffefd , # PLT padding.
"DT_MOVETAB": 0x6ffffefe , # Move table.
"DT_SYMINFO": 0x6ffffeff , # Syminfo table.
"DT_ADDRRNGHI": 0x6ffffeff , #
"DT_ADDRNUM": 11 , #
"DT_VERSYM": 0x6ffffff0 , #
"DT_RELACOUNT": 0x6ffffff9 , #
"DT_RELCOUNT": 0x6ffffffa , #
"DT_FLAGS_1": 0x6ffffffb , # State flags, see DF_1_* below.
"DT_VERDEF": 0x6ffffffc , # Address of version definition
"DT_VERDEFNUM": 0x6ffffffd , # Number of version definitions
"DT_VERNEED": 0x6ffffffe , # Address of table with needed
"DT_VERNEEDNUM": 0x6fffffff , # Number of needed versions
"DT_VERSIONTAGNUM": 16 , #
"DT_AUXILIARY": 0x7ffffffd , # Shared object to load before self
"DT_FILTER": 0x7fffffff , # Shared object to get values from
"DT_EXTRANUM": 3 , #
"DF_ORIGIN": 0x00000001 , # Object may use DF_ORIGIN
"DF_SYMBOLIC": 0x00000002 , # Symbol resolutions starts here
"DF_TEXTREL": 0x00000004 , # Object contains text relocations
"DF_BIND_NOW": 0x00000008 , # No lazy binding for this object
"DF_STATIC_TLS": 0x00000010 , # Module uses the static TLS model
"DF_1_NOW": 0x00000001 , # Set RTLD_NOW for this object.
"DF_1_GLOBAL": 0x00000002 , # Set RTLD_GLOBAL for this object.
"DF_1_GROUP": 0x00000004 , # Set RTLD_GROUP for this object.
"DF_1_NODELETE": 0x00000008 , # Set RTLD_NODELETE for this object.
"DF_1_LOADFLTR": 0x00000010 , # Trigger filtee loading at runtime.
"DF_1_INITFIRST": 0x00000020 , # Set RTLD_INITFIRST for this object
"DF_1_NOOPEN": 0x00000040 , # Set RTLD_NOOPEN for this object.
"DF_1_ORIGIN": 0x00000080 , # $ORIGIN must be handled.
"DF_1_DIRECT": 0x00000100 , # Direct binding enabled.
"DF_1_TRANS": 0x00000200 , #
"DF_1_INTERPOSE": 0x00000400 , # Object is used to interpose.
"DF_1_NODEFLIB": 0x00000800 , # Ignore default lib search path.
"DF_1_NODUMP": 0x00001000 , # Object can't be dldump'ed.
"DF_1_CONFALT": 0x00002000 , # Configuration alternative created.
"DF_1_ENDFILTEE": 0x00004000 , # Filtee terminates filters search.
"DF_1_DISPRELDNE": 0x00008000 , # Disp reloc applied at build time.
"DF_1_DISPRELPND": 0x00010000 , # Disp reloc applied at run-time.
"DTF_1_PARINIT": 0x00000001 , #
"DTF_1_CONFEXP": 0x00000002 , #
"DF_P1_LAZYLOAD": 0x00000001 , # Lazyload following object.
"DF_P1_GROUPPERM": 0x00000002 , # Symbols from next object are not
"VER_DEF_NONE": 0 , # No version
"VER_DEF_CURRENT": 1 , # Current version
"VER_DEF_NUM": 2 , # Given version number
"VER_FLG_BASE": 0x1 , # Version definition of file itself
"VER_FLG_WEAK": 0x2 , # Weak version identifier
"VER_NDX_LOCAL": 0 , # Symbol is local.
"VER_NDX_GLOBAL": 1 , # Symbol is global.
"VER_NDX_LORESERVE": 0xff00 , # Beginning of reserved entries.
"VER_NDX_ELIMINATE": 0xff01 , # Symbol is to be eliminated.
"VER_NEED_NONE": 0 , # No version
"VER_NEED_CURRENT": 1 , # Current version
"VER_NEED_NUM": 2 , # Given version number
"VER_FLG_WEAK": 0x2 , # Weak version identifier
"AT_NULL": 0 , # End of vector
"AT_IGNORE": 1 , # Entry should be ignored
"AT_EXECFD": 2 , # File descriptor of program
"AT_PHDR": 3 , # Program headers for program
"AT_PHENT": 4 , # Size of program header entry
"AT_PHNUM": 5 , # Number of program headers
"AT_PAGESZ": 6 , # System page size
"AT_BASE": 7 , # Base address of interpreter
"AT_FLAGS": 8 , # Flags
"AT_ENTRY": 9 , # Entry point of program
"AT_NOTELF": 10 , # Program is not ELF
"AT_UID": 11 , # Real uid
"AT_EUID": 12 , # Effective uid
"AT_GID": 13 , # Real gid
"AT_EGID": 14 , # Effective gid
"AT_CLKTCK": 17 , # Frequency of times()
"AT_PLATFORM": 15 , # String identifying platform.
"AT_HWCAP": 16 , # Machine dependent hints about
"AT_FPUCW": 18 , # Used FPU control word.
"AT_DCACHEBSIZE": 19 , # Data cache block size.
"AT_ICACHEBSIZE": 20 , # Instruction cache block size.
"AT_UCACHEBSIZE": 21 , # Unified cache block size.
"AT_IGNOREPPC": 22 , # Entry should be ignored.
"AT_SECURE": 23 , # Boolean, was exec setuid-like?
"AT_BASE_PLATFORM": 24 , # String identifying real platforms.
"AT_RANDOM": 25 , # Address of 16 random bytes.
"AT_EXECFN": 31 , # Filename of executable.
"AT_SYSINFO": 32 , #
"AT_SYSINFO_EHDR": 33 , #
"AT_L1I_CACHESHAPE": 34 , #
"AT_L1D_CACHESHAPE": 35 , #
"AT_L2_CACHESHAPE": 36 , #
"AT_L3_CACHESHAPE": 37 , #
"ELF_NOTE_SOLARIS": "SUNW Solaris" , #
"ELF_NOTE_GNU": "GNU" , #
"ELF_NOTE_PAGESIZE_HINT": 1 , #
"NT_GNU_ABI_TAG": 1 , #
"ELF_NOTE_ABI": MacroRef("NT_GNU_ABI_TAG") , # Old name.
"ELF_NOTE_OS_LINUX": 0 , #
"ELF_NOTE_OS_GNU": 1 , #
"ELF_NOTE_OS_SOLARIS2": 2 , #
"ELF_NOTE_OS_FREEBSD": 3 , #
"NT_GNU_HWCAP": 2 , #
"NT_GNU_BUILD_ID": 3 , #
"NT_GNU_GOLD_VERSION": 4 , #
"EF_CPU32": 0x00810000 , #
"R_68K_NONE": 0 , # No reloc
"R_68K_32": 1 , # Direct 32 bit
"R_68K_16": 2 , # Direct 16 bit
"R_68K_8": 3 , # Direct 8 bit
"R_68K_PC32": 4 , # PC relative 32 bit
"R_68K_PC16": 5 , # PC relative 16 bit
"R_68K_PC8": 6 , # PC relative 8 bit
"R_68K_GOT32": 7 , # 32 bit PC relative GOT entry
"R_68K_GOT16": 8 , # 16 bit PC relative GOT entry
"R_68K_GOT8": 9 , # 8 bit PC relative GOT entry
"R_68K_GOT32O": 10 , # 32 bit GOT offset
"R_68K_GOT16O": 11 , # 16 bit GOT offset
"R_68K_GOT8O": 12 , # 8 bit GOT offset
"R_68K_PLT32": 13 , # 32 bit PC relative PLT address
"R_68K_PLT16": 14 , # 16 bit PC relative PLT address
"R_68K_PLT8": 15 , # 8 bit PC relative PLT address
"R_68K_PLT32O": 16 , # 32 bit PLT offset
"R_68K_PLT16O": 17 , # 16 bit PLT offset
"R_68K_PLT8O": 18 , # 8 bit PLT offset
"R_68K_COPY": 19 , # Copy symbol at runtime
"R_68K_GLOB_DAT": 20 , # Create GOT entry
"R_68K_JMP_SLOT": 21 , # Create PLT entry
"R_68K_RELATIVE": 22 , # Adjust by program base
"R_68K_TLS_GD32": 25 , # 32 bit GOT offset for GD
"R_68K_TLS_GD16": 26 , # 16 bit GOT offset for GD
"R_68K_TLS_GD8": 27 , # 8 bit GOT offset for GD
"R_68K_TLS_LDM32": 28 , # 32 bit GOT offset for LDM
"R_68K_TLS_LDM16": 29 , # 16 bit GOT offset for LDM
"R_68K_TLS_LDM8": 30 , # 8 bit GOT offset for LDM
"R_68K_TLS_LDO32": 31 , # 32 bit module-relative offset
"R_68K_TLS_LDO16": 32 , # 16 bit module-relative offset
"R_68K_TLS_LDO8": 33 , # 8 bit module-relative offset
"R_68K_TLS_IE32": 34 , # 32 bit GOT offset for IE
"R_68K_TLS_IE16": 35 , # 16 bit GOT offset for IE
"R_68K_TLS_IE8": 36 , # 8 bit GOT offset for IE
"R_68K_TLS_LE32": 37 , # 32 bit offset relative to
"R_68K_TLS_LE16": 38 , # 16 bit offset relative to
"R_68K_TLS_LE8": 39 , # 8 bit offset relative to
"R_68K_TLS_DTPMOD32": 40 , # 32 bit module number
"R_68K_TLS_DTPREL32": 41 , # 32 bit module-relative offset
"R_68K_TLS_TPREL32": 42 , # 32 bit TP-relative offset
"R_68K_NUM": 43 , #
"R_386_NONE": 0 , # No reloc
"R_386_32": 1 , # Direct 32 bit
"R_386_PC32": 2 , # PC relative 32 bit
"R_386_GOT32": 3 , # 32 bit GOT entry
"R_386_PLT32": 4 , # 32 bit PLT address
"R_386_COPY": 5 , # Copy symbol at runtime
"R_386_GLOB_DAT": 6 , # Create GOT entry
"R_386_JMP_SLOT": 7 , # Create PLT entry
"R_386_RELATIVE": 8 , # Adjust by program base
"R_386_GOTOFF": 9 , # 32 bit offset to GOT
"R_386_GOTPC": 10 , # 32 bit PC relative offset to GOT
"R_386_32PLT": 11 , #
"R_386_TLS_TPOFF": 14 , # Offset in static TLS block
"R_386_TLS_IE": 15 , # Address of GOT entry for static TLS
"R_386_TLS_GOTIE": 16 , # GOT entry for static TLS block
"R_386_TLS_LE": 17 , # Offset relative to static TLS
"R_386_TLS_GD": 18 , # Direct 32 bit for GNU version of
"R_386_TLS_LDM": 19 , # Direct 32 bit for GNU version of
"R_386_16": 20 , #
"R_386_PC16": 21 , #
"R_386_8": 22 , #
"R_386_PC8": 23 , #
"R_386_TLS_GD_32": 24 , # Direct 32 bit for general dynamic
"R_386_TLS_GD_PUSH": 25 , # Tag for pushl in GD TLS code
"R_386_TLS_GD_CALL": 26 , # Relocation for call to
"R_386_TLS_GD_POP": 27 , # Tag for popl in GD TLS code
"R_386_TLS_LDM_32": 28 , # Direct 32 bit for local dynamic
"R_386_TLS_LDM_PUSH": 29 , # Tag for pushl in LDM TLS code
"R_386_TLS_LDM_CALL": 30 , # Relocation for call to
"R_386_TLS_LDM_POP": 31 , # Tag for popl in LDM TLS code
"R_386_TLS_LDO_32": 32 , # Offset relative to TLS block
"R_386_TLS_IE_32": 33 , # GOT entry for negated static TLS
"R_386_TLS_LE_32": 34 , # Negated offset relative to static
"R_386_TLS_DTPMOD32": 35 , # ID of module containing symbol
"R_386_TLS_DTPOFF32": 36 , # Offset in TLS block
"R_386_TLS_TPOFF32": 37 , # Negated offset in static TLS block
"R_386_TLS_GOTDESC": 39 , # GOT offset for TLS descriptor.
"R_386_TLS_DESC_CALL": 40 , # Marker of call through TLS
"R_386_TLS_DESC": 41 , # TLS descriptor containing
"R_386_IRELATIVE": 42 , # Adjust indirectly by program base
"R_386_NUM": 43 , #
"STT_SPARC_REGISTER": 13 , # Global register reserved to app.
"EF_SPARCV9_MM": 3 , #
"EF_SPARCV9_TSO": 0 , #
"EF_SPARCV9_PSO": 1 , #
"EF_SPARCV9_RMO": 2 , #
"EF_SPARC_LEDATA": 0x800000 , # little endian data
"EF_SPARC_EXT_MASK": 0xFFFF00 , #
"EF_SPARC_32PLUS": 0x000100 , # generic V8+ features
"EF_SPARC_SUN_US1": 0x000200 , # Sun UltraSPARC1 extensions
"EF_SPARC_HAL_R1": 0x000400 , # HAL R1 extensions
"EF_SPARC_SUN_US3": 0x000800 , # Sun UltraSPARCIII extensions
"R_SPARC_NONE": 0 , # No reloc
"R_SPARC_8": 1 , # Direct 8 bit
"R_SPARC_16": 2 , # Direct 16 bit
"R_SPARC_32": 3 , # Direct 32 bit
"R_SPARC_DISP8": 4 , # PC relative 8 bit
"R_SPARC_DISP16": 5 , # PC relative 16 bit
"R_SPARC_DISP32": 6 , # PC relative 32 bit
"R_SPARC_WDISP30": 7 , # PC relative 30 bit shifted
"R_SPARC_WDISP22": 8 , # PC relative 22 bit shifted
"R_SPARC_HI22": 9 , # High 22 bit
"R_SPARC_22": 10 , # Direct 22 bit
"R_SPARC_13": 11 , # Direct 13 bit
"R_SPARC_LO10": 12 , # Truncated 10 bit
"R_SPARC_GOT10": 13 , # Truncated 10 bit GOT entry
"R_SPARC_GOT13": 14 , # 13 bit GOT entry
"R_SPARC_GOT22": 15 , # 22 bit GOT entry shifted
"R_SPARC_PC10": 16 , # PC relative 10 bit truncated
"R_SPARC_PC22": 17 , # PC relative 22 bit shifted
"R_SPARC_WPLT30": 18 , # 30 bit PC relative PLT address
"R_SPARC_COPY": 19 , # Copy symbol at runtime
"R_SPARC_GLOB_DAT": 20 , # Create GOT entry
"R_SPARC_JMP_SLOT": 21 , # Create PLT entry
"R_SPARC_RELATIVE": 22 , # Adjust by program base
"R_SPARC_UA32": 23 , # Direct 32 bit unaligned
"R_SPARC_PLT32": 24 , # Direct 32 bit ref to PLT entry
"R_SPARC_HIPLT22": 25 , # High 22 bit PLT entry
"R_SPARC_LOPLT10": 26 , # Truncated 10 bit PLT entry
"R_SPARC_PCPLT32": 27 , # PC rel 32 bit ref to PLT entry
"R_SPARC_PCPLT22": 28 , # PC rel high 22 bit PLT entry
"R_SPARC_PCPLT10": 29 , # PC rel trunc 10 bit PLT entry
"R_SPARC_10": 30 , # Direct 10 bit
"R_SPARC_11": 31 , # Direct 11 bit
"R_SPARC_64": 32 , # Direct 64 bit
"R_SPARC_OLO10": 33 , # 10bit with secondary 13bit addend
"R_SPARC_HH22": 34 , # Top 22 bits of direct 64 bit
"R_SPARC_HM10": 35 , # High middle 10 bits of ...
"R_SPARC_LM22": 36 , # Low middle 22 bits of ...
"R_SPARC_PC_HH22": 37 , # Top 22 bits of pc rel 64 bit
"R_SPARC_PC_HM10": 38 , # High middle 10 bit of ...
"R_SPARC_PC_LM22": 39 , # Low miggle 22 bits of ...
"R_SPARC_WDISP16": 40 , # PC relative 16 bit shifted
"R_SPARC_WDISP19": 41 , # PC relative 19 bit shifted
"R_SPARC_GLOB_JMP": 42 , # was part of v9 ABI but was removed
"R_SPARC_7": 43 , # Direct 7 bit
"R_SPARC_5": 44 , # Direct 5 bit
"R_SPARC_6": 45 , # Direct 6 bit
"R_SPARC_DISP64": 46 , # PC relative 64 bit
"R_SPARC_PLT64": 47 , # Direct 64 bit ref to PLT entry
"R_SPARC_HIX22": 48 , # High 22 bit complemented
"R_SPARC_LOX10": 49 , # Truncated 11 bit complemented
"R_SPARC_H44": 50 , # Direct high 12 of 44 bit
"R_SPARC_M44": 51 , # Direct mid 22 of 44 bit
"R_SPARC_L44": 52 , # Direct low 10 of 44 bit
"R_SPARC_REGISTER": 53 , # Global register usage
"R_SPARC_UA64": 54 , # Direct 64 bit unaligned
"R_SPARC_UA16": 55 , # Direct 16 bit unaligned
"R_SPARC_TLS_GD_HI22": 56 , #
"R_SPARC_TLS_GD_LO10": 57 , #
"R_SPARC_TLS_GD_ADD": 58 , #
"R_SPARC_TLS_GD_CALL": 59 , #
"R_SPARC_TLS_LDM_HI22": 60 , #
"R_SPARC_TLS_LDM_LO10": 61 , #
"R_SPARC_TLS_LDM_ADD": 62 , #
"R_SPARC_TLS_LDM_CALL": 63 , #
"R_SPARC_TLS_LDO_HIX22": 64 , #
"R_SPARC_TLS_LDO_LOX10": 65 , #
"R_SPARC_TLS_LDO_ADD": 66 , #
"R_SPARC_TLS_IE_HI22": 67 , #
"R_SPARC_TLS_IE_LO10": 68 , #
"R_SPARC_TLS_IE_LD": 69 , #
"R_SPARC_TLS_IE_LDX": 70 , #
"R_SPARC_TLS_IE_ADD": 71 , #
"R_SPARC_TLS_LE_HIX22": 72 , #
"R_SPARC_TLS_LE_LOX10": 73 , #
"R_SPARC_TLS_DTPMOD32": 74 , #
"R_SPARC_TLS_DTPMOD64": 75 , #
"R_SPARC_TLS_DTPOFF32": 76 , #
"R_SPARC_TLS_DTPOFF64": 77 , #
"R_SPARC_TLS_TPOFF32": 78 , #
"R_SPARC_TLS_TPOFF64": 79 , #
"R_SPARC_GOTDATA_HIX22": 80 , #
"R_SPARC_GOTDATA_LOX10": 81 , #
"R_SPARC_GOTDATA_OP_HIX22": 82 , #
"R_SPARC_GOTDATA_OP_LOX10": 83 , #
"R_SPARC_GOTDATA_OP": 84 , #
"R_SPARC_H34": 85 , #
"R_SPARC_SIZE32": 86 , #
"R_SPARC_SIZE64": 87 , #
"R_SPARC_JMP_IREL": 248 , #
"R_SPARC_IRELATIVE": 249 , #
"R_SPARC_GNU_VTINHERIT": 250 , #
"R_SPARC_GNU_VTENTRY": 251 , #
"R_SPARC_REV32": 252 , #
"R_SPARC_NUM": 253 , #
"DT_SPARC_REGISTER": 0x70000001 , #
"DT_SPARC_NUM": 2 , #
"EF_MIPS_NOREORDER": 1 , # A .noreorder directive was used
"EF_MIPS_PIC": 2 , # Contains PIC code
"EF_MIPS_CPIC": 4 , # Uses PIC calling sequence
"EF_MIPS_XGOT": 8 , #
"EF_MIPS_64BIT_WHIRL": 16 , #
"EF_MIPS_ABI2": 32 , #
"EF_MIPS_ABI_ON32": 64 , #
"EF_MIPS_ARCH": 0xf0000000 , # MIPS architecture level
"EF_MIPS_ARCH_1": 0x00000000 , # -mips1 code.
"EF_MIPS_ARCH_2": 0x10000000 , # -mips2 code.
"EF_MIPS_ARCH_3": 0x20000000 , # -mips3 code.
"EF_MIPS_ARCH_4": 0x30000000 , # -mips4 code.
"EF_MIPS_ARCH_5": 0x40000000 , # -mips5 code.
"EF_MIPS_ARCH_32": 0x60000000 , # MIPS32 code.
"EF_MIPS_ARCH_64": 0x70000000 , # MIPS64 code.
"E_MIPS_ARCH_1": 0x00000000 , # -mips1 code.
"E_MIPS_ARCH_2": 0x10000000 , # -mips2 code.
"E_MIPS_ARCH_3": 0x20000000 , # -mips3 code.
"E_MIPS_ARCH_4": 0x30000000 , # -mips4 code.
"E_MIPS_ARCH_5": 0x40000000 , # -mips5 code.
"E_MIPS_ARCH_32": 0x60000000 , # MIPS32 code.
"E_MIPS_ARCH_64": 0x70000000 , # MIPS64 code.
"SHN_MIPS_ACOMMON": 0xff00 , # Allocated common symbols
"SHN_MIPS_TEXT": 0xff01 , # Allocated test symbols.
"SHN_MIPS_DATA": 0xff02 , # Allocated data symbols.
"SHN_MIPS_SCOMMON": 0xff03 , # Small common symbols
"SHN_MIPS_SUNDEFINED": 0xff04 , # Small undefined symbols
"SHT_MIPS_LIBLIST": 0x70000000 , # Shared objects used in link
"SHT_MIPS_MSYM": 0x70000001 , #
"SHT_MIPS_CONFLICT": 0x70000002 , # Conflicting symbols
"SHT_MIPS_GPTAB": 0x70000003 , # Global data area sizes
"SHT_MIPS_UCODE": 0x70000004 , # Reserved for SGI/MIPS compilers
"SHT_MIPS_DEBUG": 0x70000005 , # MIPS ECOFF debugging information
"SHT_MIPS_REGINFO": 0x70000006 , # Register usage information
"SHT_MIPS_PACKAGE": 0x70000007 , #
"SHT_MIPS_PACKSYM": 0x70000008 , #
"SHT_MIPS_RELD": 0x70000009 , #
"SHT_MIPS_IFACE": 0x7000000b , #
"SHT_MIPS_CONTENT": 0x7000000c , #
"SHT_MIPS_OPTIONS": 0x7000000d , # Miscellaneous options.
"SHT_MIPS_SHDR": 0x70000010 , #
"SHT_MIPS_FDESC": 0x70000011 , #
"SHT_MIPS_EXTSYM": 0x70000012 , #
"SHT_MIPS_DENSE": 0x70000013 , #
"SHT_MIPS_PDESC": 0x70000014 , #
"SHT_MIPS_LOCSYM": 0x70000015 , #
"SHT_MIPS_AUXSYM": 0x70000016 , #
"SHT_MIPS_OPTSYM": 0x70000017 , #
"SHT_MIPS_LOCSTR": 0x70000018 , #
"SHT_MIPS_LINE": 0x70000019 , #
"SHT_MIPS_RFDESC": 0x7000001a , #
"SHT_MIPS_DELTASYM": 0x7000001b , #
"SHT_MIPS_DELTAINST": 0x7000001c , #
"SHT_MIPS_DELTACLASS": 0x7000001d , #
"SHT_MIPS_DWARF": 0x7000001e , # DWARF debugging information.
"SHT_MIPS_DELTADECL": 0x7000001f , #
"SHT_MIPS_SYMBOL_LIB": 0x70000020 , #
"SHT_MIPS_EVENTS": 0x70000021 , # Event section.
"SHT_MIPS_TRANSLATE": 0x70000022 , #
"SHT_MIPS_PIXIE": 0x70000023 , #
"SHT_MIPS_XLATE": 0x70000024 , #
"SHT_MIPS_XLATE_DEBUG": 0x70000025 , #
"SHT_MIPS_WHIRL": 0x70000026 , #
"SHT_MIPS_EH_REGION": 0x70000027 , #
"SHT_MIPS_XLATE_OLD": 0x70000028 , #
"SHT_MIPS_PDR_EXCEPTION": 0x70000029 , #
"SHF_MIPS_GPREL": 0x10000000 , # Must be part of global data area
"SHF_MIPS_MERGE": 0x20000000 , #
"SHF_MIPS_ADDR": 0x40000000 , #
"SHF_MIPS_STRINGS": 0x80000000 , #
"SHF_MIPS_NOSTRIP": 0x08000000 , #
"SHF_MIPS_LOCAL": 0x04000000 , #
"SHF_MIPS_NAMES": 0x02000000 , #
"SHF_MIPS_NODUPE": 0x01000000 , #
"STO_MIPS_DEFAULT": 0x0 , #
"STO_MIPS_INTERNAL": 0x1 , #
"STO_MIPS_HIDDEN": 0x2 , #
"STO_MIPS_PROTECTED": 0x3 , #
"STO_MIPS_PLT": 0x8 , #
"STO_MIPS_SC_ALIGN_UNUSED": 0xff , #
"STB_MIPS_SPLIT_COMMON": 13 , #
"ODK_NULL": 0 , # Undefined.
"ODK_REGINFO": 1 , # Register usage information.
"ODK_EXCEPTIONS": 2 , # Exception processing options.
"ODK_PAD": 3 , # Section padding options.
"ODK_HWPATCH": 4 , # Hardware workarounds performed
"ODK_FILL": 5 , # record the fill value used by the linker.
"ODK_TAGS": 6 , # reserve space for desktop tools to write.
"ODK_HWAND": 7 , # HW workarounds. 'AND' bits when merging.
"ODK_HWOR": 8 , # HW workarounds. 'OR' bits when merging.
"OEX_FPU_MIN": 0x1f , # FPE's which MUST be enabled.
"OEX_FPU_MAX": 0x1f00 , # FPE's which MAY be enabled.
"OEX_PAGE0": 0x10000 , # page zero must be mapped.
"OEX_SMM": 0x20000 , # Force sequential memory mode?
"OEX_FPDBUG": 0x40000 , # Force floating point debug mode?
"OEX_PRECISEFP": MacroRef("OEX_FPDBUG") , #
"OEX_DISMISS": 0x80000 , # Dismiss invalid address faults?
"OEX_FPU_INVAL": 0x10 , #
"OEX_FPU_DIV0": 0x08 , #
"OEX_FPU_OFLO": 0x04 , #
"OEX_FPU_UFLO": 0x02 , #
"OEX_FPU_INEX": 0x01 , #
"OHW_R4KEOP": 0x1 , # R4000 end-of-page patch.
"OHW_R8KPFETCH": 0x2 , # may need R8000 prefetch patch.
"OHW_R5KEOP": 0x4 , # R5000 end-of-page patch.
"OHW_R5KCVTL": 0x8 , # R5000 cvt.[ds].l bug. clean=1.
"OPAD_PREFIX": 0x1 , #
"OPAD_POSTFIX": 0x2 , #
"OPAD_SYMBOL": 0x4 , #
"OHWA0_R4KEOP_CHECKED": 0x00000001 , #
"OHWA1_R4KEOP_CLEAN": 0x00000002 , #
"R_MIPS_NONE": 0 , # No reloc
"R_MIPS_16": 1 , # Direct 16 bit
"R_MIPS_32": 2 , # Direct 32 bit
"R_MIPS_REL32": 3 , # PC relative 32 bit
"R_MIPS_26": 4 , # Direct 26 bit shifted
"R_MIPS_HI16": 5 , # High 16 bit
"R_MIPS_LO16": 6 , # Low 16 bit
"R_MIPS_GPREL16": 7 , # GP relative 16 bit
"R_MIPS_LITERAL": 8 , # 16 bit literal entry
"R_MIPS_GOT16": 9 , # 16 bit GOT entry
"R_MIPS_PC16": 10 , # PC relative 16 bit
"R_MIPS_CALL16": 11 , # 16 bit GOT entry for function
"R_MIPS_GPREL32": 12 , # GP relative 32 bit
"R_MIPS_SHIFT5": 16 , #
"R_MIPS_SHIFT6": 17 , #
"R_MIPS_64": 18 , #
"R_MIPS_GOT_DISP": 19 , #
"R_MIPS_GOT_PAGE": 20 , #
"R_MIPS_GOT_OFST": 21 , #
"R_MIPS_GOT_HI16": 22 , #
"R_MIPS_GOT_LO16": 23 , #
"R_MIPS_SUB": 24 , #
"R_MIPS_INSERT_A": 25 , #
"R_MIPS_INSERT_B": 26 , #
"R_MIPS_DELETE": 27 , #
"R_MIPS_HIGHER": 28 , #
"R_MIPS_HIGHEST": 29 , #
"R_MIPS_CALL_HI16": 30 , #
"R_MIPS_CALL_LO16": 31 , #
"R_MIPS_SCN_DISP": 32 , #
"R_MIPS_REL16": 33 , #
"R_MIPS_ADD_IMMEDIATE": 34 , #
"R_MIPS_PJUMP": 35 , #
"R_MIPS_RELGOT": 36 , #
"R_MIPS_JALR": 37 , #
"R_MIPS_TLS_DTPMOD32": 38 , # Module number 32 bit
"R_MIPS_TLS_DTPREL32": 39 , # Module-relative offset 32 bit
"R_MIPS_TLS_DTPMOD64": 40 , # Module number 64 bit
"R_MIPS_TLS_DTPREL64": 41 , # Module-relative offset 64 bit
"R_MIPS_TLS_GD": 42 , # 16 bit GOT offset for GD
"R_MIPS_TLS_LDM": 43 , # 16 bit GOT offset for LDM
"R_MIPS_TLS_DTPREL_HI16": 44 , # Module-relative offset, high 16 bits
"R_MIPS_TLS_DTPREL_LO16": 45 , # Module-relative offset, low 16 bits
"R_MIPS_TLS_GOTTPREL": 46 , # 16 bit GOT offset for IE
"R_MIPS_TLS_TPREL32": 47 , # TP-relative offset, 32 bit
"R_MIPS_TLS_TPREL64": 48 , # TP-relative offset, 64 bit
"R_MIPS_TLS_TPREL_HI16": 49 , # TP-relative offset, high 16 bits
"R_MIPS_TLS_TPREL_LO16": 50 , # TP-relative offset, low 16 bits
"R_MIPS_GLOB_DAT": 51 , #
"R_MIPS_COPY": 126 , #
"R_MIPS_JUMP_SLOT": 127 , #
"R_MIPS_NUM": 128 , #
"PT_MIPS_REGINFO": 0x70000000 , # Register usage information
"PT_MIPS_RTPROC": 0x70000001 , # Runtime procedure table.
"PT_MIPS_OPTIONS": 0x70000002 , #
"PF_MIPS_LOCAL": 0x10000000 , #
"DT_MIPS_RLD_VERSION": 0x70000001 , # Runtime linker interface version
"DT_MIPS_TIME_STAMP": 0x70000002 , # Timestamp
"DT_MIPS_ICHECKSUM": 0x70000003 , # Checksum
"DT_MIPS_IVERSION": 0x70000004 , # Version string (string tbl index)
"DT_MIPS_FLAGS": 0x70000005 , # Flags
"DT_MIPS_BASE_ADDRESS": 0x70000006 , # Base address
"DT_MIPS_MSYM": 0x70000007 , #
"DT_MIPS_CONFLICT": 0x70000008 , # Address of CONFLICT section
"DT_MIPS_LIBLIST": 0x70000009 , # Address of LIBLIST section
"DT_MIPS_LOCAL_GOTNO": 0x7000000a , # Number of local GOT entries
"DT_MIPS_CONFLICTNO": 0x7000000b , # Number of CONFLICT entries
"DT_MIPS_LIBLISTNO": 0x70000010 , # Number of LIBLIST entries
"DT_MIPS_SYMTABNO": 0x70000011 , # Number of DYNSYM entries
"DT_MIPS_UNREFEXTNO": 0x70000012 , # First external DYNSYM
"DT_MIPS_GOTSYM": 0x70000013 , # First GOT entry in DYNSYM
"DT_MIPS_HIPAGENO": 0x70000014 , # Number of GOT page table entries
"DT_MIPS_RLD_MAP": 0x70000016 , # Address of run time loader map.
"DT_MIPS_DELTA_CLASS": 0x70000017 , # Delta C++ class definition.
"DT_MIPS_DELTA_CLASS_NO": 0x70000018 , # Number of entries in
"DT_MIPS_DELTA_INSTANCE": 0x70000019 , # Delta C++ class instances.
"DT_MIPS_DELTA_INSTANCE_NO": 0x7000001a , # Number of entries in
"DT_MIPS_DELTA_RELOC": 0x7000001b , # Delta relocations.
"DT_MIPS_DELTA_RELOC_NO": 0x7000001c , # Number of entries in
"DT_MIPS_DELTA_SYM": 0x7000001d , # Delta symbols that Delta
"DT_MIPS_DELTA_SYM_NO": 0x7000001e , # Number of entries in
"DT_MIPS_DELTA_CLASSSYM": 0x70000020 , # Delta symbols that hold the
"DT_MIPS_DELTA_CLASSSYM_NO": 0x70000021 , # Number of entries in
"DT_MIPS_CXX_FLAGS": 0x70000022 , # Flags indicating for C++ flavor.
"DT_MIPS_PIXIE_INIT": 0x70000023 , #
"DT_MIPS_SYMBOL_LIB": 0x70000024 , #
"DT_MIPS_LOCALPAGE_GOTIDX": 0x70000025 , #
"DT_MIPS_LOCAL_GOTIDX": 0x70000026 , #
"DT_MIPS_HIDDEN_GOTIDX": 0x70000027 , #
"DT_MIPS_PROTECTED_GOTIDX": 0x70000028 , #
"DT_MIPS_OPTIONS": 0x70000029 , # Address of .options.
"DT_MIPS_INTERFACE": 0x7000002a , # Address of .interface.
"DT_MIPS_DYNSTR_ALIGN": 0x7000002b , #
"DT_MIPS_INTERFACE_SIZE": 0x7000002c , # Size of the .interface section.
"DT_MIPS_RLD_TEXT_RESOLVE_ADDR": 0x7000002d , # Address of rld_text_rsolve
"DT_MIPS_PERF_SUFFIX": 0x7000002e , # Default suffix of dso to be added
"DT_MIPS_COMPACT_SIZE": 0x7000002f , # (O32)Size of compact rel section.
"DT_MIPS_GP_VALUE": 0x70000030 , # GP value for aux GOTs.
"DT_MIPS_AUX_DYNAMIC": 0x70000031 , # Address of aux .dynamic.
"DT_MIPS_PLTGOT": 0x70000032 , #
"DT_MIPS_RWPLT": 0x70000034 , #
"DT_MIPS_NUM": 0x35 , #
"RHF_NONE": 0 , # No flags
"RHF_QUICKSTART": (1 << 0) , # Use quickstart
"RHF_NOTPOT": (1 << 1) , # Hash size not power of 2
"RHF_NO_LIBRARY_REPLACEMENT": (1 << 2) , # Ignore LD_LIBRARY_PATH
"RHF_NO_MOVE": (1 << 3) , #
"RHF_SGI_ONLY": (1 << 4) , #
"RHF_GUARANTEE_INIT": (1 << 5) , #
"RHF_DELTA_C_PLUS_PLUS": (1 << 6) , #
"RHF_GUARANTEE_START_INIT": (1 << 7) , #
"RHF_PIXIE": (1 << 8) , #
"RHF_DEFAULT_DELAY_LOAD": (1 << 9) , #
"RHF_REQUICKSTART": (1 << 10) , #
"RHF_REQUICKSTARTED": (1 << 11) , #
"RHF_CORD": (1 << 12) , #
"RHF_NO_UNRES_UNDEF": (1 << 13) , #
"RHF_RLD_ORDER_SAFE": (1 << 14) , #
"LL_NONE": 0 , #
"LL_EXACT_MATCH": (1 << 0) , # Require exact match
"LL_IGNORE_INT_VER": (1 << 1) , # Ignore interface version
"LL_REQUIRE_MINOR": (1 << 2) , #
"LL_EXPORTS": (1 << 3) , #
"LL_DELAY_LOAD": (1 << 4) , #
"LL_DELTA": (1 << 5) , #
"EF_PARISC_TRAPNIL": 0x00010000 , # Trap nil pointer dereference.
"EF_PARISC_EXT": 0x00020000 , # Program uses arch. extensions.
"EF_PARISC_LSB": 0x00040000 , # Program expects little endian.
"EF_PARISC_WIDE": 0x00080000 , # Program expects wide mode.
"EF_PARISC_NO_KABP": 0x00100000 , # No kernel assisted branch
"EF_PARISC_LAZYSWAP": 0x00400000 , # Allow lazy swapping.
"EF_PARISC_ARCH": 0x0000ffff , # Architecture version.
"EFA_PARISC_1_0": 0x020b , # PA-RISC 1.0 big-endian.
"EFA_PARISC_1_1": 0x0210 , # PA-RISC 1.1 big-endian.
"EFA_PARISC_2_0": 0x0214 , # PA-RISC 2.0 big-endian.
"SHN_PARISC_ANSI_COMMON": 0xff00 , # Section for tenatively declared
"SHN_PARISC_HUGE_COMMON": 0xff01 , # Common blocks in huge model.
"SHT_PARISC_EXT": 0x70000000 , # Contains product specific ext.
"SHT_PARISC_UNWIND": 0x70000001 , # Unwind information.
"SHT_PARISC_DOC": 0x70000002 , # Debug info for optimized code.
"SHF_PARISC_SHORT": 0x20000000 , # Section with short addressing.
"SHF_PARISC_HUGE": 0x40000000 , # Section far from gp.
"SHF_PARISC_SBP": 0x80000000 , # Static branch prediction code.
"STT_PARISC_MILLICODE": 13 , # Millicode function entry point.
"STT_HP_OPAQUE": (MacroRef("STT_LOOS") + 0x1) , #
"STT_HP_STUB": (MacroRef("STT_LOOS") + 0x2) , #
"R_PARISC_NONE": 0 , # No reloc.
"R_PARISC_DIR32": 1 , # Direct 32-bit reference.
"R_PARISC_DIR21L": 2 , # Left 21 bits of eff. address.
"R_PARISC_DIR17R": 3 , # Right 17 bits of eff. address.
"R_PARISC_DIR17F": 4 , # 17 bits of eff. address.
"R_PARISC_DIR14R": 6 , # Right 14 bits of eff. address.
"R_PARISC_PCREL32": 9 , # 32-bit rel. address.
"R_PARISC_PCREL21L": 10 , # Left 21 bits of rel. address.
"R_PARISC_PCREL17R": 11 , # Right 17 bits of rel. address.
"R_PARISC_PCREL17F": 12 , # 17 bits of rel. address.
"R_PARISC_PCREL14R": 14 , # Right 14 bits of rel. address.
"R_PARISC_DPREL21L": 18 , # Left 21 bits of rel. address.
"R_PARISC_DPREL14R": 22 , # Right 14 bits of rel. address.
"R_PARISC_GPREL21L": 26 , # GP-relative, left 21 bits.
"R_PARISC_GPREL14R": 30 , # GP-relative, right 14 bits.
"R_PARISC_LTOFF21L": 34 , # LT-relative, left 21 bits.
"R_PARISC_LTOFF14R": 38 , # LT-relative, right 14 bits.
"R_PARISC_SECREL32": 41 , # 32 bits section rel. address.
"R_PARISC_SEGBASE": 48 , # No relocation, set segment base.
"R_PARISC_SEGREL32": 49 , # 32 bits segment rel. address.
"R_PARISC_PLTOFF21L": 50 , # PLT rel. address, left 21 bits.
"R_PARISC_PLTOFF14R": 54 , # PLT rel. address, right 14 bits.
"R_PARISC_LTOFF_FPTR32": 57 , # 32 bits LT-rel. function pointer.
"R_PARISC_LTOFF_FPTR21L": 58 , # LT-rel. fct ptr, left 21 bits.
"R_PARISC_LTOFF_FPTR14R": 62 , # LT-rel. fct ptr, right 14 bits.
"R_PARISC_FPTR64": 64 , # 64 bits function address.
"R_PARISC_PLABEL32": 65 , # 32 bits function address.
"R_PARISC_PLABEL21L": 66 , # Left 21 bits of fdesc address.
"R_PARISC_PLABEL14R": 70 , # Right 14 bits of fdesc address.
"R_PARISC_PCREL64": 72 , # 64 bits PC-rel. address.
"R_PARISC_PCREL22F": 74 , # 22 bits PC-rel. address.
"R_PARISC_PCREL14WR": 75 , # PC-rel. address, right 14 bits.
"R_PARISC_PCREL14DR": 76 , # PC rel. address, right 14 bits.
"R_PARISC_PCREL16F": 77 , # 16 bits PC-rel. address.
"R_PARISC_PCREL16WF": 78 , # 16 bits PC-rel. address.
"R_PARISC_PCREL16DF": 79 , # 16 bits PC-rel. address.
"R_PARISC_DIR64": 80 , # 64 bits of eff. address.
"R_PARISC_DIR14WR": 83 , # 14 bits of eff. address.
"R_PARISC_DIR14DR": 84 , # 14 bits of eff. address.
"R_PARISC_DIR16F": 85 , # 16 bits of eff. address.
"R_PARISC_DIR16WF": 86 , # 16 bits of eff. address.
"R_PARISC_DIR16DF": 87 , # 16 bits of eff. address.
"R_PARISC_GPREL64": 88 , # 64 bits of GP-rel. address.
"R_PARISC_GPREL14WR": 91 , # GP-rel. address, right 14 bits.
"R_PARISC_GPREL14DR": 92 , # GP-rel. address, right 14 bits.
"R_PARISC_GPREL16F": 93 , # 16 bits GP-rel. address.
"R_PARISC_GPREL16WF": 94 , # 16 bits GP-rel. address.
"R_PARISC_GPREL16DF": 95 , # 16 bits GP-rel. address.
"R_PARISC_LTOFF64": 96 , # 64 bits LT-rel. address.
"R_PARISC_LTOFF14WR": 99 , # LT-rel. address, right 14 bits.
"R_PARISC_LTOFF14DR": 100 , # LT-rel. address, right 14 bits.
"R_PARISC_LTOFF16F": 101 , # 16 bits LT-rel. address.
"R_PARISC_LTOFF16WF": 102 , # 16 bits LT-rel. address.
"R_PARISC_LTOFF16DF": 103 , # 16 bits LT-rel. address.
"R_PARISC_SECREL64": 104 , # 64 bits section rel. address.
"R_PARISC_SEGREL64": 112 , # 64 bits segment rel. address.
"R_PARISC_PLTOFF14WR": 115 , # PLT-rel. address, right 14 bits.
"R_PARISC_PLTOFF14DR": 116 , # PLT-rel. address, right 14 bits.
"R_PARISC_PLTOFF16F": 117 , # 16 bits LT-rel. address.
"R_PARISC_PLTOFF16WF": 118 , # 16 bits PLT-rel. address.
"R_PARISC_PLTOFF16DF": 119 , # 16 bits PLT-rel. address.
"R_PARISC_LTOFF_FPTR64": 120 , # 64 bits LT-rel. function ptr.
"R_PARISC_LTOFF_FPTR14WR": 123 , # LT-rel. fct. ptr., right 14 bits.
"R_PARISC_LTOFF_FPTR14DR": 124 , # LT-rel. fct. ptr., right 14 bits.
"R_PARISC_LTOFF_FPTR16F": 125 , # 16 bits LT-rel. function ptr.
"R_PARISC_LTOFF_FPTR16WF": 126 , # 16 bits LT-rel. function ptr.
"R_PARISC_LTOFF_FPTR16DF": 127 , # 16 bits LT-rel. function ptr.
"R_PARISC_LORESERVE": 128 , #
"R_PARISC_COPY": 128 , # Copy relocation.
"R_PARISC_IPLT": 129 , # Dynamic reloc, imported PLT
"R_PARISC_EPLT": 130 , # Dynamic reloc, exported PLT
"R_PARISC_TPREL32": 153 , # 32 bits TP-rel. address.
"R_PARISC_TPREL21L": 154 , # TP-rel. address, left 21 bits.
"R_PARISC_TPREL14R": 158 , # TP-rel. address, right 14 bits.
"R_PARISC_LTOFF_TP21L": 162 , # LT-TP-rel. address, left 21 bits.
"R_PARISC_LTOFF_TP14R": 166 , # LT-TP-rel. address, right 14 bits.
"R_PARISC_LTOFF_TP14F": 167 , # 14 bits LT-TP-rel. address.
"R_PARISC_TPREL64": 216 , # 64 bits TP-rel. address.
"R_PARISC_TPREL14WR": 219 , # TP-rel. address, right 14 bits.
"R_PARISC_TPREL14DR": 220 , # TP-rel. address, right 14 bits.
"R_PARISC_TPREL16F": 221 , # 16 bits TP-rel. address.
"R_PARISC_TPREL16WF": 222 , # 16 bits TP-rel. address.
"R_PARISC_TPREL16DF": 223 , # 16 bits TP-rel. address.
"R_PARISC_LTOFF_TP64": 224 , # 64 bits LT-TP-rel. address.
"R_PARISC_LTOFF_TP14WR": 227 , # LT-TP-rel. address, right 14 bits.
"R_PARISC_LTOFF_TP14DR": 228 , # LT-TP-rel. address, right 14 bits.
"R_PARISC_LTOFF_TP16F": 229 , # 16 bits LT-TP-rel. address.
"R_PARISC_LTOFF_TP16WF": 230 , # 16 bits LT-TP-rel. address.
"R_PARISC_LTOFF_TP16DF": 231 , # 16 bits LT-TP-rel. address.
"R_PARISC_GNU_VTENTRY": 232 , #
"R_PARISC_GNU_VTINHERIT": 233 , #
"R_PARISC_TLS_GD21L": 234 , # GD 21-bit left.
"R_PARISC_TLS_GD14R": 235 , # GD 14-bit right.
"R_PARISC_TLS_GDCALL": 236 , # GD call to __t_g_a.
"R_PARISC_TLS_LDM21L": 237 , # LD module 21-bit left.
"R_PARISC_TLS_LDM14R": 238 , # LD module 14-bit right.
"R_PARISC_TLS_LDMCALL": 239 , # LD module call to __t_g_a.
"R_PARISC_TLS_LDO21L": 240 , # LD offset 21-bit left.
"R_PARISC_TLS_LDO14R": 241 , # LD offset 14-bit right.
"R_PARISC_TLS_DTPMOD32": 242 , # DTP module 32-bit.
"R_PARISC_TLS_DTPMOD64": 243 , # DTP module 64-bit.
"R_PARISC_TLS_DTPOFF32": 244 , # DTP offset 32-bit.
"R_PARISC_TLS_DTPOFF64": 245 , # DTP offset 32-bit.
"R_PARISC_TLS_LE21L": MacroRef("R_PARISC_TPREL21L"), #
"R_PARISC_TLS_LE14R": MacroRef("R_PARISC_TPREL14R"), #
"R_PARISC_TLS_IE21L": MacroRef("R_PARISC_LTOFF_TP21L") , #
"R_PARISC_TLS_IE14R": MacroRef("R_PARISC_LTOFF_TP14R") , #
"R_PARISC_TLS_TPREL32": MacroRef("R_PARISC_TPREL32") , #
"R_PARISC_TLS_TPREL64": MacroRef("R_PARISC_TPREL64") , #
"R_PARISC_HIRESERVE": 255 , #
"PT_HP_TLS": (MacroRef("PT_LOOS") + 0x0) , #
"PT_HP_CORE_NONE": (MacroRef("PT_LOOS") + 0x1) , #
"PT_HP_CORE_VERSION": (MacroRef("PT_LOOS") + 0x2) , #
"PT_HP_CORE_KERNEL": (MacroRef("PT_LOOS") + 0x3) , #
"PT_HP_CORE_COMM": (MacroRef("PT_LOOS") + 0x4) , #
"PT_HP_CORE_PROC": (MacroRef("PT_LOOS") + 0x5) , #
"PT_HP_CORE_LOADABLE": (MacroRef("PT_LOOS") + 0x6) , #
"PT_HP_CORE_STACK": (MacroRef("PT_LOOS") + 0x7) , #
"PT_HP_CORE_SHM": (MacroRef("PT_LOOS") + 0x8) , #
"PT_HP_CORE_MMF": (MacroRef("PT_LOOS") + 0x9) , #
"PT_HP_PARALLEL": (MacroRef("PT_LOOS") + 0x10) , #
"PT_HP_FASTBIND": (MacroRef("PT_LOOS") + 0x11) , #
"PT_HP_OPT_ANNOT": (MacroRef("PT_LOOS") + 0x12) , #
"PT_HP_HSL_ANNOT": (MacroRef("PT_LOOS") + 0x13) , #
"PT_HP_STACK": (MacroRef("PT_LOOS") + 0x14) , #
"PT_PARISC_ARCHEXT": 0x70000000 , #
"PT_PARISC_UNWIND": 0x70000001 , #
"PF_PARISC_SBP": 0x08000000 , #
"PF_HP_PAGE_SIZE": 0x00100000 , #
"PF_HP_FAR_SHARED": 0x00200000 , #
"PF_HP_NEAR_SHARED": 0x00400000 , #
"PF_HP_CODE": 0x01000000 , #
"PF_HP_MODIFY": 0x02000000 , #
"PF_HP_LAZYSWAP": 0x04000000 , #
"PF_HP_SBP": 0x08000000 , #
"EF_ALPHA_32BIT": 1 , # All addresses must be < 2GB.
"EF_ALPHA_CANRELAX": 2 , # Relocations for relaxing exist.
"SHT_ALPHA_DEBUG": 0x70000001 , #
"SHT_ALPHA_REGINFO": 0x70000002 , #
"SHF_ALPHA_GPREL": 0x10000000 , #
"STO_ALPHA_NOPV": 0x80 , # No PV required.
"STO_ALPHA_STD_GPLOAD": 0x88 , # PV only used for initial ldgp.
"R_ALPHA_NONE": 0 , # No reloc
"R_ALPHA_REFLONG": 1 , # Direct 32 bit
"R_ALPHA_REFQUAD": 2 , # Direct 64 bit
"R_ALPHA_GPREL32": 3 , # GP relative 32 bit
"R_ALPHA_LITERAL": 4 , # GP relative 16 bit w/optimization
"R_ALPHA_LITUSE": 5 , # Optimization hint for LITERAL
"R_ALPHA_GPDISP": 6 , # Add displacement to GP
"R_ALPHA_BRADDR": 7 , # PC+4 relative 23 bit shifted
"R_ALPHA_HINT": 8 , # PC+4 relative 16 bit shifted
"R_ALPHA_SREL16": 9 , # PC relative 16 bit
"R_ALPHA_SREL32": 10 , # PC relative 32 bit
"R_ALPHA_SREL64": 11 , # PC relative 64 bit
"R_ALPHA_GPRELHIGH": 17 , # GP relative 32 bit, high 16 bits
"R_ALPHA_GPRELLOW": 18 , # GP relative 32 bit, low 16 bits
"R_ALPHA_GPREL16": 19 , # GP relative 16 bit
"R_ALPHA_COPY": 24 , # Copy symbol at runtime
"R_ALPHA_GLOB_DAT": 25 , # Create GOT entry
"R_ALPHA_JMP_SLOT": 26 , # Create PLT entry
"R_ALPHA_RELATIVE": 27 , # Adjust by program base
"R_ALPHA_TLS_GD_HI": 28 , #
"R_ALPHA_TLSGD": 29 , #
"R_ALPHA_TLS_LDM": 30 , #
"R_ALPHA_DTPMOD64": 31 , #
"R_ALPHA_GOTDTPREL": 32 , #
"R_ALPHA_DTPREL64": 33 , #
"R_ALPHA_DTPRELHI": 34 , #
"R_ALPHA_DTPRELLO": 35 , #
"R_ALPHA_DTPREL16": 36 , #
"R_ALPHA_GOTTPREL": 37 , #
"R_ALPHA_TPREL64": 38 , #
"R_ALPHA_TPRELHI": 39 , #
"R_ALPHA_TPRELLO": 40 , #
"R_ALPHA_TPREL16": 41 , #
"R_ALPHA_NUM": 46 , #
"LITUSE_ALPHA_ADDR": 0 , #
"LITUSE_ALPHA_BASE": 1 , #
"LITUSE_ALPHA_BYTOFF": 2 , #
"LITUSE_ALPHA_JSR": 3 , #
"LITUSE_ALPHA_TLS_GD": 4 , #
"LITUSE_ALPHA_TLS_LDM": 5 , #
"DT_ALPHA_PLTRO": (MacroRef("DT_LOPROC") + 0) , #
"DT_ALPHA_NUM": 1 , #
"EF_PPC_EMB": 0x80000000 , # PowerPC embedded flag
"EF_PPC_RELOCATABLE": 0x00010000 , # PowerPC -mrelocatable flag
"EF_PPC_RELOCATABLE_LIB": 0x00008000 , # PowerPC -mrelocatable-lib
"R_PPC_NONE": 0 , #
"R_PPC_ADDR32": 1 , # 32bit absolute address
"R_PPC_ADDR24": 2 , # 26bit address, 2 bits ignored.
"R_PPC_ADDR16": 3 , # 16bit absolute address
"R_PPC_ADDR16_LO": 4 , # lower 16bit of absolute address
"R_PPC_ADDR16_HI": 5 , # high 16bit of absolute address
"R_PPC_ADDR16_HA": 6 , # adjusted high 16bit
"R_PPC_ADDR14": 7 , # 16bit address, 2 bits ignored
"R_PPC_ADDR14_BRTAKEN": 8 , #
"R_PPC_ADDR14_BRNTAKEN": 9 , #
"R_PPC_REL24": 10 , # PC relative 26 bit
"R_PPC_REL14": 11 , # PC relative 16 bit
"R_PPC_REL14_BRTAKEN": 12 , #
"R_PPC_REL14_BRNTAKEN": 13 , #
"R_PPC_GOT16": 14 , #
"R_PPC_GOT16_LO": 15 , #
"R_PPC_GOT16_HI": 16 , #
"R_PPC_GOT16_HA": 17 , #
"R_PPC_PLTREL24": 18 , #
"R_PPC_COPY": 19 , #
"R_PPC_GLOB_DAT": 20 , #
"R_PPC_JMP_SLOT": 21 , #
"R_PPC_RELATIVE": 22 , #
"R_PPC_LOCAL24PC": 23 , #
"R_PPC_UADDR32": 24 , #
"R_PPC_UADDR16": 25 , #
"R_PPC_REL32": 26 , #
"R_PPC_PLT32": 27 , #
"R_PPC_PLTREL32": 28 , #
"R_PPC_PLT16_LO": 29 , #
"R_PPC_PLT16_HI": 30 , #
"R_PPC_PLT16_HA": 31 , #
"R_PPC_SDAREL16": 32 , #
"R_PPC_SECTOFF": 33 , #
"R_PPC_SECTOFF_LO": 34 , #
"R_PPC_SECTOFF_HI": 35 , #
"R_PPC_SECTOFF_HA": 36 , #
"R_PPC_TLS": 67 , # none (sym+add)@tls
"R_PPC_DTPMOD32": 68 , # word32 (sym+add)@dtpmod
"R_PPC_TPREL16": 69 , # half16* (sym+add)@tprel
"R_PPC_TPREL16_LO": 70 , # half16 (sym+add)@tprel@l
"R_PPC_TPREL16_HI": 71 , # half16 (sym+add)@tprel@h
"R_PPC_TPREL16_HA": 72 , # half16 (sym+add)@tprel@ha
"R_PPC_TPREL32": 73 , # word32 (sym+add)@tprel
"R_PPC_DTPREL16": 74 , # half16* (sym+add)@dtprel
"R_PPC_DTPREL16_LO": 75 , # half16 (sym+add)@dtprel@l
"R_PPC_DTPREL16_HI": 76 , # half16 (sym+add)@dtprel@h
"R_PPC_DTPREL16_HA": 77 , # half16 (sym+add)@dtprel@ha
"R_PPC_DTPREL32": 78 , # word32 (sym+add)@dtprel
"R_PPC_GOT_TLSGD16": 79 , # half16* (sym+add)@got@tlsgd
"R_PPC_GOT_TLSGD16_LO": 80 , # half16 (sym+add)@got@tlsgd@l
"R_PPC_GOT_TLSGD16_HI": 81 , # half16 (sym+add)@got@tlsgd@h
"R_PPC_GOT_TLSGD16_HA": 82 , # half16 (sym+add)@got@tlsgd@ha
"R_PPC_GOT_TLSLD16": 83 , # half16* (sym+add)@got@tlsld
"R_PPC_GOT_TLSLD16_LO": 84 , # half16 (sym+add)@got@tlsld@l
"R_PPC_GOT_TLSLD16_HI": 85 , # half16 (sym+add)@got@tlsld@h
"R_PPC_GOT_TLSLD16_HA": 86 , # half16 (sym+add)@got@tlsld@ha
"R_PPC_GOT_TPREL16": 87 , # half16* (sym+add)@got@tprel
"R_PPC_GOT_TPREL16_LO": 88 , # half16 (sym+add)@got@tprel@l
"R_PPC_GOT_TPREL16_HI": 89 , # half16 (sym+add)@got@tprel@h
"R_PPC_GOT_TPREL16_HA": 90 , # half16 (sym+add)@got@tprel@ha
"R_PPC_GOT_DTPREL16": 91 , # half16* (sym+add)@got@dtprel
"R_PPC_GOT_DTPREL16_LO": 92 , # half16* (sym+add)@got@dtprel@l
"R_PPC_GOT_DTPREL16_HI": 93 , # half16* (sym+add)@got@dtprel@h
"R_PPC_GOT_DTPREL16_HA": 94 , # half16* (sym+add)@got@dtprel@ha
"R_PPC_EMB_NADDR32": 101 , #
"R_PPC_EMB_NADDR16": 102 , #
"R_PPC_EMB_NADDR16_LO": 103 , #
"R_PPC_EMB_NADDR16_HI": 104 , #
"R_PPC_EMB_NADDR16_HA": 105 , #
"R_PPC_EMB_SDAI16": 106 , #
"R_PPC_EMB_SDA2I16": 107 , #
"R_PPC_EMB_SDA2REL": 108 , #
"R_PPC_EMB_SDA21": 109 , # 16 bit offset in SDA
"R_PPC_EMB_MRKREF": 110 , #
"R_PPC_EMB_RELSEC16": 111 , #
"R_PPC_EMB_RELST_LO": 112 , #
"R_PPC_EMB_RELST_HI": 113 , #
"R_PPC_EMB_RELST_HA": 114 , #
"R_PPC_EMB_BIT_FLD": 115 , #
"R_PPC_EMB_RELSDA": 116 , # 16 bit relative offset in SDA
"R_PPC_DIAB_SDA21_LO": 180 , # like EMB_SDA21, but lower 16 bit
"R_PPC_DIAB_SDA21_HI": 181 , # like EMB_SDA21, but high 16 bit
"R_PPC_DIAB_SDA21_HA": 182 , # like EMB_SDA21, adjusted high 16
"R_PPC_DIAB_RELSDA_LO": 183 , # like EMB_RELSDA, but lower 16 bit
"R_PPC_DIAB_RELSDA_HI": 184 , # like EMB_RELSDA, but high 16 bit
"R_PPC_DIAB_RELSDA_HA": 185 , # like EMB_RELSDA, adjusted high 16
"R_PPC_IRELATIVE": 248 , #
"R_PPC_REL16": 249 , # half16 (sym+add-.)
"R_PPC_REL16_LO": 250 , # half16 (sym+add-.)@l
"R_PPC_REL16_HI": 251 , # half16 (sym+add-.)@h
"R_PPC_REL16_HA": 252 , # half16 (sym+add-.)@ha
"R_PPC_TOC16": 255 , #
"DT_PPC_GOT": (MacroRef("DT_LOPROC") + 0) , #
"DT_PPC_NUM": 1 , #
"R_PPC64_NONE": MacroRef("R_PPC_NONE") , #
"R_PPC64_ADDR32": MacroRef("R_PPC_ADDR32") , # 32bit absolute address
"R_PPC64_ADDR24": MacroRef("R_PPC_ADDR24") , # 26bit address, word aligned
"R_PPC64_ADDR16": MacroRef("R_PPC_ADDR16") , # 16bit absolute address
"R_PPC64_ADDR16_LO": MacroRef("R_PPC_ADDR16_LO") , # lower 16bits of address
"R_PPC64_ADDR16_HI": MacroRef("R_PPC_ADDR16_HI") , # high 16bits of address.
"R_PPC64_ADDR16_HA": MacroRef("R_PPC_ADDR16_HA") , # adjusted high 16bits.
"R_PPC64_ADDR14": MacroRef("R_PPC_ADDR14") , # 16bit address, word aligned
"R_PPC64_ADDR14_BRTAKEN": MacroRef("R_PPC_ADDR14_BRTAKEN") , #
"R_PPC64_ADDR14_BRNTAKEN": MacroRef("R_PPC_ADDR14_BRNTAKEN") , #
"R_PPC64_REL24": MacroRef("R_PPC_REL24") , # PC-rel. 26 bit, word aligned
"R_PPC64_REL14": MacroRef("R_PPC_REL14") , # PC relative 16 bit
"R_PPC64_REL14_BRTAKEN": MacroRef("R_PPC_REL14_BRTAKEN") , #
"R_PPC64_REL14_BRNTAKEN": MacroRef("R_PPC_REL14_BRNTAKEN") , #
"R_PPC64_GOT16": MacroRef("R_PPC_GOT16") , #
"R_PPC64_GOT16_LO": MacroRef("R_PPC_GOT16_LO") , #
"R_PPC64_GOT16_HI": MacroRef("R_PPC_GOT16_HI") , #
"R_PPC64_GOT16_HA": MacroRef("R_PPC_GOT16_HA") , #
"R_PPC64_COPY": MacroRef("R_PPC_COPY") , #
"R_PPC64_GLOB_DAT": MacroRef("R_PPC_GLOB_DAT") , #
"R_PPC64_JMP_SLOT": MacroRef("R_PPC_JMP_SLOT") , #
"R_PPC64_RELATIVE": MacroRef("R_PPC_RELATIVE") , #
"R_PPC64_UADDR32": MacroRef("R_PPC_UADDR32") , #
"R_PPC64_UADDR16": MacroRef("R_PPC_UADDR16") , #
"R_PPC64_REL32": MacroRef("R_PPC_REL32") , #
"R_PPC64_PLT32": MacroRef("R_PPC_PLT32") , #
"R_PPC64_PLTREL32": MacroRef("R_PPC_PLTREL32") , #
"R_PPC64_PLT16_LO": MacroRef("R_PPC_PLT16_LO") , #
"R_PPC64_PLT16_HI": MacroRef("R_PPC_PLT16_HI") , #
"R_PPC64_PLT16_HA": MacroRef("R_PPC_PLT16_HA") , #
"R_PPC64_SECTOFF": MacroRef("R_PPC_SECTOFF") , #
"R_PPC64_SECTOFF_LO": MacroRef("R_PPC_SECTOFF_LO") , #
"R_PPC64_SECTOFF_HI": MacroRef("R_PPC_SECTOFF_HI") , #
"R_PPC64_SECTOFF_HA": MacroRef("R_PPC_SECTOFF_HA") , #
"R_PPC64_ADDR30": 37 , # word30 (S + A - P) >> 2
"R_PPC64_ADDR64": 38 , # doubleword64 S + A
"R_PPC64_ADDR16_HIGHER": 39 , # half16 #higher(S + A)
"R_PPC64_ADDR16_HIGHERA": 40 , # half16 #highera(S + A)
"R_PPC64_ADDR16_HIGHEST": 41 , # half16 #highest(S + A)
"R_PPC64_ADDR16_HIGHESTA": 42 , # half16 #highesta(S + A)
"R_PPC64_UADDR64": 43 , # doubleword64 S + A
"R_PPC64_REL64": 44 , # doubleword64 S + A - P
"R_PPC64_PLT64": 45 , # doubleword64 L + A
"R_PPC64_PLTREL64": 46 , # doubleword64 L + A - P
"R_PPC64_TOC16": 47 , # half16* S + A - .TOC
"R_PPC64_TOC16_LO": 48 , # half16 #lo(S + A - .TOC.)
"R_PPC64_TOC16_HI": 49 , # half16 #hi(S + A - .TOC.)
"R_PPC64_TOC16_HA": 50 , # half16 #ha(S + A - .TOC.)
"R_PPC64_TOC": 51 , # doubleword64 .TOC
"R_PPC64_PLTGOT16": 52 , # half16* M + A
"R_PPC64_PLTGOT16_LO": 53 , # half16 #lo(M + A)
"R_PPC64_PLTGOT16_HI": 54 , # half16 #hi(M + A)
"R_PPC64_PLTGOT16_HA": 55 , # half16 #ha(M + A)
"R_PPC64_ADDR16_DS": 56 , # half16ds* (S + A) >> 2
"R_PPC64_ADDR16_LO_DS": 57 , # half16ds #lo(S + A) >> 2
"R_PPC64_GOT16_DS": 58 , # half16ds* (G + A) >> 2
"R_PPC64_GOT16_LO_DS": 59 , # half16ds #lo(G + A) >> 2
"R_PPC64_PLT16_LO_DS": 60 , # half16ds #lo(L + A) >> 2
"R_PPC64_SECTOFF_DS": 61 , # half16ds* (R + A) >> 2
"R_PPC64_SECTOFF_LO_DS": 62 , # half16ds #lo(R + A) >> 2
"R_PPC64_TOC16_DS": 63 , # half16ds* (S + A - .TOC.) >> 2
"R_PPC64_TOC16_LO_DS": 64 , # half16ds #lo(S + A - .TOC.) >> 2
"R_PPC64_PLTGOT16_DS": 65 , # half16ds* (M + A) >> 2
"R_PPC64_PLTGOT16_LO_DS": 66 , # half16ds #lo(M + A) >> 2
"R_PPC64_TLS": 67 , # none (sym+add)@tls
"R_PPC64_DTPMOD64": 68 , # doubleword64 (sym+add)@dtpmod
"R_PPC64_TPREL16": 69 , # half16* (sym+add)@tprel
"R_PPC64_TPREL16_LO": 70 , # half16 (sym+add)@tprel@l
"R_PPC64_TPREL16_HI": 71 , # half16 (sym+add)@tprel@h
"R_PPC64_TPREL16_HA": 72 , # half16 (sym+add)@tprel@ha
"R_PPC64_TPREL64": 73 , # doubleword64 (sym+add)@tprel
"R_PPC64_DTPREL16": 74 , # half16* (sym+add)@dtprel
"R_PPC64_DTPREL16_LO": 75 , # half16 (sym+add)@dtprel@l
"R_PPC64_DTPREL16_HI": 76 , # half16 (sym+add)@dtprel@h
"R_PPC64_DTPREL16_HA": 77 , # half16 (sym+add)@dtprel@ha
"R_PPC64_DTPREL64": 78 , # doubleword64 (sym+add)@dtprel
"R_PPC64_GOT_TLSGD16": 79 , # half16* (sym+add)@got@tlsgd
"R_PPC64_GOT_TLSGD16_LO": 80 , # half16 (sym+add)@got@tlsgd@l
"R_PPC64_GOT_TLSGD16_HI": 81 , # half16 (sym+add)@got@tlsgd@h
"R_PPC64_GOT_TLSGD16_HA": 82 , # half16 (sym+add)@got@tlsgd@ha
"R_PPC64_GOT_TLSLD16": 83 , # half16* (sym+add)@got@tlsld
"R_PPC64_GOT_TLSLD16_LO": 84 , # half16 (sym+add)@got@tlsld@l
"R_PPC64_GOT_TLSLD16_HI": 85 , # half16 (sym+add)@got@tlsld@h
"R_PPC64_GOT_TLSLD16_HA": 86 , # half16 (sym+add)@got@tlsld@ha
"R_PPC64_GOT_TPREL16_DS": 87 , # half16ds* (sym+add)@got@tprel
"R_PPC64_GOT_TPREL16_LO_DS": 88 , # half16ds (sym+add)@got@tprel@l
"R_PPC64_GOT_TPREL16_HI": 89 , # half16 (sym+add)@got@tprel@h
"R_PPC64_GOT_TPREL16_HA": 90 , # half16 (sym+add)@got@tprel@ha
"R_PPC64_GOT_DTPREL16_DS": 91 , # half16ds* (sym+add)@got@dtprel
"R_PPC64_GOT_DTPREL16_LO_DS": 92 , # half16ds (sym+add)@got@dtprel@l
"R_PPC64_GOT_DTPREL16_HI": 93 , # half16 (sym+add)@got@dtprel@h
"R_PPC64_GOT_DTPREL16_HA": 94 , # half16 (sym+add)@got@dtprel@ha
"R_PPC64_TPREL16_DS": 95 , # half16ds* (sym+add)@tprel
"R_PPC64_TPREL16_LO_DS": 96 , # half16ds (sym+add)@tprel@l
"R_PPC64_TPREL16_HIGHER": 97 , # half16 (sym+add)@tprel@higher
"R_PPC64_TPREL16_HIGHERA": 98 , # half16 (sym+add)@tprel@highera
"R_PPC64_TPREL16_HIGHEST": 99 , # half16 (sym+add)@tprel@highest
"R_PPC64_TPREL16_HIGHESTA": 100 , # half16 (sym+add)@tprel@highesta
"R_PPC64_DTPREL16_DS": 101 , # half16ds* (sym+add)@dtprel
"R_PPC64_DTPREL16_LO_DS": 102 , # half16ds (sym+add)@dtprel@l
"R_PPC64_DTPREL16_HIGHER": 103 , # half16 (sym+add)@dtprel@higher
"R_PPC64_DTPREL16_HIGHERA": 104 , # half16 (sym+add)@dtprel@highera
"R_PPC64_DTPREL16_HIGHEST": 105 , # half16 (sym+add)@dtprel@highest
"R_PPC64_DTPREL16_HIGHESTA": 106 , # half16 (sym+add)@dtprel@highesta
"R_PPC64_JMP_IREL": 247 , #
"R_PPC64_IRELATIVE": 248 , #
"R_PPC64_REL16": 249 , # half16 (sym+add-.)
"R_PPC64_REL16_LO": 250 , # half16 (sym+add-.)@l
"R_PPC64_REL16_HI": 251 , # half16 (sym+add-.)@h
"R_PPC64_REL16_HA": 252 , # half16 (sym+add-.)@ha
"DT_PPC64_GLINK": (MacroRef("DT_LOPROC") + 0) , #
"DT_PPC64_OPD": (MacroRef("DT_LOPROC") + 1) , #
"DT_PPC64_OPDSZ": (MacroRef("DT_LOPROC") + 2) , #
"DT_PPC64_NUM": 3 , #
"EF_ARM_RELEXEC": 0x01 , #
"EF_ARM_HASENTRY": 0x02 , #
"EF_ARM_INTERWORK": 0x04 , #
"EF_ARM_APCS_26": 0x08 , #
"EF_ARM_APCS_FLOAT": 0x10 , #
"EF_ARM_PIC": 0x20 , #
"EF_ARM_ALIGN8": 0x40 , # 8-bit structure alignment is in use
"EF_ARM_NEW_ABI": 0x80 , #
"EF_ARM_OLD_ABI": 0x100 , #
"EF_ARM_SOFT_FLOAT": 0x200 , #
"EF_ARM_VFP_FLOAT": 0x400 , #
"EF_ARM_MAVERICK_FLOAT": 0x800 , #
"EF_ARM_SYMSARESORTED": 0x04 , #
"EF_ARM_DYNSYMSUSESEGIDX": 0x08 , #
"EF_ARM_MAPSYMSFIRST": 0x10 , #
"EF_ARM_EABIMASK": 0XFF000000 , #
"EF_ARM_BE8": 0x00800000 , #
"EF_ARM_LE8": 0x00400000 , #
"EF_ARM_EABI_UNKNOWN": 0x00000000 , #
"EF_ARM_EABI_VER1": 0x01000000 , #
"EF_ARM_EABI_VER2": 0x02000000 , #
"EF_ARM_EABI_VER3": 0x03000000 , #
"EF_ARM_EABI_VER4": 0x04000000 , #
"EF_ARM_EABI_VER5": 0x05000000 , #
"STT_ARM_TFUNC": MacroRef("STT_LOPROC") , # A Thumb function.
"STT_ARM_16BIT": MacroRef("STT_HIPROC") , # A Thumb label.
"SHF_ARM_ENTRYSECT": 0x10000000 , # Section contains an entry point
"SHF_ARM_COMDEF": 0x80000000 , # Section may be multiply defined
"PF_ARM_SB": 0x10000000 , # Segment contains the location
"PF_ARM_PI": 0x20000000 , # Position-independent segment.
"PF_ARM_ABS": 0x40000000 , # Absolute segment.
"PT_ARM_EXIDX": (MacroRef("PT_LOPROC") + 1) , # ARM unwind segment.
"SHT_ARM_EXIDX": (MacroRef("SHT_LOPROC") + 1) , # ARM unwind section.
"SHT_ARM_PREEMPTMAP": (MacroRef("SHT_LOPROC") + 2) , # Preemption details.
"SHT_ARM_ATTRIBUTES": (MacroRef("SHT_LOPROC") + 3) , # ARM attributes section.
"R_ARM_NONE": 0 , # No reloc
"R_ARM_PC24": 1 , # PC relative 26 bit branch
"R_ARM_ABS32": 2 , # Direct 32 bit
"R_ARM_REL32": 3 , # PC relative 32 bit
"R_ARM_PC13": 4 , #
"R_ARM_ABS16": 5 , # Direct 16 bit
"R_ARM_ABS12": 6 , # Direct 12 bit
"R_ARM_THM_ABS5": 7 , #
"R_ARM_ABS8": 8 , # Direct 8 bit
"R_ARM_SBREL32": 9 , #
"R_ARM_THM_PC22": 10 , #
"R_ARM_THM_PC8": 11 , #
"R_ARM_AMP_VCALL9": 12 , #
"R_ARM_SWI24": 13 , # Obsolete static relocation.
"R_ARM_TLS_DESC": 13 , # Dynamic relocation.
"R_ARM_THM_SWI8": 14 , #
"R_ARM_XPC25": 15 , #
"R_ARM_THM_XPC22": 16 , #
"R_ARM_TLS_DTPMOD32": 17 , # ID of module containing symbol
"R_ARM_TLS_DTPOFF32": 18 , # Offset in TLS block
"R_ARM_TLS_TPOFF32": 19 , # Offset in static TLS block
"R_ARM_COPY": 20 , # Copy symbol at runtime
"R_ARM_GLOB_DAT": 21 , # Create GOT entry
"R_ARM_JUMP_SLOT": 22 , # Create PLT entry
"R_ARM_RELATIVE": 23 , # Adjust by program base
"R_ARM_GOTOFF": 24 , # 32 bit offset to GOT
"R_ARM_GOTPC": 25 , # 32 bit PC relative offset to GOT
"R_ARM_GOT32": 26 , # 32 bit GOT entry
"R_ARM_PLT32": 27 , # 32 bit PLT address
"R_ARM_ALU_PCREL_7_0": 32 , #
"R_ARM_ALU_PCREL_15_8": 33 , #
"R_ARM_ALU_PCREL_23_15": 34 , #
"R_ARM_LDR_SBREL_11_0": 35 , #
"R_ARM_ALU_SBREL_19_12": 36 , #
"R_ARM_ALU_SBREL_27_20": 37 , #
"R_ARM_TLS_GOTDESC": 90 , #
"R_ARM_TLS_CALL": 91 , #
"R_ARM_TLS_DESCSEQ": 92 , #
"R_ARM_THM_TLS_CALL": 93 , #
"R_ARM_GNU_VTENTRY": 100 , #
"R_ARM_GNU_VTINHERIT": 101 , #
"R_ARM_THM_PC11": 102 , # thumb unconditional branch
"R_ARM_THM_PC9": 103 , # thumb conditional branch
"R_ARM_TLS_GD32": 104 , # PC-rel 32 bit for global dynamic
"R_ARM_TLS_LDM32": 105 , # PC-rel 32 bit for local dynamic
"R_ARM_TLS_LDO32": 106 , # 32 bit offset relative to TLS
"R_ARM_TLS_IE32": 107 , # PC-rel 32 bit for GOT entry of
"R_ARM_TLS_LE32": 108 , # 32 bit offset relative to static
"R_ARM_THM_TLS_DESCSEQ": 129 , #
"R_ARM_IRELATIVE": 160 , #
"R_ARM_RXPC25": 249 , #
"R_ARM_RSBREL32": 250 , #
"R_ARM_THM_RPC22": 251 , #
"R_ARM_RREL32": 252 , #
"R_ARM_RABS22": 253 , #
"R_ARM_RPC24": 254 , #
"R_ARM_RBASE": 255 , #
"R_ARM_NUM": 256 , #
"EF_IA_64_MASKOS": 0x0000000f , # os-specific flags
"EF_IA_64_ABI64": 0x00000010 , # 64-bit ABI
"EF_IA_64_ARCH": 0xff000000 , # arch. version mask
"PT_IA_64_ARCHEXT": (MacroRef("PT_LOPROC") + 0) , # arch extension bits
"PT_IA_64_UNWIND": (MacroRef("PT_LOPROC") + 1) , # ia64 unwind bits
"PT_IA_64_HP_OPT_ANOT": (MacroRef("PT_LOOS") + 0x12) , #
"PT_IA_64_HP_HSL_ANOT": (MacroRef("PT_LOOS") + 0x13) , #
"PT_IA_64_HP_STACK": (MacroRef("PT_LOOS") + 0x14) , #
"PF_IA_64_NORECOV": 0x80000000 , # spec insns w/o recovery
"SHT_IA_64_EXT": (MacroRef("SHT_LOPROC") + 0) , # extension bits
"SHT_IA_64_UNWIND": (MacroRef("SHT_LOPROC") + 1) , # unwind bits
"SHF_IA_64_SHORT": 0x10000000 , # section near gp
"SHF_IA_64_NORECOV": 0x20000000 , # spec insns w/o recovery
"DT_IA_64_PLT_RESERVE": (MacroRef("DT_LOPROC") + 0) , #
"DT_IA_64_NUM": 1 , #
"R_IA64_NONE": 0x00 , # none
"R_IA64_IMM14": 0x21 , # symbol + addend, add imm14
"R_IA64_IMM22": 0x22 , # symbol + addend, add imm22
"R_IA64_IMM64": 0x23 , # symbol + addend, mov imm64
"R_IA64_DIR32MSB": 0x24 , # symbol + addend, data4 MSB
"R_IA64_DIR32LSB": 0x25 , # symbol + addend, data4 LSB
"R_IA64_DIR64MSB": 0x26 , # symbol + addend, data8 MSB
"R_IA64_DIR64LSB": 0x27 , # symbol + addend, data8 LSB
"R_IA64_GPREL22": 0x2a , # @gprel(sym + add), add imm22
"R_IA64_GPREL64I": 0x2b , # @gprel(sym + add), mov imm64
"R_IA64_GPREL32MSB": 0x2c , # @gprel(sym + add), data4 MSB
"R_IA64_GPREL32LSB": 0x2d , # @gprel(sym + add), data4 LSB
"R_IA64_GPREL64MSB": 0x2e , # @gprel(sym + add), data8 MSB
"R_IA64_GPREL64LSB": 0x2f , # @gprel(sym + add), data8 LSB
"R_IA64_LTOFF22": 0x32 , # @ltoff(sym + add), add imm22
"R_IA64_LTOFF64I": 0x33 , # @ltoff(sym + add), mov imm64
"R_IA64_PLTOFF22": 0x3a , # @pltoff(sym + add), add imm22
"R_IA64_PLTOFF64I": 0x3b , # @pltoff(sym + add), mov imm64
"R_IA64_PLTOFF64MSB": 0x3e , # @pltoff(sym + add), data8 MSB
"R_IA64_PLTOFF64LSB": 0x3f , # @pltoff(sym + add), data8 LSB
"R_IA64_FPTR64I": 0x43 , # @fptr(sym + add), mov imm64
"R_IA64_FPTR32MSB": 0x44 , # @fptr(sym + add), data4 MSB
"R_IA64_FPTR32LSB": 0x45 , # @fptr(sym + add), data4 LSB
"R_IA64_FPTR64MSB": 0x46 , # @fptr(sym + add), data8 MSB
"R_IA64_FPTR64LSB": 0x47 , # @fptr(sym + add), data8 LSB
"R_IA64_PCREL60B": 0x48 , # @pcrel(sym + add), brl
"R_IA64_PCREL21B": 0x49 , # @pcrel(sym + add), ptb, call
"R_IA64_PCREL21M": 0x4a , # @pcrel(sym + add), chk.s
"R_IA64_PCREL21F": 0x4b , # @pcrel(sym + add), fchkf
"R_IA64_PCREL32MSB": 0x4c , # @pcrel(sym + add), data4 MSB
"R_IA64_PCREL32LSB": 0x4d , # @pcrel(sym + add), data4 LSB
"R_IA64_PCREL64MSB": 0x4e , # @pcrel(sym + add), data8 MSB
"R_IA64_PCREL64LSB": 0x4f , # @pcrel(sym + add), data8 LSB
"R_IA64_LTOFF_FPTR22": 0x52 , # @ltoff(@fptr(s+a)), imm22
"R_IA64_LTOFF_FPTR64I": 0x53 , # @ltoff(@fptr(s+a)), imm64
"R_IA64_LTOFF_FPTR32MSB": 0x54 , # @ltoff(@fptr(s+a)), data4 MSB
"R_IA64_LTOFF_FPTR32LSB": 0x55 , # @ltoff(@fptr(s+a)), data4 LSB
"R_IA64_LTOFF_FPTR64MSB": 0x56 , # @ltoff(@fptr(s+a)), data8 MSB
"R_IA64_LTOFF_FPTR64LSB": 0x57 , # @ltoff(@fptr(s+a)), data8 LSB
"R_IA64_SEGREL32MSB": 0x5c , # @segrel(sym + add), data4 MSB
"R_IA64_SEGREL32LSB": 0x5d , # @segrel(sym + add), data4 LSB
"R_IA64_SEGREL64MSB": 0x5e , # @segrel(sym + add), data8 MSB
"R_IA64_SEGREL64LSB": 0x5f , # @segrel(sym + add), data8 LSB
"R_IA64_SECREL32MSB": 0x64 , # @secrel(sym + add), data4 MSB
"R_IA64_SECREL32LSB": 0x65 , # @secrel(sym + add), data4 LSB
"R_IA64_SECREL64MSB": 0x66 , # @secrel(sym + add), data8 MSB
"R_IA64_SECREL64LSB": 0x67 , # @secrel(sym + add), data8 LSB
"R_IA64_REL32MSB": 0x6c , # data 4 + REL
"R_IA64_REL32LSB": 0x6d , # data 4 + REL
"R_IA64_REL64MSB": 0x6e , # data 8 + REL
"R_IA64_REL64LSB": 0x6f , # data 8 + REL
"R_IA64_LTV32MSB": 0x74 , # symbol + addend, data4 MSB
"R_IA64_LTV32LSB": 0x75 , # symbol + addend, data4 LSB
"R_IA64_LTV64MSB": 0x76 , # symbol + addend, data8 MSB
"R_IA64_LTV64LSB": 0x77 , # symbol + addend, data8 LSB
"R_IA64_PCREL21BI": 0x79 , # @pcrel(sym + add), 21bit inst
"R_IA64_PCREL22": 0x7a , # @pcrel(sym + add), 22bit inst
"R_IA64_PCREL64I": 0x7b , # @pcrel(sym + add), 64bit inst
"R_IA64_IPLTMSB": 0x80 , # dynamic reloc, imported PLT, MSB
"R_IA64_IPLTLSB": 0x81 , # dynamic reloc, imported PLT, LSB
"R_IA64_COPY": 0x84 , # copy relocation
"R_IA64_SUB": 0x85 , # Addend and symbol difference
"R_IA64_LTOFF22X": 0x86 , # LTOFF22, relaxable.
"R_IA64_LDXMOV": 0x87 , # Use of LTOFF22X.
"R_IA64_TPREL14": 0x91 , # @tprel(sym + add), imm14
"R_IA64_TPREL22": 0x92 , # @tprel(sym + add), imm22
"R_IA64_TPREL64I": 0x93 , # @tprel(sym + add), imm64
"R_IA64_TPREL64MSB": 0x96 , # @tprel(sym + add), data8 MSB
"R_IA64_TPREL64LSB": 0x97 , # @tprel(sym + add), data8 LSB
"R_IA64_LTOFF_TPREL22": 0x9a , # @ltoff(@tprel(s+a)), imm2
"R_IA64_DTPMOD64MSB": 0xa6 , # @dtpmod(sym + add), data8 MSB
"R_IA64_DTPMOD64LSB": 0xa7 , # @dtpmod(sym + add), data8 LSB
"R_IA64_LTOFF_DTPMOD22": 0xaa , # @ltoff(@dtpmod(sym + add)), imm22
"R_IA64_DTPREL14": 0xb1 , # @dtprel(sym + add), imm14
"R_IA64_DTPREL22": 0xb2 , # @dtprel(sym + add), imm22
"R_IA64_DTPREL64I": 0xb3 , # @dtprel(sym + add), imm64
"R_IA64_DTPREL32MSB": 0xb4 , # @dtprel(sym + add), data4 MSB
"R_IA64_DTPREL32LSB": 0xb5 , # @dtprel(sym + add), data4 LSB
"R_IA64_DTPREL64MSB": 0xb6 , # @dtprel(sym + add), data8 MSB
"R_IA64_DTPREL64LSB": 0xb7 , # @dtprel(sym + add), data8 LSB
"R_IA64_LTOFF_DTPREL22": 0xba , # @ltoff(@dtprel(s+a)), imm22
"EF_SH_MACH_MASK": 0x1f , #
"EF_SH_UNKNOWN": 0x0 , #
"EF_SH1": 0x1 , #
"EF_SH2": 0x2 , #
"EF_SH3": 0x3 , #
"EF_SH_DSP": 0x4 , #
"EF_SH3_DSP": 0x5 , #
"EF_SH4AL_DSP": 0x6 , #
"EF_SH3E": 0x8 , #
"EF_SH4": 0x9 , #
"EF_SH2E": 0xb , #
"EF_SH4A": 0xc , #
"EF_SH2A": 0xd , #
"EF_SH4_NOFPU": 0x10 , #
"EF_SH4A_NOFPU": 0x11 , #
"EF_SH4_NOMMU_NOFPU": 0x12 , #
"EF_SH2A_NOFPU": 0x13 , #
"EF_SH3_NOMMU": 0x14 , #
"EF_SH2A_SH4_NOFPU": 0x15 , #
"EF_SH2A_SH3_NOFPU": 0x16 , #
"EF_SH2A_SH4": 0x17 , #
"EF_SH2A_SH3E": 0x18 , #
"R_SH_NONE": 0 , #
"R_SH_DIR32": 1 , #
"R_SH_REL32": 2 , #
"R_SH_DIR8WPN": 3 , #
"R_SH_IND12W": 4 , #
"R_SH_DIR8WPL": 5 , #
"R_SH_DIR8WPZ": 6 , #
"R_SH_DIR8BP": 7 , #
"R_SH_DIR8W": 8 , #
"R_SH_DIR8L": 9 , #
"R_SH_SWITCH16": 25 , #
"R_SH_SWITCH32": 26 , #
"R_SH_USES": 27 , #
"R_SH_COUNT": 28 , #
"R_SH_ALIGN": 29 , #
"R_SH_CODE": 30 , #
"R_SH_DATA": 31 , #
"R_SH_LABEL": 32 , #
"R_SH_SWITCH8": 33 , #
"R_SH_GNU_VTINHERIT": 34 , #
"R_SH_GNU_VTENTRY": 35 , #
"R_SH_TLS_GD_32": 144 , #
"R_SH_TLS_LD_32": 145 , #
"R_SH_TLS_LDO_32": 146 , #
"R_SH_TLS_IE_32": 147 , #
"R_SH_TLS_LE_32": 148 , #
"R_SH_TLS_DTPMOD32": 149 , #
"R_SH_TLS_DTPOFF32": 150 , #
"R_SH_TLS_TPOFF32": 151 , #
"R_SH_GOT32": 160 , #
"R_SH_PLT32": 161 , #
"R_SH_COPY": 162 , #
"R_SH_GLOB_DAT": 163 , #
"R_SH_JMP_SLOT": 164 , #
"R_SH_RELATIVE": 165 , #
"R_SH_GOTOFF": 166 , #
"R_SH_GOTPC": 167 , #
"R_SH_NUM": 256 , #
"EF_S390_HIGH_GPRS": 0x00000001 , # High GPRs kernel facility needed.
"R_390_NONE": 0 , # No reloc.
"R_390_8": 1 , # Direct 8 bit.
"R_390_12": 2 , # Direct 12 bit.
"R_390_16": 3 , # Direct 16 bit.
"R_390_32": 4 , # Direct 32 bit.
"R_390_PC32": 5 , # PC relative 32 bit.
"R_390_GOT12": 6 , # 12 bit GOT offset.
"R_390_GOT32": 7 , # 32 bit GOT offset.
"R_390_PLT32": 8 , # 32 bit PC relative PLT address.
"R_390_COPY": 9 , # Copy symbol at runtime.
"R_390_GLOB_DAT": 10 , # Create GOT entry.
"R_390_JMP_SLOT": 11 , # Create PLT entry.
"R_390_RELATIVE": 12 , # Adjust by program base.
"R_390_GOTOFF32": 13 , # 32 bit offset to GOT.
"R_390_GOTPC": 14 , # 32 bit PC relative offset to GOT.
"R_390_GOT16": 15 , # 16 bit GOT offset.
"R_390_PC16": 16 , # PC relative 16 bit.
"R_390_PC16DBL": 17 , # PC relative 16 bit shifted by 1.
"R_390_PLT16DBL": 18 , # 16 bit PC rel. PLT shifted by 1.
"R_390_PC32DBL": 19 , # PC relative 32 bit shifted by 1.
"R_390_PLT32DBL": 20 , # 32 bit PC rel. PLT shifted by 1.
"R_390_GOTPCDBL": 21 , # 32 bit PC rel. GOT shifted by 1.
"R_390_64": 22 , # Direct 64 bit.
"R_390_PC64": 23 , # PC relative 64 bit.
"R_390_GOT64": 24 , # 64 bit GOT offset.
"R_390_PLT64": 25 , # 64 bit PC relative PLT address.
"R_390_GOTENT": 26 , # 32 bit PC rel. to GOT entry >> 1.
"R_390_GOTOFF16": 27 , # 16 bit offset to GOT.
"R_390_GOTOFF64": 28 , # 64 bit offset to GOT.
"R_390_GOTPLT12": 29 , # 12 bit offset to jump slot.
"R_390_GOTPLT16": 30 , # 16 bit offset to jump slot.
"R_390_GOTPLT32": 31 , # 32 bit offset to jump slot.
"R_390_GOTPLT64": 32 , # 64 bit offset to jump slot.
"R_390_GOTPLTENT": 33 , # 32 bit rel. offset to jump slot.
"R_390_PLTOFF16": 34 , # 16 bit offset from GOT to PLT.
"R_390_PLTOFF32": 35 , # 32 bit offset from GOT to PLT.
"R_390_PLTOFF64": 36 , # 16 bit offset from GOT to PLT.
"R_390_TLS_LOAD": 37 , # Tag for load insn in TLS code.
"R_390_TLS_GDCALL": 38 , # Tag for function call in general
"R_390_TLS_LDCALL": 39 , # Tag for function call in local
"R_390_TLS_GD32": 40 , # Direct 32 bit for general dynamic
"R_390_TLS_GD64": 41 , # Direct 64 bit for general dynamic
"R_390_TLS_GOTIE12": 42 , # 12 bit GOT offset for static TLS
"R_390_TLS_GOTIE32": 43 , # 32 bit GOT offset for static TLS
"R_390_TLS_GOTIE64": 44 , # 64 bit GOT offset for static TLS
"R_390_TLS_LDM32": 45 , # Direct 32 bit for local dynamic
"R_390_TLS_LDM64": 46 , # Direct 64 bit for local dynamic
"R_390_TLS_IE32": 47 , # 32 bit address of GOT entry for
"R_390_TLS_IE64": 48 , # 64 bit address of GOT entry for
"R_390_TLS_IEENT": 49 , # 32 bit rel. offset to GOT entry for
"R_390_TLS_LE32": 50 , # 32 bit negated offset relative to
"R_390_TLS_LE64": 51 , # 64 bit negated offset relative to
"R_390_TLS_LDO32": 52 , # 32 bit offset relative to TLS
"R_390_TLS_LDO64": 53 , # 64 bit offset relative to TLS
"R_390_TLS_DTPMOD": 54 , # ID of module containing symbol.
"R_390_TLS_DTPOFF": 55 , # Offset in TLS block.
"R_390_TLS_TPOFF": 56 , # Negated offset in static TLS
"R_390_20": 57 , # Direct 20 bit.
"R_390_GOT20": 58 , # 20 bit GOT offset.
"R_390_GOTPLT20": 59 , # 20 bit offset to jump slot.
"R_390_TLS_GOTIE20": 60 , # 20 bit GOT offset for static TLS
"R_390_NUM": 61 , #
"R_CRIS_NONE": 0 , #
"R_CRIS_8": 1 , #
"R_CRIS_16": 2 , #
"R_CRIS_32": 3 , #
"R_CRIS_8_PCREL": 4 , #
"R_CRIS_16_PCREL": 5 , #
"R_CRIS_32_PCREL": 6 , #
"R_CRIS_GNU_VTINHERIT": 7 , #
"R_CRIS_GNU_VTENTRY": 8 , #
"R_CRIS_COPY": 9 , #
"R_CRIS_GLOB_DAT": 10 , #
"R_CRIS_JUMP_SLOT": 11 , #
"R_CRIS_RELATIVE": 12 , #
"R_CRIS_16_GOT": 13 , #
"R_CRIS_32_GOT": 14 , #
"R_CRIS_16_GOTPLT": 15 , #
"R_CRIS_32_GOTPLT": 16 , #
"R_CRIS_32_GOTREL": 17 , #
"R_CRIS_32_PLT_GOTREL": 18 , #
"R_CRIS_32_PLT_PCREL": 19 , #
"R_CRIS_NUM": 20 , #
"R_X86_64_NONE": 0 , # No reloc
"R_X86_64_64": 1 , # Direct 64 bit
"R_X86_64_PC32": 2 , # PC relative 32 bit signed
"R_X86_64_GOT32": 3 , # 32 bit GOT entry
"R_X86_64_PLT32": 4 , # 32 bit PLT address
"R_X86_64_COPY": 5 , # Copy symbol at runtime
"R_X86_64_GLOB_DAT": 6 , # Create GOT entry
"R_X86_64_JUMP_SLOT": 7 , # Create PLT entry
"R_X86_64_RELATIVE": 8 , # Adjust by program base
"R_X86_64_GOTPCREL": 9 , # 32 bit signed PC relative
"R_X86_64_32": 10 , # Direct 32 bit zero extended
"R_X86_64_32S": 11 , # Direct 32 bit sign extended
"R_X86_64_16": 12 , # Direct 16 bit zero extended
"R_X86_64_PC16": 13 , # 16 bit sign extended pc relative
"R_X86_64_8": 14 , # Direct 8 bit sign extended
"R_X86_64_PC8": 15 , # 8 bit sign extended pc relative
"R_X86_64_DTPMOD64": 16 , # ID of module containing symbol
"R_X86_64_DTPOFF64": 17 , # Offset in module's TLS block
"R_X86_64_TPOFF64": 18 , # Offset in initial TLS block
"R_X86_64_TLSGD": 19 , # 32 bit signed PC relative offset
"R_X86_64_TLSLD": 20 , # 32 bit signed PC relative offset
"R_X86_64_DTPOFF32": 21 , # Offset in TLS block
"R_X86_64_GOTTPOFF": 22 , # 32 bit signed PC relative offset
"R_X86_64_TPOFF32": 23 , # Offset in initial TLS block
"R_X86_64_PC64": 24 , # PC relative 64 bit
"R_X86_64_GOTOFF64": 25 , # 64 bit offset to GOT
"R_X86_64_GOTPC32": 26 , # 32 bit signed pc relative
"R_X86_64_GOT64": 27 , # 64-bit GOT entry offset
"R_X86_64_GOTPCREL64": 28 , # 64-bit PC relative offset
"R_X86_64_GOTPC64": 29 , # 64-bit PC relative offset to GOT
"R_X86_64_GOTPLT64": 30 , # like GOT64, says PLT entry needed
"R_X86_64_PLTOFF64": 31 , # 64-bit GOT relative offset
"R_X86_64_SIZE32": 32 , # Size of symbol plus 32-bit addend
"R_X86_64_SIZE64": 33 , # Size of symbol plus 64-bit addend
"R_X86_64_GOTPC32_TLSDESC": 34 , # GOT offset for TLS descriptor.
"R_X86_64_TLSDESC_CALL": 35 , # Marker for call through TLS
"R_X86_64_TLSDESC": 36 , # TLS descriptor.
"R_X86_64_IRELATIVE": 37 , # Adjust indirectly by program base
"R_X86_64_NUM": 38 , #
"R_MN10300_NONE": 0 , # No reloc.
"R_MN10300_32": 1 , # Direct 32 bit.
"R_MN10300_16": 2 , # Direct 16 bit.
"R_MN10300_8": 3 , # Direct 8 bit.
"R_MN10300_PCREL32": 4 , # PC-relative 32-bit.
"R_MN10300_PCREL16": 5 , # PC-relative 16-bit signed.
"R_MN10300_PCREL8": 6 , # PC-relative 8-bit signed.
"R_MN10300_GNU_VTINHERIT": 7 , # Ancient C++ vtable garbage...
"R_MN10300_GNU_VTENTRY": 8 , # ... collection annotation.
"R_MN10300_24": 9 , # Direct 24 bit.
"R_MN10300_GOTPC32": 10 , # 32-bit PCrel offset to GOT.
"R_MN10300_GOTPC16": 11 , # 16-bit PCrel offset to GOT.
"R_MN10300_GOTOFF32": 12 , # 32-bit offset from GOT.
"R_MN10300_GOTOFF24": 13 , # 24-bit offset from GOT.
"R_MN10300_GOTOFF16": 14 , # 16-bit offset from GOT.
"R_MN10300_PLT32": 15 , # 32-bit PCrel to PLT entry.
"R_MN10300_PLT16": 16 , # 16-bit PCrel to PLT entry.
"R_MN10300_GOT32": 17 , # 32-bit offset to GOT entry.
"R_MN10300_GOT24": 18 , # 24-bit offset to GOT entry.
"R_MN10300_GOT16": 19 , # 16-bit offset to GOT entry.
"R_MN10300_COPY": 20 , # Copy symbol at runtime.
"R_MN10300_GLOB_DAT": 21 , # Create GOT entry.
"R_MN10300_JMP_SLOT": 22 , # Create PLT entry.
"R_MN10300_RELATIVE": 23 , # Adjust by program base.
"R_MN10300_NUM": 24 , #
"R_M32R_NONE": 0 , # No reloc.
"R_M32R_16": 1 , # Direct 16 bit.
"R_M32R_32": 2 , # Direct 32 bit.
"R_M32R_24": 3 , # Direct 24 bit.
"R_M32R_10_PCREL": 4 , # PC relative 10 bit shifted.
"R_M32R_18_PCREL": 5 , # PC relative 18 bit shifted.
"R_M32R_26_PCREL": 6 , # PC relative 26 bit shifted.
"R_M32R_HI16_ULO": 7 , # High 16 bit with unsigned low.
"R_M32R_HI16_SLO": 8 , # High 16 bit with signed low.
"R_M32R_LO16": 9 , # Low 16 bit.
"R_M32R_SDA16": 10 , # 16 bit offset in SDA.
"R_M32R_GNU_VTINHERIT": 11 , #
"R_M32R_GNU_VTENTRY": 12 , #
"R_M32R_16_RELA": 33 , # Direct 16 bit.
"R_M32R_32_RELA": 34 , # Direct 32 bit.
"R_M32R_24_RELA": 35 , # Direct 24 bit.
"R_M32R_10_PCREL_RELA": 36 , # PC relative 10 bit shifted.
"R_M32R_18_PCREL_RELA": 37 , # PC relative 18 bit shifted.
"R_M32R_26_PCREL_RELA": 38 , # PC relative 26 bit shifted.
"R_M32R_HI16_ULO_RELA": 39 , # High 16 bit with unsigned low
"R_M32R_HI16_SLO_RELA": 40 , # High 16 bit with signed low
"R_M32R_LO16_RELA": 41 , # Low 16 bit
"R_M32R_SDA16_RELA": 42 , # 16 bit offset in SDA
"R_M32R_RELA_GNU_VTINHERIT": 43 , #
"R_M32R_RELA_GNU_VTENTRY": 44 , #
"R_M32R_REL32": 45 , # PC relative 32 bit.
"R_M32R_GOT24": 48 , # 24 bit GOT entry
"R_M32R_26_PLTREL": 49 , # 26 bit PC relative to PLT shifted
"R_M32R_COPY": 50 , # Copy symbol at runtime
"R_M32R_GLOB_DAT": 51 , # Create GOT entry
"R_M32R_JMP_SLOT": 52 , # Create PLT entry
"R_M32R_RELATIVE": 53 , # Adjust by program base
"R_M32R_GOTOFF": 54 , # 24 bit offset to GOT
"R_M32R_GOTPC24": 55 , # 24 bit PC relative offset to GOT
"R_M32R_GOT16_HI_ULO": 56 , # High 16 bit GOT entry with unsigned
"R_M32R_GOT16_HI_SLO": 57 , # High 16 bit GOT entry with signed
"R_M32R_GOT16_LO": 58 , # Low 16 bit GOT entry
"R_M32R_GOTPC_HI_ULO": 59 , # High 16 bit PC relative offset to
"R_M32R_GOTPC_HI_SLO": 60 , # High 16 bit PC relative offset to
"R_M32R_GOTPC_LO": 61 , # Low 16 bit PC relative offset to
"R_M32R_GOTOFF_HI_ULO": 62 , # High 16 bit offset to GOT
"R_M32R_GOTOFF_HI_SLO": 63 , # High 16 bit offset to GOT
"R_M32R_GOTOFF_LO": 64 , # Low 16 bit offset to GOT
"R_M32R_NUM": 256 , # Keep this the last entry.
"SHF_WRITE": (1 << 0) , # Writable
"SHF_ALLOC": (1 << 1) , # Occupies memory during execution
"SHF_EXECINSTR": (1 << 2) , # Executable
"SHF_MERGE": (1 << 4) , # Might be merged
"SHF_STRINGS": (1 << 5) , # Contains nul-terminated strings
"SHF_INFO_LINK": (1 << 6) , # `sh_info' contains SHT index
"SHF_LINK_ORDER": (1 << 7) , # Preserve order after combining
"SHF_OS_NONCONFORMING": (1 << 8) , # Non-standard OS specific handling
"SHF_GROUP": (1 << 9) , # Section is member of a group.
"SHF_TLS": (1 << 10) , # Section hold thread-local data.
# libelf.h constants
# ELF_C
"ELF_C_NULL": 0,
"ELF_C_READ": 1,
"ELF_C_WRITE": 2,
"ELF_C_CLR": 3,
"ELF_C_SET": 4,
"ELF_C_FDDONE": 5,
"ELF_C_FDREAD": 6,
"ELF_C_RDWR": 7,
"ELF_C_NUM": 8,
# ELF_K
"ELF_K_NONE": 0,
"ELF_K_AR": 1,
"ELF_K_COFF": 2,
"ELF_K_ELF": 3,
"ELF_K_NUM": 4,
# ELF_T
"ELF_T_BYTE": 00,
"ELF_T_ADDR": 01,
"ELF_T_DYN": 02,
"ELF_T_EHDR": 03,
"ELF_T_HALF": 04,
"ELF_T_OFF": 05,
"ELF_T_PHDR": 06,
"ELF_T_RELA": 07,
"ELF_T_REL": 8,
"ELF_T_SHDR": 9,
"ELF_T_SWORD": 10,
"ELF_T_SYM": 11,
"ELF_T_WORD": 12,
"ELF_T_SXWORD": 13,
"ELF_T_XWORD": 14,
"ELF_T_VDEF": 15,
"ELF_T_VNEED": 16,
"ELF_T_NUM": 17,
# ELF_F (ELF flags)
"ELF_F_DIRTY": 0x1 , #
"ELF_F_LAYOUT": 0x4 , #
"ELF_F_LAYOUT_OVERLAP": 0x10000000 , #
}
# Export every constant into the module namespace so callers can write
# e.g. ``R_X86_64_64`` instead of ``_consts["R_X86_64_64"]``.
g = globals()
g.update(_consts)

# ``dict.keys()`` returns a view object on Python 3; ``__all__`` is
# conventionally a list of names, so materialize it explicitly (this is
# also correct on Python 2, where keys() returned a list).
__all__ = list(_consts)
# TODO: Move these to the macros module
#elf.h
# Macro functions
#define ELF32_ST_VISIBILITY(o) ((o) & 0x03)
#define ELF64_ST_VISIBILITY(o) ELF32_ST_VISIBILITY (o)
#define DT_VALTAGIDX(tag) (DT_VALRNGHI - (tag)) # Reverse order!
#define DT_ADDRTAGIDX(tag) (DT_ADDRRNGHI - (tag)) # Reverse order!
#define DT_VERSIONTAGIDX(tag) (DT_VERNEEDNUM - (tag)) # Reverse order!
#define DT_EXTRATAGIDX(tag) ((Elf32_Word)-((Elf32_Sword) (tag) <<1>>1)-1)
#define ELF32_M_SYM(info) ((info) >> 8)
#define ELF32_M_SIZE(info) ((unsigned char) (info))
#define ELF32_M_INFO(sym, size) (((sym) << 8) + (unsigned char) (size))
#define ELF64_M_SYM(info) ELF32_M_SYM (info)
#define ELF64_M_SIZE(info) ELF32_M_SIZE (info)
#define ELF64_M_INFO(sym, size) ELF32_M_INFO (sym, size)
#define EF_ARM_EABI_VERSION(flags) ((flags) & EF_ARM_EABIMASK)
#libelf.h | unknown | codeparrot/codeparrot-clean | ||
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "FloatTypesCheck.h"
#include "clang/ASTMatchers/ASTMatchFinder.h"
#include "clang/Lex/Lexer.h"
namespace clang {
using namespace ast_matchers;
namespace {
// Matches nodes whose begin location is a real, spelled-out source location:
// invalid locations and locations inside macro expansions are rejected so the
// checks below never anchor a diagnostic where the user cannot act on it.
AST_POLYMORPHIC_MATCHER(isValidAndNotInMacro,
                        AST_POLYMORPHIC_SUPPORTED_TYPES(TypeLoc,
                                                        FloatingLiteral)) {
  const SourceLocation Loc = Node.getBeginLoc();
  if (!Loc.isValid())
    return false;
  return !Loc.isMacroID();
}
// Matches a TypeLoc that, once qualifiers are stripped, denotes the builtin
// 'long double' type.
AST_MATCHER(TypeLoc, isLongDoubleType) {
  // Look through qualifiers so that e.g. 'const long double' still matches.
  TypeLoc Stripped = Node;
  if (const auto Qualified = Node.getAs<QualifiedTypeLoc>())
    Stripped = Qualified.getUnqualifiedLoc();

  if (const auto Builtin = Stripped.getAs<BuiltinTypeLoc>())
    if (const auto *Ty = Builtin.getTypePtr())
      return Ty->getKind() == BuiltinType::LongDouble;
  return false;
}
// Matches floating-point literals whose type is 'long double' (i.e. literals
// carrying an 'L'/'l' suffix).
AST_MATCHER(FloatingLiteral, isLongDoubleLiteral) {
  const auto *Builtin =
      dyn_cast_if_present<BuiltinType>(Node.getType().getTypePtr());
  return Builtin && Builtin->getKind() == BuiltinType::LongDouble;
}
} // namespace
namespace tidy::google::runtime {
void RuntimeFloatCheck::registerMatchers(MatchFinder *Finder) {
  // Flag every written-out occurrence of the 'long double' type...
  auto TypeLocMatcher = typeLoc(loc(realFloatingPointType()),
                                isValidAndNotInMacro(), isLongDoubleType())
                            .bind("longDoubleTypeLoc");
  // ...and every floating literal spelled with the 'long double' suffix.
  auto LiteralMatcher =
      floatLiteral(isValidAndNotInMacro(), isLongDoubleLiteral())
          .bind("longDoubleFloatLiteral");

  Finder->addMatcher(TypeLocMatcher, this);
  Finder->addMatcher(LiteralMatcher, this);
}
void RuntimeFloatCheck::check(const MatchFinder::MatchResult &Result) {
  const auto &Nodes = Result.Nodes;

  // A match binds exactly one of the two ids; report whichever is present.
  if (const auto *TL = Nodes.getNodeAs<TypeLoc>("longDoubleTypeLoc"))
    diag(TL->getBeginLoc(), "%0 type is not portable and should not be used")
        << TL->getType();

  if (const auto *FL = Nodes.getNodeAs<FloatingLiteral>("longDoubleFloatLiteral"))
    diag(FL->getBeginLoc(), "%0 type from literal suffix 'L' is not portable "
                            "and should not be used")
        << FL->getType();
}
} // namespace tidy::google::runtime
} // namespace clang | cpp | github | https://github.com/llvm/llvm-project | clang-tools-extra/clang-tidy/google/FloatTypesCheck.cpp |
#!/bin/bash
# Build the scikit-learn source distribution (sdist) inside a throwaway
# virtual environment, then verify the archive's metadata renders cleanly.

# Abort on the first failing command; echo each command as it runs so the
# CI log shows exactly what was executed.
set -e
set -x
# Move up two levels to create the virtual
# environment outside of the source folder
cd ../../
python -m venv build_env
source build_env/bin/activate
# Build-time dependencies, then the tools used to create and check the sdist.
python -m pip install numpy scipy cython
python -m pip install twine build
cd scikit-learn/scikit-learn
python -m build --sdist
# Check whether the source distribution will render correctly
twine check dist/*.tar.gz
# React Router Development
## Releases
New 7.x releases should be created from release branches originating from the `dev` branch. If you are doing a 6.x release, please see the [v6 section](#v6-releases) below.
When you are ready to begin the release process:
- Make sure you've pulled all the changes from GitHub for both `dev` and `main` branches
- `git checkout main && git pull origin main`
- `git checkout dev && git pull origin dev`
- Check out the `dev` branch
- `git checkout dev`
- Create a new `release-next` branch
- `git checkout -b release-next`
- Technically, any `release-*` branch name will work as this is what triggers our GitHub CI workflow that will ultimately publish the release - but we just always use `release-next`
- We are using `release-v6` for [ongoing v6 releases](#v6-releases)
- Merge `main` into the `release-next` branch
- `git merge --no-ff main`
Changesets will do most of the heavy lifting for our releases. When changes are made to the codebase, an accompanying changeset file should be included to document the change. Those files will dictate how Changesets will version our packages and what shows up in the changelogs.
### Starting a new pre-release
- Ensure you are on the new `release-next` branch
- `git checkout release-next`
- Enter Changesets pre-release mode using the `pre` tag:
- `pnpm changeset pre enter pre`
- Commit the change and push the `release-next` branch to GitHub
- `git commit -a -m "Enter prerelease mode"`
- `git push --set-upstream origin release-next`
- Wait for the changesets CI workflow to finish which will open a PR pointed to `release-next` that will increment all versions and generate the changelogs
- If you need/want to make any changes to the `CHANGELOG.md` files, you can do so and commit directly to the PR branch
- This is usually not required for prereleases
- Once the changesets files are in good shape, merge the PR to `release-next`
- Once the PR is merged, the release workflow will publish the updated `X.Y.Z-pre.*` packages to npm
### Prepare the draft release notes
- At this point, you can begin crafting the release notes for the eventual stable release in the root `CHANGELOG.md` file in the repo
- Copy the commented out template for a new release and update the version numbers and links accordingly
- Copy the relevant changelog entries from all packages into the release notes and adjust accordingly
- `find packages -name 'CHANGELOG.md' -mindepth 2 -maxdepth 2 -exec code {} \;`
- Commit these changes directly to the `release-next` branch - they will not trigger a new prerelease since they do not include a changeset
### Iterating a pre-release
You may need to make changes to a pre-release prior to publishing a final stable release. To do so:
- Branch off of `release-next` and make whatever changes you need
- Create a new changeset: `pnpm changeset`
- **IMPORTANT:** This is required even if you ultimately don't want to include these changes in the logs. Remember, changelogs can be edited prior to publishing, but the Changeset version script needs to see new changesets in order to create a new version
- Push your branch to GitHub and PR it to `release-next`
- Once reviewed/approved, merge the PR to the `release-next` branch
- Wait for the release workflow to finish and the Changesets action to open its PR that will increment all versions
- Note: If more changes are needed you can just merge them to `release-next` and this PR will automatically update in place
- Review the PR, make any adjustments necessary, and merge it into the `release-next` branch
- Once the PR is merged, the release workflow will publish the updated `X.Y.Z-pre.*` packages to npm
- Make sure you copy over the new changeset contents into stable release notes in the root `CHANGELOG.md` file in the repo
### Publishing the stable release
- Exit Changesets pre-release mode in the `release-next` branch:
- `pnpm changeset pre exit`
- Commit the edited pre-release file along with any unpublished changesets, and push the `release-next` branch to GitHub
- Wait for the release workflow to finish - the Changesets action in the workflow will open a PR that will increment all versions and generate the changelogs for the stable release
- Review the updated `CHANGELOG` files in the PR and make any adjustments necessary
- `find packages -name 'CHANGELOG.md' -mindepth 2 -maxdepth 2 -exec code {} \;`
- Our automated release process should have removed prerelease entries
- Finalize the release notes
- This should already be in pretty good shape in the root `CHANGELOG.md` file in the repo because changes have been added with each prerelease
- Do a quick double check that all iterated prerelease changesets got copied over
- Merge the PR into the `release-next` branch
- Once the PR is merged, the release workflow will publish the updated packages to npm
- Once the release is published:
- Pull the latest `release-next` branch containing the PR you just merged
- Merge the `release-next` branch into `main` **using a non-fast-forward merge** and push it up to GitHub
- `git checkout main`
- `git merge --no-ff release-next`
- `git push origin main`
- _Note:_ For the `v7.0.0` stable release, there will probably be a bunch of conflicts on `docs/**/*.md` files here because we have made changes to v6 docs but in `dev` we removed a lot of those files in favor of auto-generated API docs. To resolve those conflicts, we should accept the deletion from the `release-next` branch.
- Merge the `release-next` branch into `dev` **using a non-fast-forward merge** and push it up to GitHub
- `git checkout dev`
- `git merge --no-ff release-next`
- `git push origin dev`
- Convert the `react-router@6.x.y` tag to a Release on GitHub with the name `v6.x.y` and add a deep-link to the release heading in `CHANGELOG.md`
- Delete the `release-next` branch locally and on GitHub
### Hotfix releases
Hotfix releases follow the same process as standard releases above, but the `release-next` branch should be branched off latest `main` instead of `dev`. Once the stable hotfix is published, the `release-next` branch should be merged back into both `main` and `dev` just like a normal release.
### v6 releases
6.x releases are managed in a similar process to the above but from the `v6` branch, and they do not automatically merge changes back to `dev`/`main`.
- Changes for 6.x should be PR'd to the `v6` branch with a changeset
- If these changes should also be applied to v7, cherry-pick or re-do those changes against the `dev` branch (including the changeset). These changes will make it to `main` with the next v7 release.
- Starting the release process for 6.x is the same as outlined above, with a few exceptions:
- Branch from `v6` instead of `dev`
- Use `release-v6` instead of `release-next`
- Do **not** merge `main` into `release-v6`
- Steps:
- `git checkout v6 && git pull origin v6`
- `git checkout -b release-v6`
- `pnpm changeset pre enter pre-v6`
- The process of the PRs and iterating on prereleases remains the same
- Once the stable release is out:
- Merge `release-v6` back to `v6` with a **Normal Merge**
- **Do not** merge `release-v6` to `main`
- Manually copy the new root `CHANGELOG.md` entry to `main` and `dev`
- We don't worry about backporting individual `packages/*/CHANGELOG.md` updates to `main` for subsequent v6 releases
- The _code_ changes should already be in the `dev` branch
- This should have happened at the time the v6 change was made (except for changes such as deprecation warnings)
- Confirm that the commits in this release are all included in `dev` already, and if not you can manually bring them over by cherry-picking the commit or re-doing the work
### Experimental releases
Experimental releases and hot-fixes do not need to be branched off of `dev`. Experimental releases can be branched from anywhere as they are not intended for general use.
- Create a new branch for the release: `git checkout -b release-experimental`
- Make whatever changes you need and commit them: `git add . && git commit "experimental changes!"`
- Update version numbers and create a release tag: `pnpm run version:experimental`
- Push to GitHub: `git push origin --follow-tags`
- The CI workflow should automatically trigger from the experimental tag to publish the release to npm | unknown | github | https://github.com/remix-run/react-router | DEVELOPMENT.md |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from oslo_log import log as logging
from oslo_utils import reflection
from neutron.callbacks import events
from neutron.callbacks import exceptions
from neutron.callbacks import resources
from neutron.i18n import _LE
LOG = logging.getLogger(__name__)
class CallbacksManager(object):
    """A callback system that allows objects to cooperate in a loose manner.

    Internally two structures are kept in sync:

    * ``_callbacks[resource][event]`` maps a callback id to the callable,
      used at notification time.
    * ``_index[callback_id][resource]`` is a set of events, used to make
      unsubscription fast without scanning ``_callbacks``.
    """

    def __init__(self):
        # type: () -> None
        # Start from a clean slate; clear() builds both lookup structures.
        self.clear()

    def subscribe(self, callback, resource, event):
        """Subscribe callback for a resource event.

        The same callback may register for more than one event.

        :param callback: the callback. It must raise or return a boolean.
        :param resource: the resource. It must be a valid resource.
        :param event: the event. It must be a valid event.
        :raises exceptions.Invalid: if resource or event is not recognized.
        """
        LOG.debug("Subscribe: %(callback)s %(resource)s %(event)s",
                  {'callback': callback, 'resource': resource, 'event': event})
        # Validate both dimensions before touching any internal state.
        if resource not in resources.VALID:
            raise exceptions.Invalid(element='resource', value=resource)
        if event not in events.VALID:
            raise exceptions.Invalid(element='event', value=event)

        callback_id = _get_id(callback)
        self._callbacks[resource][event][callback_id] = callback
        # We keep a copy of callbacks to speed the unsubscribe operation.
        if callback_id not in self._index:
            self._index[callback_id] = collections.defaultdict(set)
        self._index[callback_id][resource].add(event)

    def unsubscribe(self, callback, resource, event):
        """Unsubscribe callback from the registry.

        :param callback: the callback.
        :param resource: the resource.
        :param event: the event.
        :raises exceptions.Invalid: if resource or event is falsy; both must
            be provided for a targeted unsubscription.
        """
        LOG.debug("Unsubscribe: %(callback)s %(resource)s %(event)s",
                  {'callback': callback, 'resource': resource, 'event': event})
        callback_id = self._find(callback)
        if not callback_id:
            # NOTE(review): callback_id is None here, so the log line prints
            # "Callback None not found" rather than the callback's name.
            LOG.debug("Callback %s not found", callback_id)
            return
        if resource and event:
            del self._callbacks[resource][event][callback_id]
            self._index[callback_id][resource].discard(event)
            # Prune empty index entries so _find() keeps returning None for
            # fully unsubscribed callbacks.
            if not self._index[callback_id][resource]:
                del self._index[callback_id][resource]
                if not self._index[callback_id]:
                    del self._index[callback_id]
        else:
            value = '%s,%s' % (resource, event)
            raise exceptions.Invalid(element='resource,event', value=value)

    def unsubscribe_by_resource(self, callback, resource):
        """Unsubscribe callback for any event associated to the resource.

        :param callback: the callback.
        :param resource: the resource.
        """
        callback_id = self._find(callback)
        if callback_id:
            if resource in self._index[callback_id]:
                # Remove the callback from every event of this resource,
                # then drop the resource from the reverse index.
                for event in self._index[callback_id][resource]:
                    del self._callbacks[resource][event][callback_id]
                del self._index[callback_id][resource]
                if not self._index[callback_id]:
                    del self._index[callback_id]

    def unsubscribe_all(self, callback):
        """Unsubscribe callback for all events and all resources.

        :param callback: the callback.
        """
        callback_id = self._find(callback)
        if callback_id:
            # Walk the reverse index to delete every registration, then
            # drop the whole index entry in one go.
            for resource, resource_events in self._index[callback_id].items():
                for event in resource_events:
                    del self._callbacks[resource][event][callback_id]
            del self._index[callback_id]

    def notify(self, resource, event, trigger, **kwargs):
        """Notify all subscribed callback(s).

        Dispatch the resource's event to the subscribed callbacks.

        :param resource: the resource.
        :param event: the event.
        :param trigger: the trigger. A reference to the sender of the event.
        :raises exceptions.CallbackFailure: if any callback failed and the
            event is a BEFORE_* event; the matching ABORT_* notification is
            dispatched first. Errors during other events are only logged.
        """
        errors = self._notify_loop(resource, event, trigger, **kwargs)
        if errors and event.startswith(events.BEFORE):
            # Give subscribers a chance to roll back before failing.
            abort_event = event.replace(
                events.BEFORE, events.ABORT)
            self._notify_loop(resource, abort_event, trigger)
            raise exceptions.CallbackFailure(errors=errors)

    def clear(self):
        """Brings the manager to a clean slate."""
        self._callbacks = collections.defaultdict(dict)
        self._index = collections.defaultdict(dict)
        # Pre-populate every valid (resource, event) pair so notify() never
        # has to special-case missing keys.
        for resource in resources.VALID:
            for event in events.VALID:
                self._callbacks[resource][event] = collections.defaultdict()

    def _notify_loop(self, resource, event, trigger, **kwargs):
        """The notification loop.

        Invoke every callback registered for (resource, event); exceptions
        are caught, logged, and collected rather than propagated.

        :returns: list of NotificationError, one per failing callback.
        """
        LOG.debug("Notify callbacks for %(resource)s, %(event)s",
                  {'resource': resource, 'event': event})
        errors = []
        # TODO(armax): consider using a GreenPile
        for callback_id, callback in self._callbacks[resource][event].items():
            try:
                LOG.debug("Calling callback %s", callback_id)
                callback(resource, event, trigger, **kwargs)
            except Exception as e:
                LOG.exception(_LE("Error during notification for "
                                  "%(callback)s %(resource)s, %(event)s"),
                              {'callback': callback_id,
                               'resource': resource,
                               'event': event})
                errors.append(exceptions.NotificationError(callback_id, e))
        return errors

    def _find(self, callback):
        """Return the callback_id if found, None otherwise."""
        callback_id = _get_id(callback)
        return callback_id if callback_id in self._index else None
def _get_id(callback):
    """Return a unique identifier for the callback.

    The identifier is the callback's fully qualified callable name, so two
    distinct callables sharing the same qualified name would collide.
    """
    # TODO(armax): consider using something other than names
    # https://www.python.org/dev/peps/pep-3155/, but this
    # might be okay for now.
    return reflection.get_callable_name(callback)
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.security.authentication.util;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Properties;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.junit.jupiter.api.Test;
public class TestStringSignerSecretProvider {
@Test
public void testGetSecrets() throws Exception {
String secretStr = "secret";
StringSignerSecretProvider secretProvider
= new StringSignerSecretProvider();
Properties secretProviderProps = new Properties();
secretProviderProps.setProperty(
AuthenticationFilter.SIGNATURE_SECRET, "secret");
secretProvider.init(secretProviderProps, null, -1);
byte[] secretBytes = secretStr.getBytes();
assertArrayEquals(secretBytes, secretProvider.getCurrentSecret());
byte[][] allSecrets = secretProvider.getAllSecrets();
assertEquals(1, allSecrets.length);
assertArrayEquals(secretBytes, allSecrets[0]);
}
} | java | github | https://github.com/apache/hadoop | hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestStringSignerSecretProvider.java |
# writeable [](https://crates.io/crates/writeable)
<!-- cargo-rdme start -->
This crate defines [`Writeable`], a trait representing an object that can be written to a
sink implementing `std::fmt::Write`. It is an alternative to `std::fmt::Display` with the
addition of a function indicating the number of bytes to be written.
`Writeable` improves upon `std::fmt::Display` in two ways:
1. More efficient, since the sink can pre-allocate bytes.
2. Smaller code, since the format machinery can be short-circuited.
This crate also exports [`TryWriteable`], a writeable that supports a custom error.
## Benchmarks
The benchmarks to generate the following data can be found in the `benches` directory.
| Case | `Writeable` | `Display` |
|---|---|---|
| Create string from single-string message (139 chars) | 15.642 ns | 19.251 ns |
| Create string from complex message | 35.830 ns | 89.478 ns |
| Write complex message to buffer | 57.336 ns | 64.408 ns |
## Examples
```rust
use std::fmt;
use writeable::assert_writeable_eq;
use writeable::LengthHint;
use writeable::Writeable;
struct WelcomeMessage<'s> {
pub name: &'s str,
}
impl<'s> Writeable for WelcomeMessage<'s> {
fn write_to<W: fmt::Write + ?Sized>(&self, sink: &mut W) -> fmt::Result {
sink.write_str("Hello, ")?;
sink.write_str(self.name)?;
sink.write_char('!')?;
Ok(())
}
fn writeable_length_hint(&self) -> LengthHint {
// "Hello, " + '!' + length of name
LengthHint::exact(8 + self.name.len())
}
}
let message = WelcomeMessage { name: "Alice" };
assert_writeable_eq!(&message, "Hello, Alice!");
// Types implementing `Writeable` are recommended to also implement `fmt::Display`.
// This can be simply done by redirecting to the `Writeable` implementation:
writeable::impl_display_with_writeable!(WelcomeMessage<'_>);
assert_eq!(message.to_string(), "Hello, Alice!");
```
[`ICU4X`]: ../icu/index.html
<!-- cargo-rdme end -->
## More Information
For more information on development, authorship, contributing etc. please visit [`ICU4X home page`](https://github.com/unicode-org/icu4x). | unknown | github | https://github.com/nodejs/node | deps/crates/vendor/writeable/README.md |
# -*- coding: utf-8 -*-
# $Id: da.py 7678 2013-07-03 09:57:36Z milde $
# Author: E D
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Danish-language mappings for language-dependent features of
reStructuredText.
"""
__docformat__ = 'reStructuredText'
# Mapping of Danish directive names to the canonical directive names
# registered in docutils.parsers.rst.directives. Keys marked
# "(translation required)" still await a Danish translation and therefore
# use the English name as placeholder.
directives = {
      # language-dependent: fixed
      u'giv agt': 'attention',
      u'pas på': 'caution',
      u'kode': 'code',
      u'kode-blok': 'code',
      u'kildekode': 'code',
      u'fare': 'danger',
      u'fejl': 'error',
      u'vink': 'hint',
      u'vigtigt': 'important',
      u'bemærk': 'note',
      u'tips': 'tip',
      u'advarsel': 'warning',
      u'formaning': 'admonition',
      u'sidebjælke': 'sidebar',
      u'emne': 'topic',
      # both spellings ("linje"/"linie") are accepted for line-block
      u'linje-blok': 'line-block',
      u'linie-blok': 'line-block',
      u'parset-literal': 'parsed-literal',
      u'rubrik': 'rubric',
      u'epigraf': 'epigraph',
      u'fremhævninger': 'highlights',
      u'pull-quote (translation required)': 'pull-quote',
      u'compound (translation required)': 'compound',
      u'container (translation required)': 'container',
      #'questions': 'questions',
      u'tabel': 'table',
      u'csv-tabel': 'csv-table',
      u'liste-tabel': 'list-table',
      #'qa': 'questions',
      #'faq': 'questions',
      u'meta': 'meta',
      u'math (translation required)': 'math',
      #'imagemap': 'imagemap',
      u'billede': 'image',
      u'figur': 'figure',
      # with and without the accented character
      u'inkludér': 'include',
      u'inkluder': 'include',
      u'rå': 'raw',
      u'erstat': 'replace',
      u'unicode': 'unicode',
      u'dato': 'date',
      u'klasse': 'class',
      u'rolle': 'role',
      u'forvalgt-rolle': 'default-role',
      u'titel': 'title',
      u'indhold': 'contents',
      u'sektnum': 'sectnum',
      u'sektions-nummerering': 'sectnum',
      u'sidehovede': 'header',
      u'sidefod': 'footer',
      #'footnotes': 'footnotes',
      #'citations': 'citations',
      u'target-notes (translation required)': 'target-notes',
      u'restructuredtext-test-direktiv': 'restructuredtext-test-directive'}
"""Danish name to registered (in directives/__init__.py) directive name
mapping."""

# Mapping of Danish role names to the canonical role names for interpreted
# text. Same "(translation required)" placeholder convention as above.
roles = {
    # language-dependent: fixed
    u'forkortelse': 'abbreviation',
    u'fork': 'abbreviation',
    u'akronym': 'acronym',
    u'ac (translation required)': 'acronym',
    u'kode': 'code',
    u'indeks': 'index',
    u'i': 'index',
    u'subscript (translation required)': 'subscript',
    u'sub (translation required)': 'subscript',
    u'superscript (translation required)': 'superscript',
    u'sup (translation required)': 'superscript',
    u'titel-reference': 'title-reference',
    u'titel': 'title-reference',
    u't': 'title-reference',
    u'pep-reference': 'pep-reference',
    u'pep': 'pep-reference',
    u'rfc-reference': 'rfc-reference',
    u'rfc': 'rfc-reference',
    u'emfase': 'emphasis',
    u'kraftig': 'strong',
    u'literal': 'literal',
    u'math (translation required)': 'math',
    u'navngivet-reference': 'named-reference',
    u'anonym-reference': 'anonymous-reference',
    u'fodnote-reference': 'footnote-reference',
    u'citation-reference (translation required)': 'citation-reference',
    u'substitutions-reference': 'substitution-reference',
    u'target (translation required)': 'target',
    u'uri-reference': 'uri-reference',
    u'uri': 'uri-reference',
    u'url': 'uri-reference',
    u'rå': 'raw',}
"""Mapping of Danish role names to canonical role names for interpreted text.
"""
'''
This module defines multiple utility functions for filtering, creation, slicing,
etc. of neo.core objects.
'''
import neo
import copy
import warnings
import numpy as np
import quantities as pq
def get_events(container, **properties):
    """
    Return a list of Event objects from `container` matching the given
    filter criteria.

    Parameters
    ----------
    container : neo.Block or neo.Segment
        The object to extract Events from.

    Keyword Arguments
    -----------------
    Each keyword is matched against an attribute or an (array-)annotation
    of the Events. If a value is a list with one entry per event in the
    Event object, entries are matched element-wise and a reduced Event is
    kept; otherwise the value is compared against the attribute or
    annotation as a whole, so the Event is either kept completely or
    dropped. Without keyword arguments, all Events are returned.

    Returns
    -------
    list of neo.Event
        The (possibly reduced) Events matching the criteria.

    Raises
    ------
    TypeError
        If `container` is neither a Block nor a Segment.
    """
    if isinstance(container, neo.Segment):
        return _get_from_list(container.events, prop=properties)

    if isinstance(container, neo.Block):
        matched = []
        for segment in container.segments:
            matched.extend(_get_from_list(segment.events, prop=properties))
        return matched

    raise TypeError(
        'Container needs to be of type Block or Segment, not %s '
        'in order to extract Events.' % (type(container)))
def get_epochs(container, **properties):
    """
    Return a list of Epoch objects from `container` matching the given
    filter criteria.

    Parameters
    ----------
    container : neo.Block or neo.Segment
        The object to extract Epochs from.

    Keyword Arguments
    -----------------
    Each keyword is matched against an attribute or an (array-)annotation
    of the Epochs. If a value is a list with one entry per epoch in the
    Epoch object, entries are matched element-wise and a reduced Epoch is
    kept; otherwise the value is compared against the attribute or
    annotation as a whole, so the Epoch is either kept completely or
    dropped. Without keyword arguments, all Epochs are returned.

    Returns
    -------
    list of neo.Epoch
        The (possibly reduced) Epochs matching the criteria.

    Raises
    ------
    TypeError
        If `container` is neither a Block nor a Segment.
    """
    if isinstance(container, neo.Segment):
        return _get_from_list(container.epochs, prop=properties)

    if isinstance(container, neo.Block):
        matched = []
        for segment in container.segments:
            matched.extend(_get_from_list(segment.epochs, prop=properties))
        return matched

    raise TypeError(
        'Container needs to be of type Block or Segment, not %s '
        'in order to extract Epochs.' % (type(container)))
def _get_from_list(input_list, prop=None):
"""
Internal function
"""
output_list = []
# empty or no dictionary
if not prop or bool([b for b in prop.values() if b == []]):
output_list += [e for e in input_list]
# dictionary is given
else:
for ep in input_list:
if isinstance(ep, neo.Epoch) or isinstance(ep, neo.Event):
sparse_ep = ep.copy()
elif isinstance(ep, neo.io.proxyobjects.EpochProxy) \
or isinstance(ep, neo.io.proxyobjects.EventProxy):
# need to load the Event/Epoch in order to be able to filter by array annotations
sparse_ep = ep.load()
for k in prop.keys():
sparse_ep = _filter_event_epoch(sparse_ep, k, prop[k])
# if there is nothing left, it cannot filtered
if sparse_ep is None:
break
if sparse_ep is not None:
output_list.append(sparse_ep)
return output_list
def _filter_event_epoch(obj, annotation_key, annotation_value):
    """
    Internal function.

    Return a copy of `obj` (Event or Epoch) restricted to the entries whose
    attribute or (array-)annotation named `annotation_key` equals, or is
    contained in, `annotation_value`.

    :param obj: the Event or Epoch to filter.
    :param annotation_key: name of the attribute/annotation to filter on.
    :param annotation_value: accepted value or list of accepted values.
    :returns: the reduced Event/Epoch, or None when no entry matches.
    """
    matching_ids = _get_valid_ids(obj, annotation_key, annotation_value)
    if len(matching_ids) == 0:
        # Nothing survived the filter; signal that with None.
        return None
    return _event_epoch_slice_by_valid_ids(obj, matching_ids)
def _event_epoch_slice_by_valid_ids(obj, valid_ids):
    """
    Internal function.

    Return a deep copy of `obj` containing only the entries at `valid_ids`.

    :raises TypeError: for anything that is not exactly an Event or Epoch.
    """
    # Deliberately an exact type check (not isinstance), mirroring the
    # strictness of the original implementation.
    if type(obj) not in (neo.Event, neo.Epoch):
        raise TypeError('Can only slice Event and Epoch objects by valid IDs.')
    return copy.deepcopy(obj[valid_ids])
def _get_valid_ids(obj, annotation_key, annotation_value):
"""
Internal function
"""
valid_mask = np.zeros(obj.shape)
if annotation_key in obj.annotations and obj.annotations[annotation_key] == annotation_value:
valid_mask = np.ones(obj.shape)
elif annotation_key == 'labels':
# wrap annotation value to be list
if not type(annotation_value) in [list, np.ndarray]:
annotation_value = [annotation_value]
valid_mask = np.in1d(obj.labels, annotation_value)
elif annotation_key in obj.array_annotations:
# wrap annotation value to be list
if not type(annotation_value) in [list, np.ndarray]:
annotation_value = [annotation_value]
valid_mask = np.in1d(obj.array_annotations[annotation_key], annotation_value)
elif hasattr(obj, annotation_key) and getattr(obj, annotation_key) == annotation_value:
valid_mask = np.ones(obj.shape)
valid_ids = np.where(valid_mask)[0]
return valid_ids
def add_epoch(
        segment, event1, event2=None, pre=0 * pq.s, post=0 * pq.s,
        attach_result=True, **kwargs):
    """
    Create Epochs around a single Event, or between pairs of events. Starting
    and end time of the Epoch can be modified using pre and post as offsets
    before the and after the event(s). Additional keywords will be directly
    forwarded to the Epoch intialization.

    Parameters:
    -----------
    segment : Segment
        The segment in which the final Epoch object is added.
    event1 : Event
        The Event objects containing the start events of the epochs. If no
        event2 is specified, these event1 also specifies the stop events, i.e.,
        the Epoch is cut around event1 times.
    event2: Event
        The Event objects containing the stop events of the epochs. If no
        event2 is specified, event1 specifies the stop events, i.e., the Epoch
        is cut around event1 times. The number of events in event2 must match
        that of event1.
    pre, post: Quantity (time)
        Time offsets to modify the start (pre) and end (post) of the resulting
        Epoch. Example: pre=-10*ms and post=+25*ms will cut from 10 ms before
        event1 times to 25 ms after event2 times
    attach_result: bool
        If True, the resulting Epoch object is added to segment.

    Keyword Arguments:
    ------------------
    Passed to the Epoch object.

    Returns:
    --------
    epoch: Epoch
        An Epoch object with the calculated epochs (one per entry in event1).

    Raises:
    -------
    TypeError: if segment/event1/event2 have the wrong type.
    ValueError: if event lengths differ or any epoch duration is <= 0.

    See also:
    ---------
    Event.to_epoch()
    """
    # A single event defines both the start and the stop of each epoch.
    if event2 is None:
        event2 = event1

    if not isinstance(segment, neo.Segment):
        raise TypeError(
            'Segment has to be of type Segment, not %s' % type(segment))

    # load the full event if a proxy object has been given as an argument
    if isinstance(event1, neo.io.proxyobjects.EventProxy):
        event1 = event1.load()
    if isinstance(event2, neo.io.proxyobjects.EventProxy):
        event2 = event2.load()

    for event in [event1, event2]:
        if not isinstance(event, neo.Event):
            raise TypeError(
                'Events have to be of type Event, not %s' % type(event))

    if len(event1) != len(event2):
        raise ValueError(
            'event1 and event2 have to have the same number of entries in '
            'order to create epochs between pairs of entries. Match your '
            'events before generating epochs. Current event lengths '
            'are %i and %i' % (len(event1), len(event2)))

    # Epochs start pre after event1 and end post after event2.
    times = event1.times + pre
    durations = event2.times + post - times

    if any(durations < 0):
        raise ValueError(
            'Can not create epoch with negative duration. '
            'Requested durations %s.' % durations)
    elif any(durations == 0):
        raise ValueError('Can not create epoch with zero duration.')

    if 'name' not in kwargs:
        kwargs['name'] = 'epoch'
    if 'labels' not in kwargs:
        # Default labels: "<name>_0", "<name>_1", ...
        kwargs['labels'] = [u'{}_{}'.format(kwargs['name'], i)
                            for i in range(len(times))]

    ep = neo.Epoch(times=times, durations=durations, **kwargs)

    # Only event1's (array-)annotations are propagated to the new Epoch.
    ep.annotate(**event1.annotations)
    ep.array_annotate(**event1.array_annotations)

    if attach_result:
        segment.epochs.append(ep)
        # NOTE(review): neo Segments typically expose
        # create_many_to_one_relationship(); confirm create_relationship()
        # exists in the targeted neo version.
        segment.create_relationship()

    return ep
def match_events(event1, event2):
    """
    Finds pairs of Event entries in event1 and event2 with the minimum delay,
    such that the entry of event1 directly precedes the entry of event2.
    Returns filtered two events of identical length, which contain matched
    entries.

    The matching is a single forward sweep with two indices, so both inputs
    are assumed to be sorted by time.

    Parameters:
    -----------
    event1, event2: Event
        The two Event objects to match up.

    Returns:
    --------
    event1, event2: Event
        Event objects with identical number of events, containing only those
        events that could be matched against each other. A warning is issued if
        not all events in event1 or event2 could be matched.
    """
    # load the full event if a proxy object has been given as an argument
    if isinstance(event1, neo.io.proxyobjects.EventProxy):
        event1 = event1.load()
    if isinstance(event2, neo.io.proxyobjects.EventProxy):
        event2 = event2.load()

    # Two-pointer sweep over both (time-sorted) event lists.
    id1, id2 = 0, 0
    match_ev1, match_ev2 = [], []
    while id1 < len(event1) and id2 < len(event2):
        time1 = event1.times[id1]
        time2 = event2.times[id2]

        # wrong order of events: advance event2 until it follows event1
        if time1 >= time2:
            id2 += 1

        # shorter epoch possible by later event1 entry
        elif id1 + 1 < len(event1) and event1.times[id1 + 1] < time2:
            # there is no event in 2 until the next event in 1
            id1 += 1

        # found a match: record the pair and advance both pointers
        else:
            match_ev1.append(id1)
            match_ev2.append(id2)
            id1 += 1
            id2 += 1

    if id1 < len(event1):
        warnings.warn(
            'Could not match all events to generate epochs. Missed '
            '%s event entries in event1 list' % (len(event1) - id1))
    if id2 < len(event2):
        warnings.warn(
            'Could not match all events to generate epochs. Missed '
            '%s event entries in event2 list' % (len(event2) - id2))

    # Slice both events down to the matched entries only.
    event1_matched = _event_epoch_slice_by_valid_ids(
        obj=event1, valid_ids=match_ev1)
    event2_matched = _event_epoch_slice_by_valid_ids(
        obj=event2, valid_ids=match_ev2)

    return event1_matched, event2_matched
def cut_block_by_epochs(block, properties=None, reset_time=False):
    """
    This function cuts Segments in a Block according to multiple Neo
    Epoch objects.

    A new Block is built containing one Segment per Epoch entry fulfilling
    a set of conditions on the Epoch attributes and annotations. The input
    Block is not modified.

    A dictionary contains restrictions on which Epochs are considered for
    the cutting procedure. To this end, it is possible to
    specify accepted (valid) values of specific annotations on the source
    Epochs.

    The resulting cut segments may either retain their original time stamps, or
    be shifted to a common starting time.

    Parameters
    ----------
    block: Block
        Contains the Segments to cut according to the Epoch criteria provided
    properties: dictionary
        A dictionary that contains the Epoch keys and values to filter for.
        Each key of the dictionary is matched to an attribute or an
        annotation or an array_annotation of the Event.
        The value of each dictionary entry corresponds to a valid entry or a
        list of valid entries of the attribute or (array) annotation.

        If the value belonging to the key is a list of entries of the same
        length as the number of epochs in the Epoch object, the list entries
        are matched to the epochs in the Epoch object. The resulting Epoch
        object contains only those epochs where the values match up.

        Otherwise, the value is compared to the attributes or annotation of the
        Epoch object as such, and depending on the comparison, either the
        complete Epoch object is returned or not.

        If None or an empty dictionary is passed, all Epoch Objects will
        be considered
    reset_time: bool
        If True the times stamps of all sliced objects are set to fall
        in the range from 0 to the duration of the epoch duration.
        If False, original time stamps are retained.
        Default is False.

    Returns:
    --------
    new_block: Block
        A new Block holding one cut Segment per matching Epoch entry.
        (Note: the docstring previously claimed the input block was altered
        and None was returned; the code builds and returns a new Block.)
    """
    if not isinstance(block, neo.Block):
        raise TypeError(
            'block needs to be a Block, not %s' % type(block))

    new_block = neo.Block()

    for seg in block.segments:
        # Select the Epochs of this segment matching the filter criteria.
        epochs = _get_from_list(seg.epochs, prop=properties)
        if len(epochs) > 1:
            warnings.warn(
                'Segment %s contains multiple epochs with '
                'requested properties (%s). Sub-segments can '
                'have overlapping times' % (seg.name, properties))

        elif len(epochs) == 0:
            warnings.warn(
                'No epoch is matching the requested epoch properties %s. '
                'No cutting of segment %s performed.' % (properties, seg.name))

        for epoch in epochs:
            new_segments = cut_segment_by_epoch(
                seg, epoch=epoch, reset_time=reset_time)
            new_block.segments.extend(new_segments)

    # Wire parent/child links of the freshly assembled Block.
    new_block.create_many_to_one_relationship(force=True)
    return new_block
def cut_segment_by_epoch(seg, epoch, reset_time=False):
    """
    Cut a Segment according to an Epoch object.

    One new Segment is produced per entry of `epoch`, each containing the
    data of `seg` restricted to that entry's time window. The annotations
    of the Epoch (and, entry-wise, its array annotations) are copied onto
    the resulting Segments.

    Parameters
    ----------
    seg: Segment
        The Segment containing the original uncut data.
    epoch: Epoch
        For each entry, one Segment is cut out at the entry's time and
        duration.
    reset_time: bool
        If True, time stamps of the sliced objects are shifted to start at
        0; if False (default), original time stamps are retained.

    Returns
    -------
    list of Segment
        One cut Segment per entry in `epoch`.

    Raises
    ------
    TypeError
        If `seg` is not a Segment or `epoch` is not an Epoch.
    """
    if not isinstance(seg, neo.Segment):
        raise TypeError(
            'Seg needs to be of type Segment, not %s' % type(seg))

    if not isinstance(epoch, neo.Epoch):
        raise TypeError(
            'Epoch needs to be of type Epoch, not %s' % type(epoch))

    cut_segments = []
    for idx, t_start in enumerate(epoch.times):
        t_stop = t_start + epoch.durations[idx]
        subseg = seg.time_slice(t_start, t_stop, reset_time=reset_time)
        subseg.annotate(**copy.copy(epoch.annotations))
        # Copy the epoch's array annotations entry-wise onto the segment.
        for key, val in epoch.array_annotations.items():
            if len(val):
                subseg.annotations[key] = copy.copy(val[idx])
        cut_segments.append(subseg)

    return cut_segments
def is_block_rawio_compatible(block, return_problems=False):
    """
    Check whether a Block written to a nix file could be read back through
    the NIXRawIO.

    The neo.rawio layer is more restrictive than neo.io:
      * object counts must be consistent across segments
      * no IrregularlySampledSignal objects
      * AnalogSignal sampling rate / channel count / units must be
        consistent across segments

    Parameters
    ----------
    block: Block
        The block to check; must contain at least one segment.
    return_problems: bool (False by default)
        If True, also return a list of strings describing each violation.

    Returns
    -------
    is_rawio_compatible: bool
        Compatible or not.
    problems: list of str
        Only returned when `return_problems` is True.
    """
    assert len(block.segments) > 0, "This block doesn't have segments"

    problems = []
    first = block.segments[0]

    # Object counts per segment must match those of the first segment.
    sig_count_consistent = True
    for seg in block.segments:
        if len(seg.analogsignals) != len(first.analogsignals):
            problems.append('Number of AnalogSignals is not consistent across segments')
            sig_count_consistent = False
        if len(seg.spiketrains) != len(first.spiketrains):
            problems.append('Number of SpikeTrains is not consistent across segments')
        if len(seg.events) != len(first.events):
            problems.append('Number of Events is not consistent across segments')
        if len(seg.epochs) != len(first.epochs):
            problems.append('Number of Epochs is not consistent across segments')

    # Per-signal consistency checks only make sense when counts agree.
    if sig_count_consistent:
        for i in range(len(first.analogsignals)):
            ref_sig = first.analogsignals[i]
            for seg in block.segments:
                sig = seg.analogsignals[i]
                if sig.sampling_rate != ref_sig.sampling_rate:
                    problems.append('AnalogSignals have inconsistent sampling rate across segments')
                if sig.shape[1] != ref_sig.shape[1]:
                    problems.append('AnalogSignals have inconsistent channel count across segments')
                if sig.units != ref_sig.units:
                    problems.append('AnalogSignals have inconsistent units across segments')

    # IrregularlySampledSignals are not representable in the rawio layer.
    for seg in block.segments:
        if len(seg.irregularlysampledsignals) > 0:
            problems.append('IrregularlySampledSignals are not raw compatible')

    compatible = (len(problems) == 0)
    if return_problems:
        return compatible, problems
    return compatible
// Copyright 2016 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//go:build linux
package netutil
import (
"bytes"
"encoding/binary"
"fmt"
"net"
"slices"
"syscall"
"go.etcd.io/etcd/pkg/v3/cpuutil"
)
// Sentinel errors returned by the default-route / host / interface
// discovery helpers in this file.
var (
errNoDefaultRoute = fmt.Errorf("could not find default route")
errNoDefaultHost = fmt.Errorf("could not find default host")
errNoDefaultInterface = fmt.Errorf("could not find default interface")
)
// GetDefaultHost obtains the first IP address of machine from the routing table and returns the IP address as string.
// An IPv4 address is preferred to an IPv6 address for backward compatibility.
func GetDefaultHost() (string, error) {
	routes, err := getDefaultRoutes()
	if err != nil {
		return "", err
	}

	// IPv4 takes precedence over every other family.
	if msg, found := routes[syscall.AF_INET]; found {
		host, herr := chooseHost(syscall.AF_INET, msg)
		if host != "" || herr != nil {
			return host, herr
		}
		delete(routes, syscall.AF_INET)
	}

	// Walk the remaining families in ascending order so the choice is
	// deterministic across runs.
	remaining := make([]uint8, 0, len(routes))
	for fam := range routes {
		remaining = append(remaining, fam)
	}
	slices.Sort(remaining)
	for _, fam := range remaining {
		host, herr := chooseHost(fam, routes[fam])
		if host != "" || herr != nil {
			return host, herr
		}
	}

	return "", errNoDefaultHost
}
// chooseHost resolves a host address for the given address family from a
// default-route netlink message. It honors the route's preferred source
// address (RTA_PREFSRC) when present; otherwise it falls back to the first
// address attached to the route's output interface.
func chooseHost(family uint8, rmsg *syscall.NetlinkMessage) (string, error) {
	host, oif, err := parsePREFSRC(rmsg)
	if host != "" || err != nil {
		return host, err
	}

	// prefsrc not detected, fall back to getting address from iface
	addrMsg, err := getIfaceAddr(oif, family)
	if err != nil {
		return "", err
	}
	attrs, err := syscall.ParseNetlinkRouteAttr(addrMsg)
	if err != nil {
		return "", err
	}
	// search for RTA_DST because ipv6 doesn't have RTA_SRC
	for _, a := range attrs {
		if a.Attr.Type == syscall.RTA_DST {
			return net.IP(a.Value).String(), nil
		}
	}
	return "", nil
}
// getDefaultRoutes dumps the kernel routing table (RTM_GETROUTE) and returns,
// keyed by address family, the netlink message describing that family's
// default route in the main table. Returns errNoDefaultRoute when no default
// route exists for any family.
func getDefaultRoutes() (map[uint8]*syscall.NetlinkMessage, error) {
dat, err := syscall.NetlinkRIB(syscall.RTM_GETROUTE, syscall.AF_UNSPEC)
if err != nil {
return nil, err
}
msgs, msgErr := syscall.ParseNetlinkMessage(dat)
if msgErr != nil {
return nil, msgErr
}
routes := make(map[uint8]*syscall.NetlinkMessage)
rtmsg := syscall.RtMsg{}
for _, m := range msgs {
if m.Header.Type != syscall.RTM_NEWROUTE {
continue
}
// The fixed-size RtMsg header sits at the front of the payload; decode it
// using the host byte order.
buf := bytes.NewBuffer(m.Data[:syscall.SizeofRtMsg])
if rerr := binary.Read(buf, cpuutil.ByteOrder(), &rtmsg); rerr != nil {
continue
}
if rtmsg.Dst_len == 0 && rtmsg.Table == syscall.RT_TABLE_MAIN {
// zero-length Dst_len implies default route
// copy the loop variable so the stored pointer does not alias the iterator
msg := m
routes[rtmsg.Family] = &msg
}
}
if len(routes) > 0 {
return routes, nil
}
return nil, errNoDefaultRoute
}
// getIfaceAddr dumps all addresses of the given family (RTM_GETADDR) and
// returns the first netlink message whose interface index matches idx.
func getIfaceAddr(idx uint32, family uint8) (*syscall.NetlinkMessage, error) {
dat, err := syscall.NetlinkRIB(syscall.RTM_GETADDR, int(family))
if err != nil {
return nil, err
}
msgs, msgErr := syscall.ParseNetlinkMessage(dat)
if msgErr != nil {
return nil, msgErr
}
ifaddrmsg := syscall.IfAddrmsg{}
for _, m := range msgs {
if m.Header.Type != syscall.RTM_NEWADDR {
continue
}
// Decode the fixed-size IfAddrmsg header to read the interface index.
buf := bytes.NewBuffer(m.Data[:syscall.SizeofIfAddrmsg])
if rerr := binary.Read(buf, cpuutil.ByteOrder(), &ifaddrmsg); rerr != nil {
continue
}
if ifaddrmsg.Index == idx {
return &m, nil
}
}
return nil, fmt.Errorf("could not find address for interface index %v", idx)
}
// getIfaceLink dumps all links (RTM_GETLINK) and returns the netlink message
// for the interface whose index matches idx; callers use it to recover the
// interface name from the attached attributes.
func getIfaceLink(idx uint32) (*syscall.NetlinkMessage, error) {
dat, err := syscall.NetlinkRIB(syscall.RTM_GETLINK, syscall.AF_UNSPEC)
if err != nil {
return nil, err
}
msgs, msgErr := syscall.ParseNetlinkMessage(dat)
if msgErr != nil {
return nil, msgErr
}
ifinfomsg := syscall.IfInfomsg{}
for _, m := range msgs {
if m.Header.Type != syscall.RTM_NEWLINK {
continue
}
// Decode the fixed-size IfInfomsg header to read the interface index.
buf := bytes.NewBuffer(m.Data[:syscall.SizeofIfInfomsg])
if rerr := binary.Read(buf, cpuutil.ByteOrder(), &ifinfomsg); rerr != nil {
continue
}
if ifinfomsg.Index == int32(idx) {
return &m, nil
}
}
return nil, fmt.Errorf("could not find link for interface index %v", idx)
}
// GetDefaultInterfaces gets names of interfaces and returns a map[interface]families.
// Family values accumulate per interface: 2 (AF_INET only), 10 (AF_INET6 only),
// or 12 when the same interface carries a default route for both families.
func GetDefaultInterfaces() (map[string]uint8, error) {
interfaces := make(map[string]uint8)
rmsgs, rerr := getDefaultRoutes()
if rerr != nil {
return interfaces, rerr
}
for family, rmsg := range rmsgs {
_, oif, err := parsePREFSRC(rmsg)
if err != nil {
return interfaces, err
}
ifmsg, ierr := getIfaceLink(oif)
if ierr != nil {
return interfaces, ierr
}
attrs, aerr := syscall.ParseNetlinkRouteAttr(ifmsg)
if aerr != nil {
return interfaces, aerr
}
for _, attr := range attrs {
if attr.Attr.Type == syscall.IFLA_IFNAME {
// key is an interface name (IFLA_IFNAME is NUL-terminated, hence
// the trailing byte is dropped)
// possible values: 2 - AF_INET, 10 - AF_INET6, 12 - dualstack
interfaces[string(attr.Value[:len(attr.Value)-1])] += family
}
}
}
if len(interfaces) > 0 {
return interfaces, nil
}
return interfaces, errNoDefaultInterface
}
// parsePREFSRC returns preferred source address and output interface index (RTA_OIF).
func parsePREFSRC(m *syscall.NetlinkMessage) (host string, oif uint32, err error) {
var attrs []syscall.NetlinkRouteAttr
attrs, err = syscall.ParseNetlinkRouteAttr(m)
if err != nil {
return "", 0, err
}
for _, attr := range attrs {
if attr.Attr.Type == syscall.RTA_PREFSRC {
host = net.IP(attr.Value).String()
}
if attr.Attr.Type == syscall.RTA_OIF {
oif = cpuutil.ByteOrder().Uint32(attr.Value)
}
if host != "" && oif != uint32(0) {
break
}
}
if oif == 0 {
err = errNoDefaultRoute
}
return host, oif, err
} | go | github | https://github.com/etcd-io/etcd | pkg/netutil/routes_linux.go |
require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({
/***/ 722:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
// ---------------------------------------------------------------------------
// Bundler-generated CommonJS/ESM interop helpers (esbuild-style output);
// they wire up lazy property re-exports and `default` handling.
// ---------------------------------------------------------------------------
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define lazy, enumerable getters on `target` for every entry in `all`.
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` (skipping `except` and keys that
// already exist), preserving enumerability via getters.
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
// Mark a fresh namespace object as an ES module and copy `mod`'s exports in.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// platforms/nodejs.ts
// Public module surface: the Redis client class plus the error namespace.
var nodejs_exports = {};
__export(nodejs_exports, {
Redis: () => Redis2,
errors: () => error_exports
});
module.exports = __toCommonJS(nodejs_exports);
// pkg/error.ts
// Error classes, re-exported as the `errors` namespace above.
var error_exports = {};
__export(error_exports, {
UpstashError: () => UpstashError,
UrlError: () => UrlError
});
// Error raised when the Upstash Redis server rejects or fails a command.
var UpstashError = class extends Error {
  name = "UpstashError";
  constructor(message) {
    super(message);
  }
};
// Error raised when the client is configured with a malformed base URL.
var UrlError = class extends Error {
  name = "UrlError";
  constructor(url) {
    const detail = `Upstash Redis client was passed an invalid URL. You should pass a URL starting with https. Received: "${url}". `;
    super(detail);
  }
};
// pkg/http.ts
// HTTP transport for the Upstash Redis REST API: serializes commands as JSON
// POST requests, retries with backoff, decodes base64 payloads, and tracks
// the read-your-writes sync token.
//
// Fixes over the previous revision:
//  - `hasCredentials` uses optional chaining: a missing `authorization`
//    header now means "no credentials" instead of a TypeError that crashed
//    the constructor before the "initialized without url or token" warning
//    in `request` could ever fire.
//  - the duplicated read-your-writes sync-token refresh in `request` has
//    been collapsed into a single block.
var HttpClient = class {
  baseUrl;
  headers;
  options;
  readYourWrites;
  upstashSyncToken = "";
  hasCredentials;
  retry;
  constructor(config) {
    this.options = {
      backend: config.options?.backend,
      agent: config.agent,
      responseEncoding: config.responseEncoding ?? "base64",
      // default to base64
      cache: config.cache,
      signal: config.signal,
      keepAlive: config.keepAlive ?? true
    };
    this.upstashSyncToken = "";
    this.readYourWrites = config.readYourWrites ?? true;
    this.baseUrl = (config.baseUrl || "").replace(/\/$/, "");
    const urlRegex = /^https?:\/\/[^\s#$./?].\S*$/;
    if (this.baseUrl && !urlRegex.test(this.baseUrl)) {
      throw new UrlError(this.baseUrl);
    }
    this.headers = {
      "Content-Type": "application/json",
      ...config.headers
    };
    // Optional chaining: a missing authorization header must not crash the
    // constructor; it simply means the client has no credentials.
    this.hasCredentials = Boolean(this.baseUrl && this.headers.authorization?.split(" ")[1]);
    if (this.options.responseEncoding === "base64") {
      this.headers["Upstash-Encoding"] = "base64";
    }
    // `retry: false` disables retries entirely; otherwise default to 5
    // retries with exponential backoff.
    this.retry = typeof config.retry === "boolean" && !config.retry ? {
      attempts: 1,
      backoff: () => 0
    } : {
      attempts: config.retry?.retries ?? 5,
      backoff: config.retry?.backoff ?? ((retryCount) => Math.exp(retryCount) * 50)
    };
  }
  // Merge runtime/platform/SDK telemetry values into the request headers.
  mergeTelemetry(telemetry) {
    this.headers = merge(this.headers, "Upstash-Telemetry-Runtime", telemetry.runtime);
    this.headers = merge(this.headers, "Upstash-Telemetry-Platform", telemetry.platform);
    this.headers = merge(this.headers, "Upstash-Telemetry-Sdk", telemetry.sdk);
  }
  // Execute a single REST request with retries. Decodes base64 payloads when
  // configured, raises UpstashError on non-2xx replies, and refreshes the
  // read-your-writes sync token from the response.
  async request(req) {
    const requestOptions = {
      //@ts-expect-error this should throw due to bun regression
      cache: this.options.cache,
      method: "POST",
      headers: this.headers,
      body: JSON.stringify(req.body),
      keepalive: this.options.keepAlive,
      agent: this.options.agent,
      signal: this.options.signal,
      /**
       * Fastly specific
       */
      backend: this.options.backend
    };
    if (!this.hasCredentials) {
      console.warn(
        "[Upstash Redis] Redis client was initialized without url or token. Failed to execute command."
      );
    }
    if (this.readYourWrites) {
      const newHeader = this.upstashSyncToken;
      this.headers["upstash-sync-token"] = newHeader;
    }
    let res = null;
    let error = null;
    for (let i = 0; i <= this.retry.attempts; i++) {
      try {
        res = await fetch([this.baseUrl, ...req.path ?? []].join("/"), requestOptions);
        break;
      } catch (error_) {
        if (this.options.signal?.aborted) {
          // An abort is surfaced as a successful empty response carrying
          // the abort reason, not as a thrown error.
          const myBlob = new Blob([
            JSON.stringify({ result: this.options.signal.reason ?? "Aborted" })
          ]);
          const myOptions = {
            status: 200,
            statusText: this.options.signal.reason ?? "Aborted"
          };
          res = new Response(myBlob, myOptions);
          break;
        }
        error = error_;
        await new Promise((r) => setTimeout(r, this.retry.backoff(i)));
      }
    }
    if (!res) {
      throw error ?? new Error("Exhausted all retries");
    }
    const body = await res.json();
    if (!res.ok) {
      throw new UpstashError(`${body.error}, command was: ${JSON.stringify(req.body)}`);
    }
    // Capture the server's sync token once (this block was previously
    // duplicated verbatim).
    if (this.readYourWrites) {
      this.upstashSyncToken = res.headers.get("upstash-sync-token") ?? "";
    }
    if (this.options.responseEncoding === "base64") {
      if (Array.isArray(body)) {
        return body.map(({ result: result2, error: error2 }) => ({
          result: decode(result2),
          error: error2
        }));
      }
      const result = decode(body.result);
      return { result, error: body.error };
    }
    return body;
  }
};
// Decode a base64 string to UTF-8 text. On any failure (invalid base64,
// undecodable bytes) the input is returned unchanged.
function base64decode(b64) {
  try {
    const raw = atob(b64);
    const bytes = Uint8Array.from(raw, (ch) => ch.charCodeAt(0));
    return new TextDecoder().decode(bytes);
  } catch {
    return b64;
  }
}
// Recursively decode a raw REST response value produced with
// `Upstash-Encoding: base64`: strings are base64-decoded (except the literal
// "OK" status), arrays are decoded element-wise, numbers and undefined pass
// through, and any other object (including null) becomes null.
function decode(raw) {
  const kind = typeof raw;
  if (kind === "undefined" || kind === "number") {
    return raw;
  }
  if (kind === "string") {
    return raw === "OK" ? "OK" : base64decode(raw);
  }
  if (kind === "object") {
    if (!Array.isArray(raw)) {
      return null;
    }
    return raw.map(
      (v) => typeof v === "string" ? base64decode(v) : Array.isArray(v) ? v.map((element) => decode(element)) : v
    );
  }
  // Booleans and anything else fall through undecoded, as before.
  return void 0;
}
// Append `value` under `key` in `obj`, comma-joining with any existing
// value; a falsy `value` leaves `obj` untouched. Returns `obj`.
function merge(obj, key, value) {
  if (value) {
    obj[key] = obj[key] ? [obj[key], value].join(",") : value;
  }
  return obj;
}
// pkg/auto-pipeline.ts
// Wrap a Redis client in a Proxy that transparently batches consecutive
// command calls into a single pipeline request. When `json` is true,
// dispatch goes through the pipeline's `.json` sub-API instead.
function createAutoPipelineProxy(_redis, json) {
const redis = _redis;
if (!redis.autoPipelineExecutor) {
redis.autoPipelineExecutor = new AutoPipelineExecutor(redis);
}
return new Proxy(redis, {
get: (redis2, command) => {
if (command === "pipelineCounter") {
return redis2.autoPipelineExecutor.pipelineCounter;
}
if (command === "json") {
return createAutoPipelineProxy(redis2, true);
}
// Members that exist only on the client (config, utilities) bypass
// pipelining entirely.
const commandInRedisButNotPipeline = command in redis2 && !(command in redis2.autoPipelineExecutor.pipeline);
if (commandInRedisButNotPipeline) {
return redis2[command];
}
const isFunction = json ? typeof redis2.autoPipelineExecutor.pipeline.json[command] === "function" : typeof redis2.autoPipelineExecutor.pipeline[command] === "function";
if (isFunction) {
// Wrap the pipeline method so the call is queued on the shared
// executor instead of firing immediately.
return (...args) => {
return redis2.autoPipelineExecutor.withAutoPipeline((pipeline) => {
if (json) {
pipeline.json[command](
...args
);
} else {
pipeline[command](...args);
}
});
};
}
// Non-function pipeline members are returned as-is.
return redis2.autoPipelineExecutor.pipeline[command];
}
});
}
// Collects commands issued within the same microtask burst into a single
// pipeline and executes that pipeline exactly once after the burst settles.
var AutoPipelineExecutor = class {
pipelinePromises = /* @__PURE__ */ new WeakMap();
activePipeline = null;
indexInCurrentPipeline = 0;
redis;
pipeline;
// only to make sure that proxy can work
pipelineCounter = 0;
// to keep track of how many times a pipeline was executed
constructor(redis) {
this.redis = redis;
this.pipeline = redis.pipeline();
}
// Enqueue one command on the active (or a fresh) pipeline, await the shared
// execution, and return this command's own slot of the results array.
async withAutoPipeline(executeWithPipeline) {
const pipeline = this.activePipeline ?? this.redis.pipeline();
if (!this.activePipeline) {
this.activePipeline = pipeline;
this.indexInCurrentPipeline = 0;
}
const index = this.indexInCurrentPipeline++;
executeWithPipeline(pipeline);
const pipelineDone = this.deferExecution().then(() => {
// The first caller to reach this point triggers exec(); later callers
// reuse the promise cached in the WeakMap for the same pipeline.
if (!this.pipelinePromises.has(pipeline)) {
const pipelinePromise = pipeline.exec({ keepErrors: true });
this.pipelineCounter += 1;
this.pipelinePromises.set(pipeline, pipelinePromise);
this.activePipeline = null;
}
return this.pipelinePromises.get(pipeline);
});
const results = await pipelineDone;
const commandResult = results[index];
if (commandResult.error) {
throw new UpstashError(`Command failed: ${commandResult.error}`);
}
return commandResult.result;
}
// Two awaited microtasks give every synchronously-issued command a chance
// to join the current pipeline before it executes.
async deferExecution() {
await Promise.resolve();
await Promise.resolve();
}
};
// pkg/util.ts
// Parse a JSON-encoded value, recursing into arrays element-wise; elements
// that fail to parse are kept verbatim. A numeric parse that does not
// round-trip back to the original string (precision loss on very large
// integers) also returns the input unchanged.
function parseRecursive(obj) {
  let parsed;
  if (Array.isArray(obj)) {
    parsed = obj.map((o) => {
      try {
        return parseRecursive(o);
      } catch {
        return o;
      }
    });
  } else {
    parsed = JSON.parse(obj);
  }
  if (typeof parsed === "number" && parsed.toString() !== obj) {
    return obj;
  }
  return parsed;
}
// Best-effort deserialization: JSON-parse recursively, falling back to the
// raw value when parsing fails.
function parseResponse(result) {
try {
return parseRecursive(result);
} catch {
return result;
}
}
// SCAN-family replies are [cursor, ...items]; the cursor is kept verbatim
// and only the items are deserialized.
function deserializeScanResponse(result) {
return [result[0], ...parseResponse(result.slice(1))];
}
// pkg/commands/command.ts
// Serialize a single command argument: strings, numbers and booleans pass
// through untouched; everything else is JSON-encoded.
var defaultSerializer = (c) => {
  const t = typeof c;
  if (t === "string" || t === "number" || t === "boolean") {
    return c;
  }
  return JSON.stringify(c);
};
// Base class for every Redis command: serializes arguments on construction,
// executes over an HTTP client, and deserializes the reply.
var Command = class {
command;
serialize;
deserialize;
/**
* Create a new command instance.
*
* You can define a custom `deserialize` function. By default we try to deserialize as json.
*/
constructor(command, opts) {
this.serialize = defaultSerializer;
this.deserialize = opts?.automaticDeserialization === void 0 || opts.automaticDeserialization ? opts?.deserialize ?? parseResponse : (x) => x;
this.command = command.map((c) => this.serialize(c));
// Optional latency logging wraps `exec` to time and print each round trip.
if (opts?.latencyLogging) {
const originalExec = this.exec.bind(this);
this.exec = async (client) => {
const start = performance.now();
const result = await originalExec(client);
const end = performance.now();
const loggerResult = (end - start).toFixed(2);
console.log(
`Latency for \x1B[38;2;19;185;39m${this.command[0].toString().toUpperCase()}\x1B[0m: \x1B[38;2;0;255;255m${loggerResult} ms\x1B[0m`
);
return result;
};
}
}
/**
* Execute the command using a client.
*/
async exec(client) {
const { result, error } = await client.request({
body: this.command,
upstashSyncToken: client.upstashSyncToken
});
if (error) {
throw new UpstashError(error);
}
if (result === void 0) {
throw new TypeError("Request did not return a result");
}
return this.deserialize(result);
}
};
// pkg/commands/append.ts
// Redis APPEND: append a value to a string key, returning the new length.
var AppendCommand = class extends Command {
constructor(cmd, opts) {
super(["append", ...cmd], opts);
}
};
// pkg/commands/bitcount.ts
// Redis BITCOUNT: count set bits, with optional numeric [start, end] range.
var BitCountCommand = class extends Command {
constructor([key, start, end], opts) {
const command = ["bitcount", key];
if (typeof start === "number") {
command.push(start);
}
if (typeof end === "number") {
command.push(end);
}
super(command, opts);
}
};
// pkg/commands/bitfield.ts
// Fluent builder for Redis BITFIELD: chain get/set/incrby/overflow
// sub-operations, then call `exec()` to run the accumulated command.
var BitFieldCommand = class {
constructor(args, client, opts, execOperation = (command) => command.exec(this.client)) {
this.client = client;
this.opts = opts;
this.execOperation = execOperation;
this.command = ["bitfield", ...args];
}
command;
// Append raw tokens to the pending command and return `this` for chaining.
chain(...args) {
this.command.push(...args);
return this;
}
get(...args) {
return this.chain("get", ...args);
}
set(...args) {
return this.chain("set", ...args);
}
incrby(...args) {
return this.chain("incrby", ...args);
}
overflow(overflow) {
return this.chain("overflow", overflow);
}
// Build the final Command and hand it to the injected executor.
exec() {
const command = new Command(this.command, this.opts);
return this.execOperation(command);
}
};
// pkg/commands/bitop.ts
// Redis BITOP: bitwise AND/OR/XOR/NOT between string keys.
var BitOpCommand = class extends Command {
constructor(cmd, opts) {
super(["bitop", ...cmd], opts);
}
};
// pkg/commands/bitpos.ts
// Redis BITPOS: position of the first bit set to 0 or 1.
var BitPosCommand = class extends Command {
constructor(cmd, opts) {
super(["bitpos", ...cmd], opts);
}
};
// pkg/commands/copy.ts
// Redis COPY: copy a key; the integer reply is mapped to "COPIED"/"NOT_COPIED".
var CopyCommand = class extends Command {
constructor([key, destinationKey, opts], commandOptions) {
super(["COPY", key, destinationKey, ...opts?.replace ? ["REPLACE"] : []], {
...commandOptions,
deserialize(result) {
if (result > 0) {
return "COPIED";
}
return "NOT_COPIED";
}
});
}
};
// pkg/commands/dbsize.ts
// Redis DBSIZE: number of keys in the database.
var DBSizeCommand = class extends Command {
constructor(opts) {
super(["dbsize"], opts);
}
};
// pkg/commands/decr.ts
// Redis DECR: decrement an integer key by one.
var DecrCommand = class extends Command {
constructor(cmd, opts) {
super(["decr", ...cmd], opts);
}
};
// pkg/commands/decrby.ts
// Redis DECRBY: decrement an integer key by a given amount.
var DecrByCommand = class extends Command {
constructor(cmd, opts) {
super(["decrby", ...cmd], opts);
}
};
// pkg/commands/del.ts
// Redis DEL: delete one or more keys.
var DelCommand = class extends Command {
constructor(cmd, opts) {
super(["del", ...cmd], opts);
}
};
// pkg/commands/echo.ts
// Redis ECHO: return the given message.
var EchoCommand = class extends Command {
constructor(cmd, opts) {
super(["echo", ...cmd], opts);
}
};
// pkg/commands/eval.ts
// Redis EVAL: run a Lua script with explicit key and argument lists.
var EvalCommand = class extends Command {
constructor([script, keys, args], opts) {
super(["eval", script, keys.length, ...keys, ...args ?? []], opts);
}
};
// pkg/commands/evalsha.ts
// Redis EVALSHA: run a cached Lua script by its SHA1 digest.
var EvalshaCommand = class extends Command {
constructor([sha, keys, args], opts) {
super(["evalsha", sha, keys.length, ...keys, ...args ?? []], opts);
}
};
// pkg/commands/exists.ts
// Redis EXISTS: count how many of the given keys exist.
var ExistsCommand = class extends Command {
constructor(cmd, opts) {
super(["exists", ...cmd], opts);
}
};
// pkg/commands/expire.ts
// Redis EXPIRE: set a TTL in seconds (a trailing falsy option flag is
// dropped by the Boolean filter).
var ExpireCommand = class extends Command {
constructor(cmd, opts) {
super(["expire", ...cmd.filter(Boolean)], opts);
}
};
// pkg/commands/expireat.ts
// Redis EXPIREAT: set an absolute Unix-time expiry.
var ExpireAtCommand = class extends Command {
constructor(cmd, opts) {
super(["expireat", ...cmd], opts);
}
};
// pkg/commands/flushall.ts
// Redis FLUSHALL: wipe all databases, optionally asynchronously.
var FlushAllCommand = class extends Command {
constructor(args, opts) {
const command = ["flushall"];
if (args && args.length > 0 && args[0].async) {
command.push("async");
}
super(command, opts);
}
};
// pkg/commands/flushdb.ts
// Redis FLUSHDB: wipe the current database, optionally asynchronously.
var FlushDBCommand = class extends Command {
constructor([opts], cmdOpts) {
const command = ["flushdb"];
if (opts?.async) {
command.push("async");
}
super(command, cmdOpts);
}
};
// pkg/commands/geo_add.ts
// Redis GEOADD: add members with longitude/latitude; supports NX/XX/CH
// flags. The first argument after the key may be either a flags object or
// itself a member object.
var GeoAddCommand = class extends Command {
constructor([key, arg1, ...arg2], opts) {
const command = ["geoadd", key];
if ("nx" in arg1 && arg1.nx) {
command.push("nx");
} else if ("xx" in arg1 && arg1.xx) {
command.push("xx");
}
if ("ch" in arg1 && arg1.ch) {
command.push("ch");
}
if ("latitude" in arg1 && arg1.latitude) {
command.push(arg1.longitude, arg1.latitude, arg1.member);
}
command.push(
...arg2.flatMap(({ latitude, longitude, member }) => [longitude, latitude, member])
);
super(command, opts);
}
};
// pkg/commands/geo_dist.ts
// Redis GEODIST: distance between two members (unit defaults to meters).
var GeoDistCommand = class extends Command {
constructor([key, member1, member2, unit = "M"], opts) {
super(["GEODIST", key, member1, member2, unit], opts);
}
};
// pkg/commands/geo_hash.ts
// Redis GEOHASH: geohash strings for members (array or variadic form).
var GeoHashCommand = class extends Command {
constructor(cmd, opts) {
const [key] = cmd;
const members = Array.isArray(cmd[1]) ? cmd[1] : cmd.slice(1);
super(["GEOHASH", key, ...members], opts);
}
};
// pkg/commands/geo_pos.ts
// Redis GEOPOS: coordinates for members, deserialized by `transform` below.
var GeoPosCommand = class extends Command {
constructor(cmd, opts) {
const [key] = cmd;
const members = Array.isArray(cmd[1]) ? cmd[1] : cmd.slice(1);
super(["GEOPOS", key, ...members], {
deserialize: (result) => transform(result),
...opts
});
}
};
// Convert raw GEOPOS output — [lon, lat] string pairs, possibly null/empty
// for missing members — into { lng, lat } objects, dropping missing entries.
function transform(result) {
  const coords = [];
  for (const pair of result) {
    const lon = pair?.[0];
    const lat = pair?.[1];
    if (lon && lat) {
      coords.push({ lng: Number.parseFloat(lon), lat: Number.parseFloat(lat) });
    }
  }
  return coords;
}
// pkg/commands/geo_search.ts
// Redis GEOSEARCH: search members within a radius or a box, anchored at a
// member or a lon/lat pair. Optional WITHCOORD/WITHDIST/WITHHASH flags
// enrich each reply element and are unpacked positionally below.
var GeoSearchCommand = class extends Command {
constructor([key, centerPoint, shape, order, opts], commandOptions) {
const command = ["GEOSEARCH", key];
if (centerPoint.type === "FROMMEMBER" || centerPoint.type === "frommember") {
command.push(centerPoint.type, centerPoint.member);
}
if (centerPoint.type === "FROMLONLAT" || centerPoint.type === "fromlonlat") {
command.push(centerPoint.type, centerPoint.coordinate.lon, centerPoint.coordinate.lat);
}
if (shape.type === "BYRADIUS" || shape.type === "byradius") {
command.push(shape.type, shape.radius, shape.radiusType);
}
if (shape.type === "BYBOX" || shape.type === "bybox") {
command.push(shape.type, shape.rect.width, shape.rect.height, shape.rectType);
}
command.push(order);
if (opts?.count) {
command.push("COUNT", opts.count.limit, ...opts.count.any ? ["ANY"] : []);
}
// Without WITH* flags each element is a bare member; with flags each
// element is a tuple whose extras appear in WITHDIST, WITHHASH,
// WITHCOORD order (`counter` walks them).
const transform2 = (result) => {
if (!opts?.withCoord && !opts?.withDist && !opts?.withHash) {
return result.map((member) => {
try {
return { member: JSON.parse(member) };
} catch {
return { member };
}
});
}
return result.map((members) => {
let counter = 1;
const obj = {};
try {
obj.member = JSON.parse(members[0]);
} catch {
obj.member = members[0];
}
if (opts.withDist) {
obj.dist = Number.parseFloat(members[counter++]);
}
if (opts.withHash) {
obj.hash = members[counter++].toString();
}
if (opts.withCoord) {
obj.coord = {
long: Number.parseFloat(members[counter][0]),
lat: Number.parseFloat(members[counter][1])
};
}
return obj;
});
};
super(
[
...command,
...opts?.withCoord ? ["WITHCOORD"] : [],
...opts?.withDist ? ["WITHDIST"] : [],
...opts?.withHash ? ["WITHHASH"] : []
],
{
deserialize: transform2,
...commandOptions
}
);
}
};
// pkg/commands/geo_search_store.ts
// Redis GEOSEARCHSTORE: like GEOSEARCH but stores the results into
// `destination`; STOREDIST keeps distances instead of coordinates.
var GeoSearchStoreCommand = class extends Command {
constructor([destination, key, centerPoint, shape, order, opts], commandOptions) {
const command = ["GEOSEARCHSTORE", destination, key];
if (centerPoint.type === "FROMMEMBER" || centerPoint.type === "frommember") {
command.push(centerPoint.type, centerPoint.member);
}
if (centerPoint.type === "FROMLONLAT" || centerPoint.type === "fromlonlat") {
command.push(centerPoint.type, centerPoint.coordinate.lon, centerPoint.coordinate.lat);
}
if (shape.type === "BYRADIUS" || shape.type === "byradius") {
command.push(shape.type, shape.radius, shape.radiusType);
}
if (shape.type === "BYBOX" || shape.type === "bybox") {
command.push(shape.type, shape.rect.width, shape.rect.height, shape.rectType);
}
command.push(order);
if (opts?.count) {
command.push("COUNT", opts.count.limit, ...opts.count.any ? ["ANY"] : []);
}
super([...command, ...opts?.storeDist ? ["STOREDIST"] : []], commandOptions);
}
};
// pkg/commands/get.ts
// Redis GET: fetch a string key.
var GetCommand = class extends Command {
constructor(cmd, opts) {
super(["get", ...cmd], opts);
}
};
// pkg/commands/getbit.ts
// Redis GETBIT: read a single bit at an offset.
var GetBitCommand = class extends Command {
constructor(cmd, opts) {
super(["getbit", ...cmd], opts);
}
};
// pkg/commands/getdel.ts
// Redis GETDEL: fetch a key and delete it atomically.
var GetDelCommand = class extends Command {
constructor(cmd, opts) {
super(["getdel", ...cmd], opts);
}
};
// pkg/commands/getrange.ts
// Redis GETRANGE: substring of a string key.
var GetRangeCommand = class extends Command {
constructor(cmd, opts) {
super(["getrange", ...cmd], opts);
}
};
// pkg/commands/getset.ts
// Redis GETSET: set a key and return its previous value.
var GetSetCommand = class extends Command {
constructor(cmd, opts) {
super(["getset", ...cmd], opts);
}
};
// pkg/commands/hdel.ts
// Redis HDEL: delete hash fields.
var HDelCommand = class extends Command {
constructor(cmd, opts) {
super(["hdel", ...cmd], opts);
}
};
// pkg/commands/hexists.ts
// Redis HEXISTS: check whether a hash field exists.
var HExistsCommand = class extends Command {
constructor(cmd, opts) {
super(["hexists", ...cmd], opts);
}
};
// pkg/commands/hget.ts
// Redis HGET: fetch a single hash field.
var HGetCommand = class extends Command {
constructor(cmd, opts) {
super(["hget", ...cmd], opts);
}
};
// pkg/commands/hgetall.ts
// Turn a flat [field, value, field, value, ...] reply into an object.
// Values are JSON-parsed when possible; numeric strings outside Number's
// safe-integer range are kept as strings to avoid precision loss. An empty
// reply yields null (missing key). Note: consumes `result` via shift().
function deserialize(result) {
  if (result.length === 0) {
    return null;
  }
  const obj = {};
  while (result.length >= 2) {
    const key = result.shift();
    const value = result.shift();
    try {
      const n = Number(value);
      const unsafeNumber = !Number.isNaN(n) && !Number.isSafeInteger(n);
      obj[key] = unsafeNumber ? value : JSON.parse(value);
    } catch {
      obj[key] = value;
    }
  }
  return obj;
}
// Redis HGETALL: fetch an entire hash, deserialized into a plain object.
var HGetAllCommand = class extends Command {
constructor(cmd, opts) {
super(["hgetall", ...cmd], {
deserialize: (result) => deserialize(result),
...opts
});
}
};
// pkg/commands/hincrby.ts
// Redis HINCRBY: increment an integer hash field.
var HIncrByCommand = class extends Command {
constructor(cmd, opts) {
super(["hincrby", ...cmd], opts);
}
};
// pkg/commands/hincrbyfloat.ts
// Redis HINCRBYFLOAT: increment a float hash field.
var HIncrByFloatCommand = class extends Command {
constructor(cmd, opts) {
super(["hincrbyfloat", ...cmd], opts);
}
};
// pkg/commands/hkeys.ts
// Redis HKEYS: list hash field names.
var HKeysCommand = class extends Command {
constructor([key], opts) {
super(["hkeys", key], opts);
}
};
// pkg/commands/hlen.ts
// Redis HLEN: number of fields in a hash.
var HLenCommand = class extends Command {
constructor(cmd, opts) {
super(["hlen", ...cmd], opts);
}
};
// pkg/commands/hmget.ts
// Zip hmget field names with their returned values into an object. Returns
// null when every value is null (missing key). Each value is JSON-parsed
// when possible, otherwise kept verbatim.
function deserialize2(fields, result) {
  if (result.every((value) => value === null)) {
    return null;
  }
  const obj = {};
  fields.forEach((field, i) => {
    try {
      obj[field] = JSON.parse(result[i]);
    } catch {
      obj[field] = result[i];
    }
  });
  return obj;
}
// Redis HMGET: fetch multiple hash fields, zipped into an object by
// `deserialize2`.
var HMGetCommand = class extends Command {
constructor([key, ...fields], opts) {
super(["hmget", key, ...fields], {
deserialize: (result) => deserialize2(fields, result),
...opts
});
}
};
// pkg/commands/hmset.ts
// Redis HMSET: set multiple hash fields from a plain object.
var HMSetCommand = class extends Command {
constructor([key, kv], opts) {
super(["hmset", key, ...Object.entries(kv).flatMap(([field, value]) => [field, value])], opts);
}
};
// pkg/commands/hrandfield.ts
// Convert a WITHVALUES hrandfield reply ([field, value, ...]) into an
// object, JSON-parsing each value when possible. Empty reply yields null.
// Note: consumes `result` via shift(), like its siblings.
function deserialize3(result) {
  if (result.length === 0) {
    return null;
  }
  const obj = {};
  while (result.length >= 2) {
    const field = result.shift();
    const raw = result.shift();
    try {
      obj[field] = JSON.parse(raw);
    } catch {
      obj[field] = raw;
    }
  }
  return obj;
}
// Redis HRANDFIELD: random field(s); WITHVALUES replies are unpacked by
// `deserialize3`.
var HRandFieldCommand = class extends Command {
constructor(cmd, opts) {
const command = ["hrandfield", cmd[0]];
if (typeof cmd[1] === "number") {
command.push(cmd[1]);
}
if (cmd[2]) {
command.push("WITHVALUES");
}
super(command, {
// @ts-expect-error to silence compiler
deserialize: cmd[2] ? (result) => deserialize3(result) : opts?.deserialize,
...opts
});
}
};
// pkg/commands/hscan.ts
// Redis HSCAN: cursor-based hash iteration with optional MATCH/COUNT.
var HScanCommand = class extends Command {
constructor([key, cursor, cmdOpts], opts) {
const command = ["hscan", key, cursor];
if (cmdOpts?.match) {
command.push("match", cmdOpts.match);
}
if (typeof cmdOpts?.count === "number") {
command.push("count", cmdOpts.count);
}
super(command, {
deserialize: deserializeScanResponse,
...opts
});
}
};
// pkg/commands/hset.ts
// Redis HSET: set multiple hash fields from a plain object.
var HSetCommand = class extends Command {
constructor([key, kv], opts) {
super(["hset", key, ...Object.entries(kv).flatMap(([field, value]) => [field, value])], opts);
}
};
// pkg/commands/hsetnx.ts
// Redis HSETNX: set a hash field only if it does not yet exist.
var HSetNXCommand = class extends Command {
constructor(cmd, opts) {
super(["hsetnx", ...cmd], opts);
}
};
// pkg/commands/hstrlen.ts
// Redis HSTRLEN: length of the value stored in a hash field.
var HStrLenCommand = class extends Command {
constructor(cmd, opts) {
super(["hstrlen", ...cmd], opts);
}
};
// pkg/commands/hvals.ts
// Redis HVALS: list all values of a hash.
var HValsCommand = class extends Command {
constructor(cmd, opts) {
super(["hvals", ...cmd], opts);
}
};
// pkg/commands/incr.ts
// Redis INCR: increment an integer key by one.
var IncrCommand = class extends Command {
constructor(cmd, opts) {
super(["incr", ...cmd], opts);
}
};
// pkg/commands/incrby.ts
// Redis INCRBY: increment an integer key by a given amount.
var IncrByCommand = class extends Command {
constructor(cmd, opts) {
super(["incrby", ...cmd], opts);
}
};
// pkg/commands/incrbyfloat.ts
// Redis INCRBYFLOAT: increment a key by a float amount.
var IncrByFloatCommand = class extends Command {
constructor(cmd, opts) {
super(["incrbyfloat", ...cmd], opts);
}
};
// pkg/commands/json_arrappend.ts
// RedisJSON JSON.ARRAPPEND: append values to a JSON array.
var JsonArrAppendCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.ARRAPPEND", ...cmd], opts);
}
};
// pkg/commands/json_arrindex.ts
// RedisJSON JSON.ARRINDEX: index of a value within a JSON array.
var JsonArrIndexCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.ARRINDEX", ...cmd], opts);
}
};
// pkg/commands/json_arrinsert.ts
// RedisJSON JSON.ARRINSERT: insert values into a JSON array at an index.
var JsonArrInsertCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.ARRINSERT", ...cmd], opts);
}
};
// pkg/commands/json_arrlen.ts
// RedisJSON JSON.ARRLEN: array length; path defaults to the root ("$").
var JsonArrLenCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.ARRLEN", cmd[0], cmd[1] ?? "$"], opts);
}
};
// pkg/commands/json_arrpop.ts
// RedisJSON JSON.ARRPOP: pop an element from a JSON array.
var JsonArrPopCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.ARRPOP", ...cmd], opts);
}
};
// pkg/commands/json_arrtrim.ts
// RedisJSON JSON.ARRTRIM: trim an array to [start, stop]; defaults "$" 0 0.
var JsonArrTrimCommand = class extends Command {
constructor(cmd, opts) {
const path = cmd[1] ?? "$";
const start = cmd[2] ?? 0;
const stop = cmd[3] ?? 0;
super(["JSON.ARRTRIM", cmd[0], path, start, stop], opts);
}
};
// pkg/commands/json_clear.ts
// RedisJSON JSON.CLEAR: empty containers / zero out numbers at a path.
var JsonClearCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.CLEAR", ...cmd], opts);
}
};
// pkg/commands/json_del.ts
// RedisJSON JSON.DEL: delete a path.
var JsonDelCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.DEL", ...cmd], opts);
}
};
// pkg/commands/json_forget.ts
// RedisJSON JSON.FORGET: alias of JSON.DEL.
var JsonForgetCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.FORGET", ...cmd], opts);
}
};
// pkg/commands/json_get.ts
// RedisJSON JSON.GET: read a path; the second argument may be a path string
// or a formatting options object (INDENT/NEWLINE/SPACE).
var JsonGetCommand = class extends Command {
constructor(cmd, opts) {
const command = ["JSON.GET"];
if (typeof cmd[1] === "string") {
command.push(...cmd);
} else {
command.push(cmd[0]);
if (cmd[1]) {
if (cmd[1].indent) {
command.push("INDENT", cmd[1].indent);
}
if (cmd[1].newline) {
command.push("NEWLINE", cmd[1].newline);
}
if (cmd[1].space) {
command.push("SPACE", cmd[1].space);
}
}
command.push(...cmd.slice(2));
}
super(command, opts);
}
};
// pkg/commands/json_mget.ts
// RedisJSON JSON.MGET: read one path from multiple keys.
var JsonMGetCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.MGET", ...cmd[0], cmd[1]], opts);
}
};
// pkg/commands/json_mset.ts
// RedisJSON JSON.MSET: set several key/path/value triples at once.
var JsonMSetCommand = class extends Command {
constructor(cmd, opts) {
const command = ["JSON.MSET"];
for (const c of cmd) {
command.push(c.key, c.path, c.value);
}
super(command, opts);
}
};
// pkg/commands/json_numincrby.ts
// RedisJSON JSON.NUMINCRBY: add to a numeric value at a path.
var JsonNumIncrByCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.NUMINCRBY", ...cmd], opts);
}
};
// pkg/commands/json_nummultby.ts
// RedisJSON JSON.NUMMULTBY: multiply a numeric value at a path.
var JsonNumMultByCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.NUMMULTBY", ...cmd], opts);
}
};
// pkg/commands/json_objkeys.ts
// RedisJSON JSON.OBJKEYS: keys of a JSON object at a path.
var JsonObjKeysCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.OBJKEYS", ...cmd], opts);
}
};
// pkg/commands/json_objlen.ts
// RedisJSON JSON.OBJLEN: number of keys in a JSON object at a path.
var JsonObjLenCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.OBJLEN", ...cmd], opts);
}
};
// pkg/commands/json_resp.ts
// RedisJSON JSON.RESP: RESP-protocol form of a JSON value.
var JsonRespCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.RESP", ...cmd], opts);
}
};
// pkg/commands/json_set.ts
// RedisJSON JSON.SET: write a value at a path, with optional NX/XX guard.
var JsonSetCommand = class extends Command {
constructor(cmd, opts) {
const command = ["JSON.SET", cmd[0], cmd[1], cmd[2]];
if (cmd[3]) {
if (cmd[3].nx) {
command.push("NX");
} else if (cmd[3].xx) {
command.push("XX");
}
}
super(command, opts);
}
};
// pkg/commands/json_strappend.ts
// RedisJSON JSON.STRAPPEND: append to a JSON string value.
var JsonStrAppendCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.STRAPPEND", ...cmd], opts);
}
};
// pkg/commands/json_strlen.ts
// RedisJSON JSON.STRLEN: length of a JSON string value.
var JsonStrLenCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.STRLEN", ...cmd], opts);
}
};
// pkg/commands/json_toggle.ts
// RedisJSON JSON.TOGGLE: flip a boolean value at a path.
var JsonToggleCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.TOGGLE", ...cmd], opts);
}
};
// pkg/commands/json_type.ts
// RedisJSON JSON.TYPE: type name of the value at a path.
var JsonTypeCommand = class extends Command {
constructor(cmd, opts) {
super(["JSON.TYPE", ...cmd], opts);
}
};
// pkg/commands/keys.ts
// Redis KEYS: list keys matching a glob pattern.
var KeysCommand = class extends Command {
constructor(cmd, opts) {
super(["keys", ...cmd], opts);
}
};
// pkg/commands/lindex.ts
var LIndexCommand = class extends Command {
constructor(cmd, opts) {
super(["lindex", ...cmd], opts);
}
};
// pkg/commands/linsert.ts
var LInsertCommand = class extends Command {
constructor(cmd, opts) {
super(["linsert", ...cmd], opts);
}
};
// pkg/commands/llen.ts
var LLenCommand = class extends Command {
constructor(cmd, opts) {
super(["llen", ...cmd], opts);
}
};
// pkg/commands/lmove.ts
var LMoveCommand = class extends Command {
constructor(cmd, opts) {
super(["lmove", ...cmd], opts);
}
};
// pkg/commands/lmpop.ts
var LmPopCommand = class extends Command {
constructor(cmd, opts) {
const [numkeys, keys, direction, count] = cmd;
super(["LMPOP", numkeys, ...keys, direction, ...count ? ["COUNT", count] : []], opts);
}
};
// pkg/commands/lpop.ts
var LPopCommand = class extends Command {
constructor(cmd, opts) {
super(["lpop", ...cmd], opts);
}
};
// pkg/commands/lpos.ts
var LPosCommand = class extends Command {
constructor(cmd, opts) {
const args = ["lpos", cmd[0], cmd[1]];
if (typeof cmd[2]?.rank === "number") {
args.push("rank", cmd[2].rank);
}
if (typeof cmd[2]?.count === "number") {
args.push("count", cmd[2].count);
}
if (typeof cmd[2]?.maxLen === "number") {
args.push("maxLen", cmd[2].maxLen);
}
super(args, opts);
}
};
// pkg/commands/lpush.ts
var LPushCommand = class extends Command {
constructor(cmd, opts) {
super(["lpush", ...cmd], opts);
}
};
// pkg/commands/lpushx.ts
var LPushXCommand = class extends Command {
constructor(cmd, opts) {
super(["lpushx", ...cmd], opts);
}
};
// pkg/commands/lrange.ts
var LRangeCommand = class extends Command {
constructor(cmd, opts) {
super(["lrange", ...cmd], opts);
}
};
// pkg/commands/lrem.ts
var LRemCommand = class extends Command {
constructor(cmd, opts) {
super(["lrem", ...cmd], opts);
}
};
// pkg/commands/lset.ts
var LSetCommand = class extends Command {
constructor(cmd, opts) {
super(["lset", ...cmd], opts);
}
};
// pkg/commands/ltrim.ts
var LTrimCommand = class extends Command {
constructor(cmd, opts) {
super(["ltrim", ...cmd], opts);
}
};
// pkg/commands/mget.ts
var MGetCommand = class extends Command {
constructor(cmd, opts) {
const keys = Array.isArray(cmd[0]) ? cmd[0] : cmd;
super(["mget", ...keys], opts);
}
};
// pkg/commands/mset.ts
// MSET: flatten the { key: value } record into [k1, v1, k2, v2, ...] —
// the same one-level flatten MSetNXCommand uses.
var MSetCommand = class extends Command {
  constructor([kv], opts) {
    const pairs = Object.entries(kv).flat();
    super(["mset", ...pairs], opts);
  }
};
// pkg/commands/msetnx.ts
// --- Thin Redis command wrappers (continued) ---
// Each class assembles the argument array for one Redis command and delegates
// to the shared `Command` base; only the non-trivial ones carry extra logic.
// MSETNX: flatten the { key: value } record into alternating key/value args.
var MSetNXCommand = class extends Command {
  constructor([kv], opts) {
    super(["msetnx", ...Object.entries(kv).flat()], opts);
  }
};
// pkg/commands/persist.ts
var PersistCommand = class extends Command {
  constructor(cmd, opts) {
    super(["persist", ...cmd], opts);
  }
};
// pkg/commands/pexpire.ts
var PExpireCommand = class extends Command {
  constructor(cmd, opts) {
    super(["pexpire", ...cmd], opts);
  }
};
// pkg/commands/pexpireat.ts
var PExpireAtCommand = class extends Command {
  constructor(cmd, opts) {
    super(["pexpireat", ...cmd], opts);
  }
};
// pkg/commands/pfadd.ts
var PfAddCommand = class extends Command {
  constructor(cmd, opts) {
    super(["pfadd", ...cmd], opts);
  }
};
// pkg/commands/pfcount.ts
var PfCountCommand = class extends Command {
  constructor(cmd, opts) {
    super(["pfcount", ...cmd], opts);
  }
};
// pkg/commands/pfmerge.ts
var PfMergeCommand = class extends Command {
  constructor(cmd, opts) {
    super(["pfmerge", ...cmd], opts);
  }
};
// pkg/commands/ping.ts
// PING with an optional echo message (only appended when explicitly provided).
var PingCommand = class extends Command {
  constructor(cmd, opts) {
    const command = ["ping"];
    if (cmd?.[0] !== void 0) {
      command.push(cmd[0]);
    }
    super(command, opts);
  }
};
// pkg/commands/psetex.ts
var PSetEXCommand = class extends Command {
  constructor(cmd, opts) {
    super(["psetex", ...cmd], opts);
  }
};
// pkg/commands/pttl.ts
var PTtlCommand = class extends Command {
  constructor(cmd, opts) {
    super(["pttl", ...cmd], opts);
  }
};
// pkg/commands/publish.ts
var PublishCommand = class extends Command {
  constructor(cmd, opts) {
    super(["publish", ...cmd], opts);
  }
};
// pkg/commands/randomkey.ts
var RandomKeyCommand = class extends Command {
  constructor(opts) {
    super(["randomkey"], opts);
  }
};
// pkg/commands/rename.ts
var RenameCommand = class extends Command {
  constructor(cmd, opts) {
    super(["rename", ...cmd], opts);
  }
};
// pkg/commands/renamenx.ts
var RenameNXCommand = class extends Command {
  constructor(cmd, opts) {
    super(["renamenx", ...cmd], opts);
  }
};
// pkg/commands/rpop.ts
var RPopCommand = class extends Command {
  constructor(cmd, opts) {
    super(["rpop", ...cmd], opts);
  }
};
// pkg/commands/rpush.ts
var RPushCommand = class extends Command {
  constructor(cmd, opts) {
    super(["rpush", ...cmd], opts);
  }
};
// pkg/commands/rpushx.ts
var RPushXCommand = class extends Command {
  constructor(cmd, opts) {
    super(["rpushx", ...cmd], opts);
  }
};
// pkg/commands/sadd.ts
var SAddCommand = class extends Command {
  constructor(cmd, opts) {
    super(["sadd", ...cmd], opts);
  }
};
// pkg/commands/scan.ts
// SCAN cursor [MATCH pattern] [COUNT n] [TYPE type]; the raw [cursor, keys]
// reply is reshaped by deserializeScanResponse (defined earlier in the bundle).
var ScanCommand = class extends Command {
  constructor([cursor, opts], cmdOpts) {
    const command = ["scan", cursor];
    if (opts?.match) {
      command.push("match", opts.match);
    }
    if (typeof opts?.count === "number") {
      command.push("count", opts.count);
    }
    if (opts?.type && opts.type.length > 0) {
      command.push("type", opts.type);
    }
    super(command, {
      deserialize: deserializeScanResponse,
      ...cmdOpts
    });
  }
};
// pkg/commands/scard.ts
var SCardCommand = class extends Command {
  constructor(cmd, opts) {
    super(["scard", ...cmd], opts);
  }
};
// pkg/commands/script_exists.ts
// SCRIPT EXISTS: passes the reply through unchanged (array of 0/1 flags).
var ScriptExistsCommand = class extends Command {
  constructor(hashes, opts) {
    super(["script", "exists", ...hashes], {
      deserialize: (result) => result,
      ...opts
    });
  }
};
// pkg/commands/script_flush.ts
// SCRIPT FLUSH with mutually exclusive SYNC/ASYNC modifiers (sync wins if both set).
var ScriptFlushCommand = class extends Command {
  constructor([opts], cmdOpts) {
    const cmd = ["script", "flush"];
    if (opts?.sync) {
      cmd.push("sync");
    } else if (opts?.async) {
      cmd.push("async");
    }
    super(cmd, cmdOpts);
  }
};
// pkg/commands/script_load.ts
var ScriptLoadCommand = class extends Command {
  constructor(args, opts) {
    super(["script", "load", ...args], opts);
  }
};
// pkg/commands/sdiff.ts
var SDiffCommand = class extends Command {
  constructor(cmd, opts) {
    super(["sdiff", ...cmd], opts);
  }
};
// pkg/commands/sdiffstore.ts
var SDiffStoreCommand = class extends Command {
  constructor(cmd, opts) {
    super(["sdiffstore", ...cmd], opts);
  }
};
// pkg/commands/set.ts
// SET key value with optional flags: NX|XX, GET, and one of EX|PX|EXAT|PXAT|KEEPTTL.
// NOTE(review): the token is pushed as "keepTtl"; Redis spells it KEEPTTL but
// matches option tokens case-insensitively — confirm against the REST backend.
var SetCommand = class extends Command {
  constructor([key, value, opts], cmdOpts) {
    const command = ["set", key, value];
    if (opts) {
      if ("nx" in opts && opts.nx) {
        command.push("nx");
      } else if ("xx" in opts && opts.xx) {
        command.push("xx");
      }
      if ("get" in opts && opts.get) {
        command.push("get");
      }
      if ("ex" in opts && typeof opts.ex === "number") {
        command.push("ex", opts.ex);
      } else if ("px" in opts && typeof opts.px === "number") {
        command.push("px", opts.px);
      } else if ("exat" in opts && typeof opts.exat === "number") {
        command.push("exat", opts.exat);
      } else if ("pxat" in opts && typeof opts.pxat === "number") {
        command.push("pxat", opts.pxat);
      } else if ("keepTtl" in opts && opts.keepTtl) {
        command.push("keepTtl");
      }
    }
    super(command, cmdOpts);
  }
};
// pkg/commands/setbit.ts
var SetBitCommand = class extends Command {
  constructor(cmd, opts) {
    super(["setbit", ...cmd], opts);
  }
};
// pkg/commands/setex.ts
var SetExCommand = class extends Command {
  constructor(cmd, opts) {
    super(["setex", ...cmd], opts);
  }
};
// pkg/commands/setnx.ts
var SetNxCommand = class extends Command {
  constructor(cmd, opts) {
    super(["setnx", ...cmd], opts);
  }
};
// pkg/commands/setrange.ts
var SetRangeCommand = class extends Command {
  constructor(cmd, opts) {
    super(["setrange", ...cmd], opts);
  }
};
// pkg/commands/sinter.ts
var SInterCommand = class extends Command {
  constructor(cmd, opts) {
    super(["sinter", ...cmd], opts);
  }
};
// pkg/commands/sinterstore.ts
var SInterStoreCommand = class extends Command {
  constructor(cmd, opts) {
    super(["sinterstore", ...cmd], opts);
  }
};
// pkg/commands/sismember.ts
var SIsMemberCommand = class extends Command {
  constructor(cmd, opts) {
    super(["sismember", ...cmd], opts);
  }
};
// pkg/commands/smembers.ts
var SMembersCommand = class extends Command {
  constructor(cmd, opts) {
    super(["smembers", ...cmd], opts);
  }
};
// pkg/commands/smismember.ts
// SMISMEMBER key member [member ...]: cmd[0] is the key, cmd[1] the member array.
var SMIsMemberCommand = class extends Command {
  constructor(cmd, opts) {
    super(["smismember", cmd[0], ...cmd[1]], opts);
  }
};
// pkg/commands/smove.ts
var SMoveCommand = class extends Command {
  constructor(cmd, opts) {
    super(["smove", ...cmd], opts);
  }
};
// pkg/commands/spop.ts
// SPOP with an optional numeric count.
var SPopCommand = class extends Command {
  constructor([key, count], opts) {
    const command = ["spop", key];
    if (typeof count === "number") {
      command.push(count);
    }
    super(command, opts);
  }
};
// pkg/commands/srandmember.ts
// SRANDMEMBER with an optional numeric count.
var SRandMemberCommand = class extends Command {
  constructor([key, count], opts) {
    const command = ["srandmember", key];
    if (typeof count === "number") {
      command.push(count);
    }
    super(command, opts);
  }
};
// pkg/commands/srem.ts
var SRemCommand = class extends Command {
  constructor(cmd, opts) {
    super(["srem", ...cmd], opts);
  }
};
// pkg/commands/sscan.ts
// SSCAN key cursor [MATCH pattern] [COUNT n]; reply reshaped by deserializeScanResponse.
var SScanCommand = class extends Command {
  constructor([key, cursor, opts], cmdOpts) {
    const command = ["sscan", key, cursor];
    if (opts?.match) {
      command.push("match", opts.match);
    }
    if (typeof opts?.count === "number") {
      command.push("count", opts.count);
    }
    super(command, {
      deserialize: deserializeScanResponse,
      ...cmdOpts
    });
  }
};
// pkg/commands/strlen.ts
var StrLenCommand = class extends Command {
  constructor(cmd, opts) {
    super(["strlen", ...cmd], opts);
  }
};
// pkg/commands/sunion.ts
var SUnionCommand = class extends Command {
  constructor(cmd, opts) {
    super(["sunion", ...cmd], opts);
  }
};
// pkg/commands/sunionstore.ts
var SUnionStoreCommand = class extends Command {
  constructor(cmd, opts) {
    super(["sunionstore", ...cmd], opts);
  }
};
// pkg/commands/time.ts
var TimeCommand = class extends Command {
  constructor(opts) {
    super(["time"], opts);
  }
};
// pkg/commands/touch.ts
var TouchCommand = class extends Command {
  constructor(cmd, opts) {
    super(["touch", ...cmd], opts);
  }
};
// pkg/commands/ttl.ts
var TtlCommand = class extends Command {
  constructor(cmd, opts) {
    super(["ttl", ...cmd], opts);
  }
};
// pkg/commands/type.ts
var TypeCommand = class extends Command {
  constructor(cmd, opts) {
    super(["type", ...cmd], opts);
  }
};
// pkg/commands/unlink.ts
var UnlinkCommand = class extends Command {
  constructor(cmd, opts) {
    super(["unlink", ...cmd], opts);
  }
};
// pkg/commands/xack.ts
// XACK: `id` may be a single stream entry id or an array of ids.
var XAckCommand = class extends Command {
  constructor([key, group, id], opts) {
    const ids = Array.isArray(id) ? [...id] : [id];
    super(["XACK", key, group, ...ids], opts);
  }
};
// pkg/commands/xadd.ts
// XADD with optional NOMKSTREAM and trim (MAXLEN/MINID + comparison + threshold
// [+ LIMIT]) modifiers, followed by the entry id and field/value pairs.
var XAddCommand = class extends Command {
  constructor([key, id, entries, opts], commandOptions) {
    const command = ["XADD", key];
    if (opts) {
      if (opts.nomkStream) {
        command.push("NOMKSTREAM");
      }
      if (opts.trim) {
        command.push(opts.trim.type, opts.trim.comparison, opts.trim.threshold);
        if (opts.trim.limit !== void 0) {
          command.push("LIMIT", opts.trim.limit);
        }
      }
    }
    command.push(id);
    for (const [k, v] of Object.entries(entries)) {
      command.push(k, v);
    }
    super(command, commandOptions);
  }
};
// pkg/commands/xautoclaim.ts
// XAUTOCLAIM with optional COUNT and JUSTID modifiers.
var XAutoClaim = class extends Command {
  constructor([key, group, consumer, minIdleTime, start, options], opts) {
    const commands = [];
    if (options?.count) {
      commands.push("COUNT", options.count);
    }
    if (options?.justId) {
      commands.push("JUSTID");
    }
    super(["XAUTOCLAIM", key, group, consumer, minIdleTime, start, ...commands], opts);
  }
};
// pkg/commands/xclaim.ts
// XCLAIM: transfer ownership of pending stream entries to another consumer.
// cmd tuple: [key, group, consumer, minIdleTime, id, options] where `id` may
// be a single entry id or an array, and options supports
// { idleMS?, timeMS?, retryCount?, force?, justId?, lastId? }.
var XClaimCommand = class extends Command {
  constructor([key, group, consumer, minIdleTime, id, options], opts) {
    const ids = Array.isArray(id) ? [...id] : [id];
    const commands = [];
    if (options?.idleMS) {
      commands.push("IDLE", options.idleMS);
    }
    // Bug fix: this branch previously re-tested options?.idleMS, so "TIME"
    // was pushed with an undefined value whenever idleMS was set without
    // timeMS, and TIME was silently dropped when only timeMS was set.
    if (options?.timeMS) {
      commands.push("TIME", options.timeMS);
    }
    if (options?.retryCount) {
      commands.push("RETRYCOUNT", options.retryCount);
    }
    if (options?.force) {
      commands.push("FORCE");
    }
    if (options?.justId) {
      commands.push("JUSTID");
    }
    if (options?.lastId) {
      commands.push("LASTID", options.lastId);
    }
    super(["XCLAIM", key, group, consumer, minIdleTime, ...ids, ...commands], opts);
  }
};
// pkg/commands/xdel.ts
// XDEL: `ids` may be a single stream entry id or an array of ids.
var XDelCommand = class extends Command {
  constructor([key, ids], opts) {
    const cmds = Array.isArray(ids) ? [...ids] : [ids];
    super(["XDEL", key, ...cmds], opts);
  }
};
// pkg/commands/xgroup.ts
// XGROUP: dispatches on opts.type to build one of the five subcommand forms
// (CREATE, CREATECONSUMER, DELCONSUMER, DESTROY, SETID); anything else throws.
var XGroupCommand = class extends Command {
  constructor([key, opts], commandOptions) {
    const command = ["XGROUP"];
    switch (opts.type) {
      case "CREATE": {
        command.push("CREATE", key, opts.group, opts.id);
        if (opts.options) {
          if (opts.options.MKSTREAM) {
            command.push("MKSTREAM");
          }
          if (opts.options.ENTRIESREAD !== void 0) {
            command.push("ENTRIESREAD", opts.options.ENTRIESREAD.toString());
          }
        }
        break;
      }
      case "CREATECONSUMER": {
        command.push("CREATECONSUMER", key, opts.group, opts.consumer);
        break;
      }
      case "DELCONSUMER": {
        command.push("DELCONSUMER", key, opts.group, opts.consumer);
        break;
      }
      case "DESTROY": {
        command.push("DESTROY", key, opts.group);
        break;
      }
      case "SETID": {
        command.push("SETID", key, opts.group, opts.id);
        if (opts.options?.ENTRIESREAD !== void 0) {
          command.push("ENTRIESREAD", opts.options.ENTRIESREAD.toString());
        }
        break;
      }
      default: {
        throw new Error("Invalid XGROUP");
      }
    }
    super(command, commandOptions);
  }
};
// pkg/commands/xinfo.ts
// XINFO: either CONSUMERS <key> <group> or GROUPS <key>, chosen by options.type.
var XInfoCommand = class extends Command {
  constructor([key, options], opts) {
    const cmds = [];
    if (options.type === "CONSUMERS") {
      cmds.push("CONSUMERS", key, options.group);
    } else {
      cmds.push("GROUPS", key);
    }
    super(["XINFO", ...cmds], opts);
  }
};
// pkg/commands/xlen.ts
var XLenCommand = class extends Command {
  constructor(cmd, opts) {
    super(["XLEN", ...cmd], opts);
  }
};
// pkg/commands/xpending.ts
// XPENDING with optional IDLE filter and an optional consumer (single or array).
var XPendingCommand = class extends Command {
  constructor([key, group, start, end, count, options], opts) {
    const consumers = options?.consumer === void 0 ? [] : Array.isArray(options.consumer) ? [...options.consumer] : [options.consumer];
    super(
      [
        "XPENDING",
        key,
        group,
        ...options?.idleTime ? ["IDLE", options.idleTime] : [],
        start,
        end,
        count,
        ...consumers
      ],
      opts
    );
  }
};
// pkg/commands/xrange.ts
// Reshapes a raw XRANGE reply — an array of [streamId, [field, value, ...]]
// pairs (possibly several pairs per row) — into
// { [streamId]: { [field]: parsedValue } }. Values are JSON-parsed when
// possible, otherwise kept as raw strings. Note: consumes the input rows
// in place via shift().
function deserialize4(result) {
  const out = {};
  for (const row of result) {
    while (row.length >= 2) {
      const id = row.shift();
      const fields = row.shift();
      const target = out[id] ?? (out[id] = {});
      while (fields.length >= 2) {
        const name = fields.shift();
        const raw = fields.shift();
        try {
          target[name] = JSON.parse(raw);
        } catch {
          target[name] = raw;
        }
      }
    }
  }
  return out;
}
// XRANGE key start end [COUNT n]; the raw reply is reshaped by deserialize4
// into { [streamId]: { [field]: value } }.
var XRangeCommand = class extends Command {
  constructor([key, start, end, count], opts) {
    const command = ["XRANGE", key, start, end];
    if (typeof count === "number") {
      command.push("COUNT", count);
    }
    super(command, {
      deserialize: (result) => deserialize4(result),
      ...opts
    });
  }
};
// pkg/commands/xread.ts
var UNBALANCED_XREAD_ERR = "ERR Unbalanced XREAD list of streams: for each stream key an ID or '$' must be specified";
// XREAD [COUNT n] [BLOCK ms] STREAMS key [key ...] id [id ...].
// Rejects mismatched key/id array lengths up front with the same error Redis
// would produce.
var XReadCommand = class extends Command {
  constructor([key, id, options], opts) {
    if (Array.isArray(key) && Array.isArray(id) && key.length !== id.length) {
      throw new Error(UNBALANCED_XREAD_ERR);
    }
    const commands = [];
    if (typeof options?.count === "number") {
      commands.push("COUNT", options.count);
    }
    if (typeof options?.blockMS === "number") {
      commands.push("BLOCK", options.blockMS);
    }
    commands.push(
      "STREAMS",
      ...Array.isArray(key) ? [...key] : [key],
      ...Array.isArray(id) ? [...id] : [id]
    );
    super(["XREAD", ...commands], opts);
  }
};
// pkg/commands/xreadgroup.ts
var UNBALANCED_XREADGROUP_ERR = "ERR Unbalanced XREADGROUP list of streams: for each stream key an ID or '$' must be specified";
// XREADGROUP GROUP group consumer [COUNT n] [BLOCK ms] [NOACK] STREAMS ... —
// same key/id balance check as XREAD.
var XReadGroupCommand = class extends Command {
  constructor([group, consumer, key, id, options], opts) {
    if (Array.isArray(key) && Array.isArray(id) && key.length !== id.length) {
      throw new Error(UNBALANCED_XREADGROUP_ERR);
    }
    const commands = [];
    if (typeof options?.count === "number") {
      commands.push("COUNT", options.count);
    }
    if (typeof options?.blockMS === "number") {
      commands.push("BLOCK", options.blockMS);
    }
    if (typeof options?.NOACK === "boolean" && options.NOACK) {
      commands.push("NOACK");
    }
    commands.push(
      "STREAMS",
      ...Array.isArray(key) ? [...key] : [key],
      ...Array.isArray(id) ? [...id] : [id]
    );
    super(["XREADGROUP", "GROUP", group, consumer, ...commands], opts);
  }
};
// pkg/commands/xrevrange.ts
// XREVRANGE key end start [COUNT n]; reply reshaped by deserialize5 below.
var XRevRangeCommand = class extends Command {
  constructor([key, end, start, count], opts) {
    const command = ["XREVRANGE", key, end, start];
    if (typeof count === "number") {
      command.push("COUNT", count);
    }
    super(command, {
      deserialize: (result) => deserialize5(result),
      ...opts
    });
  }
};
// Reshapes a raw XREVRANGE reply into { [streamId]: { [field]: parsedValue } }.
// NOTE(review): byte-for-byte duplicate of deserialize4 kept by the bundler;
// candidates for consolidation upstream. Consumes input rows in place via
// shift(); values are JSON-parsed when possible, else kept as raw strings.
function deserialize5(result) {
  const parsed = {};
  for (const row of result) {
    while (row.length >= 2) {
      const streamId = row.shift();
      const fieldList = row.shift();
      if (!(streamId in parsed)) {
        parsed[streamId] = {};
      }
      const target = parsed[streamId];
      while (fieldList.length >= 2) {
        const fieldName = fieldList.shift();
        const rawValue = fieldList.shift();
        try {
          target[fieldName] = JSON.parse(rawValue);
        } catch {
          target[fieldName] = rawValue;
        }
      }
    }
  }
  return parsed;
}
// pkg/commands/xtrim.ts
// --- Thin Redis command wrappers (sorted sets & stream trim) ---
// XTRIM key MAXLEN|MINID [~|=] threshold [LIMIT n]; exactness defaults to "~"
// (approximate trimming).
var XTrimCommand = class extends Command {
  constructor([key, options], opts) {
    const { limit, strategy, threshold, exactness = "~" } = options;
    super(["XTRIM", key, strategy, exactness, threshold, ...limit ? ["LIMIT", limit] : []], opts);
  }
};
// pkg/commands/zadd.ts
// ZADD with optional NX|XX, CH, INCR and LT|GT flags; arg1 may itself be the
// first { score, member } pair, with the rest in arg2.
var ZAddCommand = class extends Command {
  constructor([key, arg1, ...arg2], opts) {
    const command = ["zadd", key];
    if ("nx" in arg1 && arg1.nx) {
      command.push("nx");
    } else if ("xx" in arg1 && arg1.xx) {
      command.push("xx");
    }
    if ("ch" in arg1 && arg1.ch) {
      command.push("ch");
    }
    if ("incr" in arg1 && arg1.incr) {
      command.push("incr");
    }
    if ("lt" in arg1 && arg1.lt) {
      command.push("lt");
    } else if ("gt" in arg1 && arg1.gt) {
      command.push("gt");
    }
    if ("score" in arg1 && "member" in arg1) {
      command.push(arg1.score, arg1.member);
    }
    command.push(...arg2.flatMap(({ score, member }) => [score, member]));
    super(command, opts);
  }
};
// pkg/commands/zcard.ts
var ZCardCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zcard", ...cmd], opts);
  }
};
// pkg/commands/zcount.ts
var ZCountCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zcount", ...cmd], opts);
  }
};
// pkg/commands/zincrby.ts
var ZIncrByCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zincrby", ...cmd], opts);
  }
};
// pkg/commands/zinterstore.ts
// ZINTERSTORE dest numkeys key [key ...] [WEIGHTS ...] [AGGREGATE ...];
// accepts either one key or an array of keys.
var ZInterStoreCommand = class extends Command {
  constructor([destination, numKeys, keyOrKeys, opts], cmdOpts) {
    const command = ["zinterstore", destination, numKeys];
    if (Array.isArray(keyOrKeys)) {
      command.push(...keyOrKeys);
    } else {
      command.push(keyOrKeys);
    }
    if (opts) {
      if ("weights" in opts && opts.weights) {
        command.push("weights", ...opts.weights);
      } else if ("weight" in opts && typeof opts.weight === "number") {
        command.push("weights", opts.weight);
      }
      if ("aggregate" in opts) {
        command.push("aggregate", opts.aggregate);
      }
    }
    super(command, cmdOpts);
  }
};
// pkg/commands/zlexcount.ts
var ZLexCountCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zlexcount", ...cmd], opts);
  }
};
// pkg/commands/zpopmax.ts
// ZPOPMAX with an optional numeric count.
var ZPopMaxCommand = class extends Command {
  constructor([key, count], opts) {
    const command = ["zpopmax", key];
    if (typeof count === "number") {
      command.push(count);
    }
    super(command, opts);
  }
};
// pkg/commands/zpopmin.ts
// ZPOPMIN with an optional numeric count.
var ZPopMinCommand = class extends Command {
  constructor([key, count], opts) {
    const command = ["zpopmin", key];
    if (typeof count === "number") {
      command.push(count);
    }
    super(command, opts);
  }
};
// pkg/commands/zrange.ts
// ZRANGE with optional BYSCORE/BYLEX/REV/LIMIT/WITHSCORES modifiers; LIMIT is
// only emitted when both offset and count are provided.
var ZRangeCommand = class extends Command {
  constructor([key, min, max, opts], cmdOpts) {
    const command = ["zrange", key, min, max];
    if (opts?.byScore) {
      command.push("byscore");
    }
    if (opts?.byLex) {
      command.push("bylex");
    }
    if (opts?.rev) {
      command.push("rev");
    }
    if (opts?.count !== void 0 && opts.offset !== void 0) {
      command.push("limit", opts.offset, opts.count);
    }
    if (opts?.withScores) {
      command.push("withscores");
    }
    super(command, cmdOpts);
  }
};
// pkg/commands/zrank.ts
var ZRankCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zrank", ...cmd], opts);
  }
};
// pkg/commands/zrem.ts
var ZRemCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zrem", ...cmd], opts);
  }
};
// pkg/commands/zremrangebylex.ts
var ZRemRangeByLexCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zremrangebylex", ...cmd], opts);
  }
};
// pkg/commands/zremrangebyrank.ts
var ZRemRangeByRankCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zremrangebyrank", ...cmd], opts);
  }
};
// pkg/commands/zremrangebyscore.ts
var ZRemRangeByScoreCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zremrangebyscore", ...cmd], opts);
  }
};
// pkg/commands/zrevrank.ts
var ZRevRankCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zrevrank", ...cmd], opts);
  }
};
// pkg/commands/zscan.ts
// ZSCAN key cursor [MATCH pattern] [COUNT n]; reply reshaped by deserializeScanResponse.
var ZScanCommand = class extends Command {
  constructor([key, cursor, opts], cmdOpts) {
    const command = ["zscan", key, cursor];
    if (opts?.match) {
      command.push("match", opts.match);
    }
    if (typeof opts?.count === "number") {
      command.push("count", opts.count);
    }
    super(command, {
      deserialize: deserializeScanResponse,
      ...cmdOpts
    });
  }
};
// pkg/commands/zscore.ts
var ZScoreCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zscore", ...cmd], opts);
  }
};
// pkg/commands/zunion.ts
// ZUNION numkeys key [key ...] [WEIGHTS ...] [AGGREGATE ...] [WITHSCORES];
// accepts either one key or an array of keys.
var ZUnionCommand = class extends Command {
  constructor([numKeys, keyOrKeys, opts], cmdOpts) {
    const command = ["zunion", numKeys];
    if (Array.isArray(keyOrKeys)) {
      command.push(...keyOrKeys);
    } else {
      command.push(keyOrKeys);
    }
    if (opts) {
      if ("weights" in opts && opts.weights) {
        command.push("weights", ...opts.weights);
      } else if ("weight" in opts && typeof opts.weight === "number") {
        command.push("weights", opts.weight);
      }
      if ("aggregate" in opts) {
        command.push("aggregate", opts.aggregate);
      }
      if (opts.withScores) {
        command.push("withscores");
      }
    }
    super(command, cmdOpts);
  }
};
// pkg/commands/zunionstore.ts
// ZUNIONSTORE dest numkeys key [key ...] [WEIGHTS ...] [AGGREGATE ...];
// same key/weights handling as ZInterStoreCommand.
var ZUnionStoreCommand = class extends Command {
  constructor([destination, numKeys, keyOrKeys, opts], cmdOpts) {
    const command = ["zunionstore", destination, numKeys];
    if (Array.isArray(keyOrKeys)) {
      command.push(...keyOrKeys);
    } else {
      command.push(keyOrKeys);
    }
    if (opts) {
      if ("weights" in opts && opts.weights) {
        command.push("weights", ...opts.weights);
      } else if ("weight" in opts && typeof opts.weight === "number") {
        command.push("weights", opts.weight);
      }
      if ("aggregate" in opts) {
        command.push("aggregate", opts.aggregate);
      }
    }
    super(command, cmdOpts);
  }
};
// pkg/commands/zdiffstore.ts
var ZDiffStoreCommand = class extends Command {
  constructor(cmd, opts) {
    super(["zdiffstore", ...cmd], opts);
  }
};
// pkg/commands/zmscore.ts
// ZMSCORE key member [member ...]: cmd is [key, members[]].
var ZMScoreCommand = class extends Command {
  constructor(cmd, opts) {
    const [key, members] = cmd;
    super(["zmscore", key, ...members], opts);
  }
};
// pkg/pipeline.ts
var Pipeline = class {
client;
commands;
commandOptions;
multiExec;
constructor(opts) {
this.client = opts.client;
this.commands = [];
this.commandOptions = opts.commandOptions;
this.multiExec = opts.multiExec ?? false;
if (this.commandOptions?.latencyLogging) {
const originalExec = this.exec.bind(this);
this.exec = async (options) => {
const start = performance.now();
const result = await (options ? originalExec(options) : originalExec());
const end = performance.now();
const loggerResult = (end - start).toFixed(2);
console.log(
`Latency for \x1B[38;2;19;185;39m${this.multiExec ? ["MULTI-EXEC"] : ["PIPELINE"].toString().toUpperCase()}\x1B[0m: \x1B[38;2;0;255;255m${loggerResult} ms\x1B[0m`
);
return result;
};
}
}
exec = async (options) => {
if (this.commands.length === 0) {
throw new Error("Pipeline is empty");
}
const path = this.multiExec ? ["multi-exec"] : ["pipeline"];
const res = await this.client.request({
path,
body: Object.values(this.commands).map((c) => c.command)
});
return options?.keepErrors ? res.map(({ error, result }, i) => {
return {
error,
result: this.commands[i].deserialize(result)
};
}) : res.map(({ error, result }, i) => {
if (error) {
throw new UpstashError(
`Command ${i + 1} [ ${this.commands[i].command[0]} ] failed: ${error}`
);
}
return this.commands[i].deserialize(result);
});
};
/**
* Returns the length of pipeline before the execution
*/
length() {
return this.commands.length;
}
/**
* Pushes a command into the pipeline and returns a chainable instance of the
* pipeline
*/
chain(command) {
this.commands.push(command);
return this;
}
/**
* @see https://redis.io/commands/append
*/
append = (...args) => this.chain(new AppendCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/bitcount
*/
bitcount = (...args) => this.chain(new BitCountCommand(args, this.commandOptions));
/**
* Returns an instance that can be used to execute `BITFIELD` commands on one key.
*
* @example
* ```typescript
* redis.set("mykey", 0);
* const result = await redis.pipeline()
* .bitfield("mykey")
* .set("u4", 0, 16)
* .incr("u4", "#1", 1)
* .exec();
* console.log(result); // [[0, 1]]
* ```
*
* @see https://redis.io/commands/bitfield
*/
bitfield = (...args) => new BitFieldCommand(args, this.client, this.commandOptions, this.chain.bind(this));
/**
* @see https://redis.io/commands/bitop
*/
bitop = (op, destinationKey, sourceKey, ...sourceKeys) => this.chain(
new BitOpCommand([op, destinationKey, sourceKey, ...sourceKeys], this.commandOptions)
);
/**
* @see https://redis.io/commands/bitpos
*/
bitpos = (...args) => this.chain(new BitPosCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/copy
*/
copy = (...args) => this.chain(new CopyCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zdiffstore
*/
zdiffstore = (...args) => this.chain(new ZDiffStoreCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/dbsize
*/
dbsize = () => this.chain(new DBSizeCommand(this.commandOptions));
/**
* @see https://redis.io/commands/decr
*/
decr = (...args) => this.chain(new DecrCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/decrby
*/
decrby = (...args) => this.chain(new DecrByCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/del
*/
del = (...args) => this.chain(new DelCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/echo
*/
echo = (...args) => this.chain(new EchoCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/eval
*/
eval = (...args) => this.chain(new EvalCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/evalsha
*/
evalsha = (...args) => this.chain(new EvalshaCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/exists
*/
exists = (...args) => this.chain(new ExistsCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/expire
*/
expire = (...args) => this.chain(new ExpireCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/expireat
*/
expireat = (...args) => this.chain(new ExpireAtCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/flushall
*/
flushall = (args) => this.chain(new FlushAllCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/flushdb
*/
flushdb = (...args) => this.chain(new FlushDBCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/geoadd
*/
geoadd = (...args) => this.chain(new GeoAddCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/geodist
*/
geodist = (...args) => this.chain(new GeoDistCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/geopos
*/
geopos = (...args) => this.chain(new GeoPosCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/geohash
*/
geohash = (...args) => this.chain(new GeoHashCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/geosearch
*/
geosearch = (...args) => this.chain(new GeoSearchCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/geosearchstore
*/
geosearchstore = (...args) => this.chain(new GeoSearchStoreCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/get
*/
get = (...args) => this.chain(new GetCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/getbit
*/
getbit = (...args) => this.chain(new GetBitCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/getdel
*/
getdel = (...args) => this.chain(new GetDelCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/getrange
*/
getrange = (...args) => this.chain(new GetRangeCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/getset
*/
getset = (key, value) => this.chain(new GetSetCommand([key, value], this.commandOptions));
/**
* @see https://redis.io/commands/hdel
*/
hdel = (...args) => this.chain(new HDelCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/hexists
*/
hexists = (...args) => this.chain(new HExistsCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/hget
*/
hget = (...args) => this.chain(new HGetCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/hgetall
*/
hgetall = (...args) => this.chain(new HGetAllCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/hincrby
*/
hincrby = (...args) => this.chain(new HIncrByCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/hincrbyfloat
*/
hincrbyfloat = (...args) => this.chain(new HIncrByFloatCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/hkeys
*/
hkeys = (...args) => this.chain(new HKeysCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/hlen
*/
hlen = (...args) => this.chain(new HLenCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/hmget
*/
hmget = (...args) => this.chain(new HMGetCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/hmset
*/
hmset = (key, kv) => this.chain(new HMSetCommand([key, kv], this.commandOptions));
/**
* @see https://redis.io/commands/hrandfield
*/
hrandfield = (key, count, withValues) => this.chain(new HRandFieldCommand([key, count, withValues], this.commandOptions));
/**
* @see https://redis.io/commands/hscan
*/
hscan = (...args) => this.chain(new HScanCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/hset
*/
hset = (key, kv) => this.chain(new HSetCommand([key, kv], this.commandOptions));
/**
* @see https://redis.io/commands/hsetnx
*/
hsetnx = (key, field, value) => this.chain(new HSetNXCommand([key, field, value], this.commandOptions));
/**
* @see https://redis.io/commands/hstrlen
*/
hstrlen = (...args) => this.chain(new HStrLenCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/hvals
*/
hvals = (...args) => this.chain(new HValsCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/incr
*/
incr = (...args) => this.chain(new IncrCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/incrby
*/
incrby = (...args) => this.chain(new IncrByCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/incrbyfloat
*/
incrbyfloat = (...args) => this.chain(new IncrByFloatCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/keys
*/
keys = (...args) => this.chain(new KeysCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/lindex
*/
lindex = (...args) => this.chain(new LIndexCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/linsert
*/
linsert = (key, direction, pivot, value) => this.chain(new LInsertCommand([key, direction, pivot, value], this.commandOptions));
/**
* @see https://redis.io/commands/llen
*/
llen = (...args) => this.chain(new LLenCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/lmove
*/
lmove = (...args) => this.chain(new LMoveCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/lpop
*/
lpop = (...args) => this.chain(new LPopCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/lmpop
*/
lmpop = (...args) => this.chain(new LmPopCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/lpos
*/
lpos = (...args) => this.chain(new LPosCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/lpush
*/
lpush = (key, ...elements) => this.chain(new LPushCommand([key, ...elements], this.commandOptions));
/**
* @see https://redis.io/commands/lpushx
*/
lpushx = (key, ...elements) => this.chain(new LPushXCommand([key, ...elements], this.commandOptions));
/**
* @see https://redis.io/commands/lrange
*/
lrange = (...args) => this.chain(new LRangeCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/lrem
*/
lrem = (key, count, value) => this.chain(new LRemCommand([key, count, value], this.commandOptions));
/**
* @see https://redis.io/commands/lset
*/
lset = (key, index, value) => this.chain(new LSetCommand([key, index, value], this.commandOptions));
/**
* @see https://redis.io/commands/ltrim
*/
ltrim = (...args) => this.chain(new LTrimCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/mget
*/
mget = (...args) => this.chain(new MGetCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/mset
*/
mset = (kv) => this.chain(new MSetCommand([kv], this.commandOptions));
/**
* @see https://redis.io/commands/msetnx
*/
msetnx = (kv) => this.chain(new MSetNXCommand([kv], this.commandOptions));
/**
* @see https://redis.io/commands/persist
*/
persist = (...args) => this.chain(new PersistCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/pexpire
*/
pexpire = (...args) => this.chain(new PExpireCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/pexpireat
*/
pexpireat = (...args) => this.chain(new PExpireAtCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/pfadd
*/
pfadd = (...args) => this.chain(new PfAddCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/pfcount
*/
pfcount = (...args) => this.chain(new PfCountCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/pfmerge
*/
pfmerge = (...args) => this.chain(new PfMergeCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/ping
*/
ping = (args) => this.chain(new PingCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/psetex
*/
psetex = (key, ttl, value) => this.chain(new PSetEXCommand([key, ttl, value], this.commandOptions));
/**
* @see https://redis.io/commands/pttl
*/
pttl = (...args) => this.chain(new PTtlCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/publish
*/
publish = (...args) => this.chain(new PublishCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/randomkey
*/
randomkey = () => this.chain(new RandomKeyCommand(this.commandOptions));
/**
* @see https://redis.io/commands/rename
*/
rename = (...args) => this.chain(new RenameCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/renamenx
*/
renamenx = (...args) => this.chain(new RenameNXCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/rpop
*/
rpop = (...args) => this.chain(new RPopCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/rpush
*/
rpush = (key, ...elements) => this.chain(new RPushCommand([key, ...elements], this.commandOptions));
/**
* @see https://redis.io/commands/rpushx
*/
rpushx = (key, ...elements) => this.chain(new RPushXCommand([key, ...elements], this.commandOptions));
/**
* @see https://redis.io/commands/sadd
*/
sadd = (key, member, ...members) => this.chain(new SAddCommand([key, member, ...members], this.commandOptions));
/**
* @see https://redis.io/commands/scan
*/
scan = (...args) => this.chain(new ScanCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/scard
*/
scard = (...args) => this.chain(new SCardCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/script-exists
*/
scriptExists = (...args) => this.chain(new ScriptExistsCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/script-flush
*/
scriptFlush = (...args) => this.chain(new ScriptFlushCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/script-load
*/
scriptLoad = (...args) => this.chain(new ScriptLoadCommand(args, this.commandOptions));
/**
 * @see https://redis.io/commands/sdiff
 */
sdiff = (...args) => this.chain(new SDiffCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/sdiffstore
*/
sdiffstore = (...args) => this.chain(new SDiffStoreCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/set
*/
set = (key, value, opts) => this.chain(new SetCommand([key, value, opts], this.commandOptions));
/**
* @see https://redis.io/commands/setbit
*/
setbit = (...args) => this.chain(new SetBitCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/setex
*/
setex = (key, ttl, value) => this.chain(new SetExCommand([key, ttl, value], this.commandOptions));
/**
* @see https://redis.io/commands/setnx
*/
setnx = (key, value) => this.chain(new SetNxCommand([key, value], this.commandOptions));
/**
* @see https://redis.io/commands/setrange
*/
setrange = (...args) => this.chain(new SetRangeCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/sinter
*/
sinter = (...args) => this.chain(new SInterCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/sinterstore
*/
sinterstore = (...args) => this.chain(new SInterStoreCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/sismember
*/
sismember = (key, member) => this.chain(new SIsMemberCommand([key, member], this.commandOptions));
/**
* @see https://redis.io/commands/smembers
*/
smembers = (...args) => this.chain(new SMembersCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/smismember
*/
smismember = (key, members) => this.chain(new SMIsMemberCommand([key, members], this.commandOptions));
/**
* @see https://redis.io/commands/smove
*/
smove = (source, destination, member) => this.chain(new SMoveCommand([source, destination, member], this.commandOptions));
/**
* @see https://redis.io/commands/spop
*/
spop = (...args) => this.chain(new SPopCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/srandmember
*/
srandmember = (...args) => this.chain(new SRandMemberCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/srem
*/
srem = (key, ...members) => this.chain(new SRemCommand([key, ...members], this.commandOptions));
/**
* @see https://redis.io/commands/sscan
*/
sscan = (...args) => this.chain(new SScanCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/strlen
*/
strlen = (...args) => this.chain(new StrLenCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/sunion
*/
sunion = (...args) => this.chain(new SUnionCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/sunionstore
*/
sunionstore = (...args) => this.chain(new SUnionStoreCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/time
*/
time = () => this.chain(new TimeCommand(this.commandOptions));
/**
* @see https://redis.io/commands/touch
*/
touch = (...args) => this.chain(new TouchCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/ttl
*/
ttl = (...args) => this.chain(new TtlCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/type
*/
type = (...args) => this.chain(new TypeCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/unlink
*/
unlink = (...args) => this.chain(new UnlinkCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zadd
*/
/**
 * @see https://redis.io/commands/zadd
 *
 * Accepts either `(key, opts, ...scoreMembers)` or `(key, ...scoreMembers)`.
 * Both call shapes forward the arguments to ZAddCommand unchanged, so the
 * previous `"score" in args[1]` test selected between two byte-identical
 * branches; the dead branch has been collapsed into a single call. As a side
 * effect this also avoids a TypeError the `in` operator would throw when
 * args[1] is null/undefined.
 */
zadd = (...args) => this.chain(
  new ZAddCommand([args[0], args[1], ...args.slice(2)], this.commandOptions)
);
/**
* @see https://redis.io/commands/xadd
*/
xadd = (...args) => this.chain(new XAddCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xack
*/
xack = (...args) => this.chain(new XAckCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xdel
*/
xdel = (...args) => this.chain(new XDelCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xgroup
*/
xgroup = (...args) => this.chain(new XGroupCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xread
*/
xread = (...args) => this.chain(new XReadCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xreadgroup
*/
xreadgroup = (...args) => this.chain(new XReadGroupCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xinfo
*/
xinfo = (...args) => this.chain(new XInfoCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xlen
*/
xlen = (...args) => this.chain(new XLenCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xpending
*/
xpending = (...args) => this.chain(new XPendingCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xclaim
*/
xclaim = (...args) => this.chain(new XClaimCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xautoclaim
*/
xautoclaim = (...args) => this.chain(new XAutoClaim(args, this.commandOptions));
/**
* @see https://redis.io/commands/xtrim
*/
xtrim = (...args) => this.chain(new XTrimCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xrange
*/
xrange = (...args) => this.chain(new XRangeCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/xrevrange
*/
xrevrange = (...args) => this.chain(new XRevRangeCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zcard
*/
zcard = (...args) => this.chain(new ZCardCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zcount
*/
zcount = (...args) => this.chain(new ZCountCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zincrby
*/
zincrby = (key, increment, member) => this.chain(new ZIncrByCommand([key, increment, member], this.commandOptions));
/**
* @see https://redis.io/commands/zinterstore
*/
zinterstore = (...args) => this.chain(new ZInterStoreCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zlexcount
*/
zlexcount = (...args) => this.chain(new ZLexCountCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zmscore
*/
zmscore = (...args) => this.chain(new ZMScoreCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zpopmax
*/
zpopmax = (...args) => this.chain(new ZPopMaxCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zpopmin
*/
zpopmin = (...args) => this.chain(new ZPopMinCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zrange
*/
zrange = (...args) => this.chain(new ZRangeCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zrank
*/
zrank = (key, member) => this.chain(new ZRankCommand([key, member], this.commandOptions));
/**
* @see https://redis.io/commands/zrem
*/
zrem = (key, ...members) => this.chain(new ZRemCommand([key, ...members], this.commandOptions));
/**
* @see https://redis.io/commands/zremrangebylex
*/
zremrangebylex = (...args) => this.chain(new ZRemRangeByLexCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zremrangebyrank
*/
zremrangebyrank = (...args) => this.chain(new ZRemRangeByRankCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zremrangebyscore
*/
zremrangebyscore = (...args) => this.chain(new ZRemRangeByScoreCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zrevrank
*/
zrevrank = (key, member) => this.chain(new ZRevRankCommand([key, member], this.commandOptions));
/**
* @see https://redis.io/commands/zscan
*/
zscan = (...args) => this.chain(new ZScanCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zscore
*/
zscore = (key, member) => this.chain(new ZScoreCommand([key, member], this.commandOptions));
/**
* @see https://redis.io/commands/zunionstore
*/
zunionstore = (...args) => this.chain(new ZUnionStoreCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/zunion
*/
zunion = (...args) => this.chain(new ZUnionCommand(args, this.commandOptions));
/**
* @see https://redis.io/commands/?group=json
*/
get json() {
return {
/**
* @see https://redis.io/commands/json.arrappend
*/
arrappend: (...args) => this.chain(new JsonArrAppendCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.arrindex
*/
arrindex: (...args) => this.chain(new JsonArrIndexCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.arrinsert
*/
arrinsert: (...args) => this.chain(new JsonArrInsertCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.arrlen
*/
arrlen: (...args) => this.chain(new JsonArrLenCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.arrpop
*/
arrpop: (...args) => this.chain(new JsonArrPopCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.arrtrim
*/
arrtrim: (...args) => this.chain(new JsonArrTrimCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.clear
*/
clear: (...args) => this.chain(new JsonClearCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.del
*/
del: (...args) => this.chain(new JsonDelCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.forget
*/
forget: (...args) => this.chain(new JsonForgetCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.get
*/
get: (...args) => this.chain(new JsonGetCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.mget
*/
mget: (...args) => this.chain(new JsonMGetCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.mset
*/
mset: (...args) => this.chain(new JsonMSetCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.numincrby
*/
numincrby: (...args) => this.chain(new JsonNumIncrByCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.nummultby
*/
nummultby: (...args) => this.chain(new JsonNumMultByCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.objkeys
*/
objkeys: (...args) => this.chain(new JsonObjKeysCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.objlen
*/
objlen: (...args) => this.chain(new JsonObjLenCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.resp
*/
resp: (...args) => this.chain(new JsonRespCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.set
*/
set: (...args) => this.chain(new JsonSetCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.strappend
*/
strappend: (...args) => this.chain(new JsonStrAppendCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.strlen
*/
strlen: (...args) => this.chain(new JsonStrLenCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.toggle
*/
toggle: (...args) => this.chain(new JsonToggleCommand(args, this.commandOptions)),
/**
* @see https://redis.io/commands/json.type
*/
type: (...args) => this.chain(new JsonTypeCommand(args, this.commandOptions))
};
}
};
// pkg/script.ts
var import_enc_hex = __toESM(__nccwpck_require__(606));
var import_sha1 = __toESM(__nccwpck_require__(377));
var Script = class {
  script;
  sha1;
  redis;
  /**
   * Wrap a lua script for execution against a redis client.
   * The script's sha1 digest is computed once here and cached for EVALSHA.
   */
  constructor(redis, script) {
    this.redis = redis;
    this.script = script;
    this.sha1 = this.digest(script);
  }
  /**
   * Send an `EVAL` command to redis.
   */
  async eval(keys, args) {
    return await this.redis.eval(this.script, keys, args);
  }
  /**
   * Calculates the sha1 hash of the script and then calls `EVALSHA`.
   */
  async evalsha(keys, args) {
    return await this.redis.evalsha(this.sha1, keys, args);
  }
  /**
   * Optimistically try to run `EVALSHA` first.
   * If the script is not loaded in redis, it will fall back and try again with `EVAL`.
   *
   * Following calls will be able to use the cached script
   */
  async exec(keys, args) {
    try {
      return await this.redis.evalsha(this.sha1, keys, args);
    } catch (error) {
      // Redis reports a missing cached script with a NOSCRIPT error;
      // fall back to EVAL, which loads the script server-side as well.
      if (error instanceof Error && error.message.toLowerCase().includes("noscript")) {
        return await this.redis.eval(this.script, keys, args);
      }
      throw error;
    }
  }
  /**
   * Compute the sha1 hash of the script and return its hex representation.
   */
  digest(s) {
    return import_enc_hex.default.stringify((0, import_sha1.default)(s));
  }
};
// pkg/redis.ts
var Redis = class {
client;
opts;
enableTelemetry;
enableAutoPipelining;
/**
* Create a new redis client
*
* @example
* ```typescript
* const redis = new Redis({
* url: "<UPSTASH_REDIS_REST_URL>",
* token: "<UPSTASH_REDIS_REST_TOKEN>",
* });
* ```
*/
constructor(client, opts) {
this.client = client;
this.opts = opts;
this.enableTelemetry = opts?.enableTelemetry ?? true;
if (opts?.readYourWrites === false) {
this.client.readYourWrites = false;
}
this.enableAutoPipelining = opts?.enableAutoPipelining ?? true;
}
get readYourWritesSyncToken() {
return this.client.upstashSyncToken;
}
set readYourWritesSyncToken(session) {
this.client.upstashSyncToken = session;
}
get json() {
return {
/**
* @see https://redis.io/commands/json.arrappend
*/
arrappend: (...args) => new JsonArrAppendCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.arrindex
*/
arrindex: (...args) => new JsonArrIndexCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.arrinsert
*/
arrinsert: (...args) => new JsonArrInsertCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.arrlen
*/
arrlen: (...args) => new JsonArrLenCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.arrpop
*/
arrpop: (...args) => new JsonArrPopCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.arrtrim
*/
arrtrim: (...args) => new JsonArrTrimCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.clear
*/
clear: (...args) => new JsonClearCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.del
*/
del: (...args) => new JsonDelCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.forget
*/
forget: (...args) => new JsonForgetCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.get
*/
get: (...args) => new JsonGetCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.mget
*/
mget: (...args) => new JsonMGetCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.mset
*/
mset: (...args) => new JsonMSetCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.numincrby
*/
numincrby: (...args) => new JsonNumIncrByCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.nummultby
*/
nummultby: (...args) => new JsonNumMultByCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.objkeys
*/
objkeys: (...args) => new JsonObjKeysCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.objlen
*/
objlen: (...args) => new JsonObjLenCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.resp
*/
resp: (...args) => new JsonRespCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.set
*/
set: (...args) => new JsonSetCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.strappend
*/
strappend: (...args) => new JsonStrAppendCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.strlen
*/
strlen: (...args) => new JsonStrLenCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.toggle
*/
toggle: (...args) => new JsonToggleCommand(args, this.opts).exec(this.client),
/**
* @see https://redis.io/commands/json.type
*/
type: (...args) => new JsonTypeCommand(args, this.opts).exec(this.client)
};
}
/**
* Wrap a new middleware around the HTTP client.
*/
use = (middleware) => {
const makeRequest = this.client.request.bind(this.client);
this.client.request = (req) => middleware(req, makeRequest);
};
/**
* Technically this is not private, we can hide it from intellisense by doing this
*/
addTelemetry = (telemetry) => {
if (!this.enableTelemetry) {
return;
}
try {
this.client.mergeTelemetry(telemetry);
} catch {
}
};
createScript(script) {
return new Script(this, script);
}
/**
* Create a new pipeline that allows you to send requests in bulk.
*
* @see {@link Pipeline}
*/
pipeline = () => new Pipeline({
client: this.client,
commandOptions: this.opts,
multiExec: false
});
autoPipeline = () => {
return createAutoPipelineProxy(this);
};
/**
* Create a new transaction to allow executing multiple steps atomically.
*
* All the commands in a transaction are serialized and executed sequentially. A request sent by
* another client will never be served in the middle of the execution of a Redis Transaction. This
* guarantees that the commands are executed as a single isolated operation.
*
* @see {@link Pipeline}
*/
multi = () => new Pipeline({
client: this.client,
commandOptions: this.opts,
multiExec: true
});
/**
* Returns an instance that can be used to execute `BITFIELD` commands on one key.
*
* @example
* ```typescript
* redis.set("mykey", 0);
* const result = await redis.bitfield("mykey")
* .set("u4", 0, 16)
* .incr("u4", "#1", 1)
* .exec();
* console.log(result); // [0, 1]
* ```
*
* @see https://redis.io/commands/bitfield
*/
bitfield = (...args) => new BitFieldCommand(args, this.client, this.opts);
/**
* @see https://redis.io/commands/append
*/
append = (...args) => new AppendCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/bitcount
*/
bitcount = (...args) => new BitCountCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/bitop
*/
bitop = (op, destinationKey, sourceKey, ...sourceKeys) => new BitOpCommand([op, destinationKey, sourceKey, ...sourceKeys], this.opts).exec(
this.client
);
/**
* @see https://redis.io/commands/bitpos
*/
bitpos = (...args) => new BitPosCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/copy
*/
copy = (...args) => new CopyCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/dbsize
*/
dbsize = () => new DBSizeCommand(this.opts).exec(this.client);
/**
* @see https://redis.io/commands/decr
*/
decr = (...args) => new DecrCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/decrby
*/
decrby = (...args) => new DecrByCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/del
*/
del = (...args) => new DelCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/echo
*/
echo = (...args) => new EchoCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/eval
*/
eval = (...args) => new EvalCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/evalsha
*/
evalsha = (...args) => new EvalshaCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/exists
*/
exists = (...args) => new ExistsCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/expire
*/
expire = (...args) => new ExpireCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/expireat
*/
expireat = (...args) => new ExpireAtCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/flushall
*/
flushall = (args) => new FlushAllCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/flushdb
*/
flushdb = (...args) => new FlushDBCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/geoadd
*/
geoadd = (...args) => new GeoAddCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/geopos
*/
geopos = (...args) => new GeoPosCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/geodist
*/
geodist = (...args) => new GeoDistCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/geohash
*/
geohash = (...args) => new GeoHashCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/geosearch
*/
geosearch = (...args) => new GeoSearchCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/geosearchstore
*/
geosearchstore = (...args) => new GeoSearchStoreCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/get
*/
get = (...args) => new GetCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/getbit
*/
getbit = (...args) => new GetBitCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/getdel
*/
getdel = (...args) => new GetDelCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/getrange
*/
getrange = (...args) => new GetRangeCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/getset
*/
getset = (key, value) => new GetSetCommand([key, value], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hdel
*/
hdel = (...args) => new HDelCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hexists
*/
hexists = (...args) => new HExistsCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hget
*/
hget = (...args) => new HGetCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hgetall
*/
hgetall = (...args) => new HGetAllCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hincrby
*/
hincrby = (...args) => new HIncrByCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hincrbyfloat
*/
hincrbyfloat = (...args) => new HIncrByFloatCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hkeys
*/
hkeys = (...args) => new HKeysCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hlen
*/
hlen = (...args) => new HLenCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hmget
*/
hmget = (...args) => new HMGetCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hmset
*/
hmset = (key, kv) => new HMSetCommand([key, kv], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hrandfield
*/
hrandfield = (key, count, withValues) => new HRandFieldCommand([key, count, withValues], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hscan
*/
hscan = (...args) => new HScanCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hset
*/
hset = (key, kv) => new HSetCommand([key, kv], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hsetnx
*/
hsetnx = (key, field, value) => new HSetNXCommand([key, field, value], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hstrlen
*/
hstrlen = (...args) => new HStrLenCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/hvals
*/
hvals = (...args) => new HValsCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/incr
*/
incr = (...args) => new IncrCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/incrby
*/
incrby = (...args) => new IncrByCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/incrbyfloat
*/
incrbyfloat = (...args) => new IncrByFloatCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/keys
*/
keys = (...args) => new KeysCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/lindex
*/
lindex = (...args) => new LIndexCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/linsert
*/
linsert = (key, direction, pivot, value) => new LInsertCommand([key, direction, pivot, value], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/llen
*/
llen = (...args) => new LLenCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/lmove
*/
lmove = (...args) => new LMoveCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/lpop
*/
lpop = (...args) => new LPopCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/lmpop
*/
lmpop = (...args) => new LmPopCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/lpos
*/
lpos = (...args) => new LPosCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/lpush
*/
lpush = (key, ...elements) => new LPushCommand([key, ...elements], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/lpushx
*/
lpushx = (key, ...elements) => new LPushXCommand([key, ...elements], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/lrange
*/
lrange = (...args) => new LRangeCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/lrem
*/
lrem = (key, count, value) => new LRemCommand([key, count, value], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/lset
*/
lset = (key, index, value) => new LSetCommand([key, index, value], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/ltrim
*/
ltrim = (...args) => new LTrimCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/mget
*/
mget = (...args) => new MGetCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/mset
*/
mset = (kv) => new MSetCommand([kv], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/msetnx
*/
msetnx = (kv) => new MSetNXCommand([kv], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/persist
*/
persist = (...args) => new PersistCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/pexpire
*/
pexpire = (...args) => new PExpireCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/pexpireat
*/
pexpireat = (...args) => new PExpireAtCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/pfadd
*/
pfadd = (...args) => new PfAddCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/pfcount
*/
pfcount = (...args) => new PfCountCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/pfmerge
*/
pfmerge = (...args) => new PfMergeCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/ping
*/
ping = (args) => new PingCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/psetex
*/
psetex = (key, ttl, value) => new PSetEXCommand([key, ttl, value], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/pttl
*/
pttl = (...args) => new PTtlCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/publish
*/
publish = (...args) => new PublishCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/randomkey
*/
randomkey = () => new RandomKeyCommand().exec(this.client);
/**
* @see https://redis.io/commands/rename
*/
rename = (...args) => new RenameCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/renamenx
*/
renamenx = (...args) => new RenameNXCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/rpop
*/
rpop = (...args) => new RPopCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/rpush
*/
rpush = (key, ...elements) => new RPushCommand([key, ...elements], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/rpushx
*/
rpushx = (key, ...elements) => new RPushXCommand([key, ...elements], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/sadd
*/
sadd = (key, member, ...members) => new SAddCommand([key, member, ...members], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/scan
*/
scan = (...args) => new ScanCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/scard
*/
scard = (...args) => new SCardCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/script-exists
*/
scriptExists = (...args) => new ScriptExistsCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/script-flush
*/
scriptFlush = (...args) => new ScriptFlushCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/script-load
*/
scriptLoad = (...args) => new ScriptLoadCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/sdiff
*/
sdiff = (...args) => new SDiffCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/sdiffstore
*/
sdiffstore = (...args) => new SDiffStoreCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/set
*/
set = (key, value, opts) => new SetCommand([key, value, opts], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/setbit
*/
setbit = (...args) => new SetBitCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/setex
*/
setex = (key, ttl, value) => new SetExCommand([key, ttl, value], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/setnx
*/
setnx = (key, value) => new SetNxCommand([key, value], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/setrange
*/
setrange = (...args) => new SetRangeCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/sinter
*/
sinter = (...args) => new SInterCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/sinterstore
*/
sinterstore = (...args) => new SInterStoreCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/sismember
*/
sismember = (key, member) => new SIsMemberCommand([key, member], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/smismember
*/
smismember = (key, members) => new SMIsMemberCommand([key, members], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/smembers
*/
smembers = (...args) => new SMembersCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/smove
*/
smove = (source, destination, member) => new SMoveCommand([source, destination, member], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/spop
*/
spop = (...args) => new SPopCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/srandmember
*/
srandmember = (...args) => new SRandMemberCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/srem
*/
srem = (key, ...members) => new SRemCommand([key, ...members], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/sscan
*/
sscan = (...args) => new SScanCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/strlen
*/
strlen = (...args) => new StrLenCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/sunion
*/
sunion = (...args) => new SUnionCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/sunionstore
*/
sunionstore = (...args) => new SUnionStoreCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/time
*/
time = () => new TimeCommand().exec(this.client);
/**
* @see https://redis.io/commands/touch
*/
touch = (...args) => new TouchCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/ttl
*/
ttl = (...args) => new TtlCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/type
*/
type = (...args) => new TypeCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/unlink
*/
unlink = (...args) => new UnlinkCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xadd
*/
xadd = (...args) => new XAddCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xack
*/
xack = (...args) => new XAckCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xdel
*/
xdel = (...args) => new XDelCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xgroup
*/
xgroup = (...args) => new XGroupCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xread
*/
xread = (...args) => new XReadCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xreadgroup
*/
xreadgroup = (...args) => new XReadGroupCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xinfo
*/
xinfo = (...args) => new XInfoCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xlen
*/
xlen = (...args) => new XLenCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xpending
*/
xpending = (...args) => new XPendingCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xclaim
*/
xclaim = (...args) => new XClaimCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xautoclaim
*/
xautoclaim = (...args) => new XAutoClaim(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xtrim
*/
xtrim = (...args) => new XTrimCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xrange
*/
xrange = (...args) => new XRangeCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/xrevrange
*/
xrevrange = (...args) => new XRevRangeCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zadd
*/
/**
 * Adds members with scores to a sorted set.
 *
 * @see https://redis.io/commands/zadd
 *
 * NOTE(review): the bundled original branched on `"score" in args[1]`
 * (options object vs. first score/member pair), but BOTH branches built
 * the exact same argument list `[args[0], args[1], ...args.slice(2)]` —
 * the conditional was dead code. It is collapsed into a single
 * construction here; the produced command is identical for every call
 * shape. (As a side effect this also no longer throws a TypeError if
 * args[1] were a primitive, which is not a supported call shape anyway.)
 */
zadd = (...args) => new ZAddCommand([args[0], args[1], ...args.slice(2)], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zcard
*/
zcard = (...args) => new ZCardCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zcount
*/
zcount = (...args) => new ZCountCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zdiffstore
*/
zdiffstore = (...args) => new ZDiffStoreCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zincrby
*/
zincrby = (key, increment, member) => new ZIncrByCommand([key, increment, member], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zinterstore
*/
zinterstore = (...args) => new ZInterStoreCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zlexcount
*/
zlexcount = (...args) => new ZLexCountCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zmscore
*/
zmscore = (...args) => new ZMScoreCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zpopmax
*/
zpopmax = (...args) => new ZPopMaxCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zpopmin
*/
zpopmin = (...args) => new ZPopMinCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zrange
*/
zrange = (...args) => new ZRangeCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zrank
*/
zrank = (key, member) => new ZRankCommand([key, member], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zrem
*/
zrem = (key, ...members) => new ZRemCommand([key, ...members], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zremrangebylex
*/
zremrangebylex = (...args) => new ZRemRangeByLexCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zremrangebyrank
*/
zremrangebyrank = (...args) => new ZRemRangeByRankCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zremrangebyscore
*/
zremrangebyscore = (...args) => new ZRemRangeByScoreCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zrevrank
*/
zrevrank = (key, member) => new ZRevRankCommand([key, member], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zscan
*/
zscan = (...args) => new ZScanCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zscore
*/
zscore = (key, member) => new ZScoreCommand([key, member], this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zunion
*/
zunion = (...args) => new ZUnionCommand(args, this.opts).exec(this.client);
/**
* @see https://redis.io/commands/zunionstore
*/
zunionstore = (...args) => new ZUnionStoreCommand(args, this.opts).exec(this.client);
};
// version.ts
var VERSION = "v1.34.3";
// platforms/nodejs.ts
if (typeof atob === "undefined") {
global.atob = (b64) => Buffer.from(b64, "base64").toString("utf8");
}
// Public entry-point class of @upstash/redis: wraps the generated command
// surface (Redis) with config validation, HTTP transport setup and telemetry.
var Redis2 = class _Redis extends Redis {
/**
 * Create a new redis client by providing a custom `Requester` implementation
 *
 * @example
 * ```ts
 *
 * import { UpstashRequest, Requester, UpstashResponse, Redis } from "@upstash/redis"
 *
 * const requester: Requester = {
 *   request: <TResult>(req: UpstashRequest): Promise<UpstashResponse<TResult>> => {
 *     // ...
 *   }
 * }
 *
 * const redis = new Redis(requester)
 * ```
 */
constructor(configOrRequester) {
// Duck-type check: anything exposing `request` is treated as a custom
// Requester and passed straight to the base class — no transport is built.
if ("request" in configOrRequester) {
super(configOrRequester);
return;
}
// Config path: warn (but do not throw) on missing or whitespace-polluted
// credentials, since both silently break the HTTP transport at runtime.
if (!configOrRequester.url) {
console.warn(
`[Upstash Redis] The 'url' property is missing or undefined in your Redis config.`
);
} else if (configOrRequester.url.startsWith(" ") || configOrRequester.url.endsWith(" ") || /\r|\n/.test(configOrRequester.url)) {
console.warn(
"[Upstash Redis] The redis url contains whitespace or newline, which can cause errors!"
);
}
if (!configOrRequester.token) {
console.warn(
`[Upstash Redis] The 'token' property is missing or undefined in your Redis config.`
);
} else if (configOrRequester.token.startsWith(" ") || configOrRequester.token.endsWith(" ") || /\r|\n/.test(configOrRequester.token)) {
console.warn(
"[Upstash Redis] The redis token contains whitespace or newline, which can cause errors!"
);
}
// Build the REST transport; `cache` defaults to "no-store" unless the
// caller overrides it (e.g. @vercel/kv passes "default").
const client = new HttpClient({
baseUrl: configOrRequester.url,
retry: configOrRequester.retry,
headers: { authorization: `Bearer ${configOrRequester.token}` },
agent: configOrRequester.agent,
responseEncoding: configOrRequester.responseEncoding,
cache: configOrRequester.cache ?? "no-store",
signal: configOrRequester.signal,
keepAlive: configOrRequester.keepAlive,
readYourWrites: configOrRequester.readYourWrites
});
// Second super() path (mutually exclusive with the Requester path above):
// telemetry is opt-out via the UPSTASH_DISABLE_TELEMETRY env var.
super(client, {
automaticDeserialization: configOrRequester.automaticDeserialization,
enableTelemetry: !process.env.UPSTASH_DISABLE_TELEMETRY,
latencyLogging: configOrRequester.latencyLogging,
enableAutoPipelining: configOrRequester.enableAutoPipelining
});
this.addTelemetry({
runtime: (
// @ts-expect-error to silence compiler
typeof EdgeRuntime === "string" ? "edge-light" : `node@${process.version}`
),
platform: process.env.VERCEL ? "vercel" : process.env.AWS_REGION ? "aws" : "unknown",
sdk: `@upstash/redis@${VERSION}`
});
// Returning an object from a constructor replaces `this`: callers get the
// auto-pipelining proxy instead of the raw client when the flag is set.
if (this.enableAutoPipelining) {
return this.autoPipeline();
}
}
/**
 * Create a new Upstash Redis instance from environment variables.
 *
 * Use this to automatically load connection secrets from your environment
 * variables. For instance when using the Vercel integration.
 *
 * This tries to load `UPSTASH_REDIS_REST_URL` and `UPSTASH_REDIS_REST_TOKEN` from
 * your environment using `process.env`.
 */
static fromEnv(config) {
// `process` is absent on some edge runtimes (e.g. Cloudflare Workers);
// fail loudly with a pointer to the dedicated entry point.
if (process.env === void 0) {
throw new TypeError(
'[Upstash Redis] Unable to get environment variables, `process.env` is undefined. If you are deploying to cloudflare, please import from "@upstash/redis/cloudflare" instead'
);
}
// Falls back to the Vercel KV variable names so the same code works
// under the Vercel integration without extra configuration.
const url = process.env.UPSTASH_REDIS_REST_URL || process.env.KV_REST_API_URL;
if (!url) {
console.warn("[Upstash Redis] Unable to find environment variable: `UPSTASH_REDIS_REST_URL`");
}
const token = process.env.UPSTASH_REDIS_REST_TOKEN || process.env.KV_REST_API_TOKEN;
if (!token) {
console.warn(
"[Upstash Redis] Unable to find environment variable: `UPSTASH_REDIS_REST_TOKEN`"
);
}
return new _Redis({ ...config, url, token });
}
};
// Annotate the CommonJS export names for ESM import in node:
0 && (0);
/***/ }),
/***/ 255:
/***/ (function(module, exports, __nccwpck_require__) {
;(function (root, factory) {
if (true) {
// CommonJS
module.exports = exports = factory();
}
else {}
}(this, function () {
/*globals window, global, require*/
/**
* CryptoJS core components.
*/
var CryptoJS = CryptoJS || (function (Math, undefined) {
var crypto;
// Native crypto from window (Browser)
if (typeof window !== 'undefined' && window.crypto) {
crypto = window.crypto;
}
// Native crypto in web worker (Browser)
if (typeof self !== 'undefined' && self.crypto) {
crypto = self.crypto;
}
// Native crypto from worker
if (typeof globalThis !== 'undefined' && globalThis.crypto) {
crypto = globalThis.crypto;
}
// Native (experimental IE 11) crypto from window (Browser)
if (!crypto && typeof window !== 'undefined' && window.msCrypto) {
crypto = window.msCrypto;
}
// Native crypto from global (NodeJS)
if (!crypto && typeof global !== 'undefined' && global.crypto) {
crypto = global.crypto;
}
// Native crypto import via require (NodeJS)
if (!crypto && "function" === 'function') {
try {
crypto = __nccwpck_require__(982);
} catch (err) {}
}
/*
* Cryptographically secure pseudorandom number generator
*
* As Math.random() is cryptographically not safe to use
*/
// Returns one cryptographically secure random 32-bit integer, or throws if
// no native CSPRNG was found by the feature-detection chain above.
var cryptoSecureRandomInt = function () {
if (crypto) {
// Use getRandomValues method (Browser) — yields an UNSIGNED 32-bit value.
if (typeof crypto.getRandomValues === 'function') {
try {
return crypto.getRandomValues(new Uint32Array(1))[0];
} catch (err) {}
}
// Use randomBytes method (NodeJS) — readInt32LE is SIGNED, so this path
// can return negative values. Callers (WordArray.random) treat the result
// as a raw 32-bit word, so the sign difference appears immaterial — but
// NOTE(review): confirm no caller relies on non-negativity.
if (typeof crypto.randomBytes === 'function') {
try {
return crypto.randomBytes(4).readInt32LE();
} catch (err) {}
}
}
// Deliberate hard failure: falling back to Math.random() would be insecure.
throw new Error('Native crypto module could not be used to get secure random number.');
};
/*
* Local polyfill of Object.create
*/
/*
 * Object.create, with a fallback for pre-ES5 engines.
 *
 * The fallback allocates through a shared surrogate constructor: its
 * prototype is pointed at the requested object for the duration of a
 * single `new` call, then detached again so the surrogate retains no
 * stray reference to the prototype.
 */
var create = Object.create || (function () {
    function Surrogate() {}
    return function (proto) {
        Surrogate.prototype = proto;
        var instance = new Surrogate();
        Surrogate.prototype = null;
        return instance;
    };
}());
/**
* CryptoJS namespace.
*/
var C = {};
/**
* Library namespace.
*/
var C_lib = C.lib = {};
/**
* Base object for prototypal inheritance.
*/
var Base = C_lib.Base = (function () {
return {
/**
* Creates a new object that inherits from this object.
*
* @param {Object} overrides Properties to copy into the new object.
*
* @return {Object} The new object.
*
* @static
*
* @example
*
* var MyType = CryptoJS.lib.Base.extend({
* field: 'value',
*
* method: function () {
* }
* });
*/
extend: function (overrides) {
// Spawn
var subtype = create(this);
// Augment
if (overrides) {
subtype.mixIn(overrides);
}
// Create default initializer
if (!subtype.hasOwnProperty('init') || this.init === subtype.init) {
subtype.init = function () {
subtype.$super.init.apply(this, arguments);
};
}
// Initializer's prototype is the subtype object
subtype.init.prototype = subtype;
// Reference supertype
subtype.$super = this;
return subtype;
},
/**
* Extends this object and runs the init method.
* Arguments to create() will be passed to init().
*
* @return {Object} The new object.
*
* @static
*
* @example
*
* var instance = MyType.create();
*/
create: function () {
var instance = this.extend();
instance.init.apply(instance, arguments);
return instance;
},
/**
* Initializes a newly created object.
* Override this method to add some logic when your objects are created.
*
* @example
*
* var MyType = CryptoJS.lib.Base.extend({
* init: function () {
* // ...
* }
* });
*/
init: function () {
},
/**
* Copies properties into this object.
*
* @param {Object} properties The properties to mix in.
*
* @example
*
* MyType.mixIn({
* field: 'value'
* });
*/
mixIn: function (properties) {
for (var propertyName in properties) {
if (properties.hasOwnProperty(propertyName)) {
this[propertyName] = properties[propertyName];
}
}
// IE won't copy toString using the loop above
if (properties.hasOwnProperty('toString')) {
this.toString = properties.toString;
}
},
/**
* Creates a copy of this object.
*
* @return {Object} The clone.
*
* @example
*
* var clone = instance.clone();
*/
clone: function () {
return this.init.prototype.extend(this);
}
};
}());
/**
* An array of 32-bit words.
*
* @property {Array} words The array of 32-bit words.
* @property {number} sigBytes The number of significant bytes in this word array.
*/
var WordArray = C_lib.WordArray = Base.extend({
/**
* Initializes a newly created word array.
*
* @param {Array} words (Optional) An array of 32-bit words.
* @param {number} sigBytes (Optional) The number of significant bytes in the words.
*
* @example
*
* var wordArray = CryptoJS.lib.WordArray.create();
* var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607]);
* var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607], 6);
*/
init: function (words, sigBytes) {
words = this.words = words || [];
if (sigBytes != undefined) {
this.sigBytes = sigBytes;
} else {
this.sigBytes = words.length * 4;
}
},
/**
* Converts this word array to a string.
*
* @param {Encoder} encoder (Optional) The encoding strategy to use. Default: CryptoJS.enc.Hex
*
* @return {string} The stringified word array.
*
* @example
*
* var string = wordArray + '';
* var string = wordArray.toString();
* var string = wordArray.toString(CryptoJS.enc.Utf8);
*/
toString: function (encoder) {
return (encoder || Hex).stringify(this);
},
/**
* Concatenates a word array to this word array.
*
* @param {WordArray} wordArray The word array to append.
*
* @return {WordArray} This word array.
*
* @example
*
* wordArray1.concat(wordArray2);
*/
concat: function (wordArray) {
// Shortcuts
var thisWords = this.words;
var thatWords = wordArray.words;
var thisSigBytes = this.sigBytes;
var thatSigBytes = wordArray.sigBytes;
// Clamp excess bits
this.clamp();
// Concat
if (thisSigBytes % 4) {
// Copy one byte at a time
for (var i = 0; i < thatSigBytes; i++) {
var thatByte = (thatWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;
thisWords[(thisSigBytes + i) >>> 2] |= thatByte << (24 - ((thisSigBytes + i) % 4) * 8);
}
} else {
// Copy one word at a time
for (var j = 0; j < thatSigBytes; j += 4) {
thisWords[(thisSigBytes + j) >>> 2] = thatWords[j >>> 2];
}
}
this.sigBytes += thatSigBytes;
// Chainable
return this;
},
/**
* Removes insignificant bits.
*
* @example
*
* wordArray.clamp();
*/
clamp: function () {
// Shortcuts
var words = this.words;
var sigBytes = this.sigBytes;
// Clamp
words[sigBytes >>> 2] &= 0xffffffff << (32 - (sigBytes % 4) * 8);
words.length = Math.ceil(sigBytes / 4);
},
/**
* Creates a copy of this word array.
*
* @return {WordArray} The clone.
*
* @example
*
* var clone = wordArray.clone();
*/
clone: function () {
var clone = Base.clone.call(this);
clone.words = this.words.slice(0);
return clone;
},
/**
* Creates a word array filled with random bytes.
*
* @param {number} nBytes The number of random bytes to generate.
*
* @return {WordArray} The random word array.
*
* @static
*
* @example
*
* var wordArray = CryptoJS.lib.WordArray.random(16);
*/
random: function (nBytes) {
var words = [];
for (var i = 0; i < nBytes; i += 4) {
words.push(cryptoSecureRandomInt());
}
return new WordArray.init(words, nBytes);
}
});
/**
* Encoder namespace.
*/
var C_enc = C.enc = {};
/**
* Hex encoding strategy.
*/
var Hex = C_enc.Hex = {
/**
* Converts a word array to a hex string.
*
* @param {WordArray} wordArray The word array.
*
* @return {string} The hex string.
*
* @static
*
* @example
*
* var hexString = CryptoJS.enc.Hex.stringify(wordArray);
*/
stringify: function (wordArray) {
// Shortcuts
var words = wordArray.words;
var sigBytes = wordArray.sigBytes;
// Convert
var hexChars = [];
for (var i = 0; i < sigBytes; i++) {
var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;
hexChars.push((bite >>> 4).toString(16));
hexChars.push((bite & 0x0f).toString(16));
}
return hexChars.join('');
},
/**
* Converts a hex string to a word array.
*
* @param {string} hexStr The hex string.
*
* @return {WordArray} The word array.
*
* @static
*
* @example
*
* var wordArray = CryptoJS.enc.Hex.parse(hexString);
*/
parse: function (hexStr) {
// Shortcut
var hexStrLength = hexStr.length;
// Convert
var words = [];
for (var i = 0; i < hexStrLength; i += 2) {
words[i >>> 3] |= parseInt(hexStr.substr(i, 2), 16) << (24 - (i % 8) * 4);
}
return new WordArray.init(words, hexStrLength / 2);
}
};
/**
* Latin1 encoding strategy.
*/
var Latin1 = C_enc.Latin1 = {
/**
* Converts a word array to a Latin1 string.
*
* @param {WordArray} wordArray The word array.
*
* @return {string} The Latin1 string.
*
* @static
*
* @example
*
* var latin1String = CryptoJS.enc.Latin1.stringify(wordArray);
*/
stringify: function (wordArray) {
// Shortcuts
var words = wordArray.words;
var sigBytes = wordArray.sigBytes;
// Convert
var latin1Chars = [];
for (var i = 0; i < sigBytes; i++) {
var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;
latin1Chars.push(String.fromCharCode(bite));
}
return latin1Chars.join('');
},
/**
* Converts a Latin1 string to a word array.
*
* @param {string} latin1Str The Latin1 string.
*
* @return {WordArray} The word array.
*
* @static
*
* @example
*
* var wordArray = CryptoJS.enc.Latin1.parse(latin1String);
*/
parse: function (latin1Str) {
// Shortcut
var latin1StrLength = latin1Str.length;
// Convert
var words = [];
for (var i = 0; i < latin1StrLength; i++) {
words[i >>> 2] |= (latin1Str.charCodeAt(i) & 0xff) << (24 - (i % 4) * 8);
}
return new WordArray.init(words, latin1StrLength);
}
};
/**
* UTF-8 encoding strategy.
*/
/**
 * UTF-8 encoding strategy.
 *
 * Both directions transcode through the Latin1 strategy using the classic
 * escape()/unescape() + URI-component trick, which maps between a byte
 * string and its UTF-8 percent-encoding. NOTE: escape/unescape are
 * deprecated (ECMAScript Annex B) but still universally implemented, and
 * their exact behavior is what makes this round-trip correct.
 */
var Utf8 = C_enc.Utf8 = {
/**
 * Converts a word array to a UTF-8 string.
 *
 * @param {WordArray} wordArray The word array.
 *
 * @return {string} The UTF-8 string.
 *
 * @static
 *
 * @example
 *
 *     var utf8String = CryptoJS.enc.Utf8.stringify(wordArray);
 */
stringify: function (wordArray) {
try {
// bytes -> %-escapes -> decoded UTF-8; decodeURIComponent throws a
// URIError on invalid sequences, rewrapped as a domain-specific error.
return decodeURIComponent(escape(Latin1.stringify(wordArray)));
} catch (e) {
throw new Error('Malformed UTF-8 data');
}
},
/**
 * Converts a UTF-8 string to a word array.
 *
 * @param {string} utf8Str The UTF-8 string.
 *
 * @return {WordArray} The word array.
 *
 * @static
 *
 * @example
 *
 *     var wordArray = CryptoJS.enc.Utf8.parse(utf8String);
 */
parse: function (utf8Str) {
// string -> UTF-8 %-escapes -> one byte per char, then packed by Latin1.
return Latin1.parse(unescape(encodeURIComponent(utf8Str)));
}
};
/**
* Abstract buffered block algorithm template.
*
* The property blockSize must be implemented in a concrete subtype.
*
* @property {number} _minBufferSize The number of blocks that should be kept unprocessed in the buffer. Default: 0
*/
var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm = Base.extend({
/**
* Resets this block algorithm's data buffer to its initial state.
*
* @example
*
* bufferedBlockAlgorithm.reset();
*/
reset: function () {
// Initial values
this._data = new WordArray.init();
this._nDataBytes = 0;
},
/**
* Adds new data to this block algorithm's buffer.
*
* @param {WordArray|string} data The data to append. Strings are converted to a WordArray using UTF-8.
*
* @example
*
* bufferedBlockAlgorithm._append('data');
* bufferedBlockAlgorithm._append(wordArray);
*/
_append: function (data) {
// Convert string to WordArray, else assume WordArray already
if (typeof data == 'string') {
data = Utf8.parse(data);
}
// Append
this._data.concat(data);
this._nDataBytes += data.sigBytes;
},
/**
* Processes available data blocks.
*
* This method invokes _doProcessBlock(offset), which must be implemented by a concrete subtype.
*
* @param {boolean} doFlush Whether all blocks and partial blocks should be processed.
*
* @return {WordArray} The processed data.
*
* @example
*
* var processedData = bufferedBlockAlgorithm._process();
* var processedData = bufferedBlockAlgorithm._process(!!'flush');
*/
_process: function (doFlush) {
var processedWords;
// Shortcuts
var data = this._data;
var dataWords = data.words;
var dataSigBytes = data.sigBytes;
var blockSize = this.blockSize;
var blockSizeBytes = blockSize * 4;
// Count blocks ready
var nBlocksReady = dataSigBytes / blockSizeBytes;
if (doFlush) {
// Round up to include partial blocks
nBlocksReady = Math.ceil(nBlocksReady);
} else {
// Round down to include only full blocks,
// less the number of blocks that must remain in the buffer
nBlocksReady = Math.max((nBlocksReady | 0) - this._minBufferSize, 0);
}
// Count words ready
var nWordsReady = nBlocksReady * blockSize;
// Count bytes ready
var nBytesReady = Math.min(nWordsReady * 4, dataSigBytes);
// Process blocks
if (nWordsReady) {
for (var offset = 0; offset < nWordsReady; offset += blockSize) {
// Perform concrete-algorithm logic
this._doProcessBlock(dataWords, offset);
}
// Remove processed words
processedWords = dataWords.splice(0, nWordsReady);
data.sigBytes -= nBytesReady;
}
// Return processed words
return new WordArray.init(processedWords, nBytesReady);
},
/**
* Creates a copy of this object.
*
* @return {Object} The clone.
*
* @example
*
* var clone = bufferedBlockAlgorithm.clone();
*/
clone: function () {
var clone = Base.clone.call(this);
clone._data = this._data.clone();
return clone;
},
_minBufferSize: 0
});
/**
* Abstract hasher template.
*
* @property {number} blockSize The number of 32-bit words this hasher operates on. Default: 16 (512 bits)
*/
var Hasher = C_lib.Hasher = BufferedBlockAlgorithm.extend({
/**
* Configuration options.
*/
cfg: Base.extend(),
/**
* Initializes a newly created hasher.
*
* @param {Object} cfg (Optional) The configuration options to use for this hash computation.
*
* @example
*
* var hasher = CryptoJS.algo.SHA256.create();
*/
init: function (cfg) {
// Apply config defaults
this.cfg = this.cfg.extend(cfg);
// Set initial values
this.reset();
},
/**
* Resets this hasher to its initial state.
*
* @example
*
* hasher.reset();
*/
reset: function () {
// Reset data buffer
BufferedBlockAlgorithm.reset.call(this);
// Perform concrete-hasher logic
this._doReset();
},
/**
* Updates this hasher with a message.
*
* @param {WordArray|string} messageUpdate The message to append.
*
* @return {Hasher} This hasher.
*
* @example
*
* hasher.update('message');
* hasher.update(wordArray);
*/
update: function (messageUpdate) {
// Append
this._append(messageUpdate);
// Update the hash
this._process();
// Chainable
return this;
},
/**
* Finalizes the hash computation.
* Note that the finalize operation is effectively a destructive, read-once operation.
*
* @param {WordArray|string} messageUpdate (Optional) A final message update.
*
* @return {WordArray} The hash.
*
* @example
*
* var hash = hasher.finalize();
* var hash = hasher.finalize('message');
* var hash = hasher.finalize(wordArray);
*/
finalize: function (messageUpdate) {
// Final message update
if (messageUpdate) {
this._append(messageUpdate);
}
// Perform concrete-hasher logic
var hash = this._doFinalize();
return hash;
},
blockSize: 512/32,
/**
* Creates a shortcut function to a hasher's object interface.
*
* @param {Hasher} hasher The hasher to create a helper for.
*
* @return {Function} The shortcut function.
*
* @static
*
* @example
*
* var SHA256 = CryptoJS.lib.Hasher._createHelper(CryptoJS.algo.SHA256);
*/
_createHelper: function (hasher) {
return function (message, cfg) {
return new hasher.init(cfg).finalize(message);
};
},
/**
* Creates a shortcut function to the HMAC's object interface.
*
* @param {Hasher} hasher The hasher to use in this HMAC helper.
*
* @return {Function} The shortcut function.
*
* @static
*
* @example
*
* var HmacSHA256 = CryptoJS.lib.Hasher._createHmacHelper(CryptoJS.algo.SHA256);
*/
_createHmacHelper: function (hasher) {
return function (message, key) {
return new C_algo.HMAC.init(hasher, key).finalize(message);
};
}
});
/**
* Algorithm namespace.
*/
var C_algo = C.algo = {};
return C;
}(Math));
return CryptoJS;
}));
/***/ }),
/***/ 606:
/***/ (function(module, exports, __nccwpck_require__) {
;(function (root, factory) {
if (true) {
// CommonJS
module.exports = exports = factory(__nccwpck_require__(255));
}
else {}
}(this, function (CryptoJS) {
return CryptoJS.enc.Hex;
}));
/***/ }),
/***/ 377:
/***/ (function(module, exports, __nccwpck_require__) {
;(function (root, factory) {
if (true) {
// CommonJS
module.exports = exports = factory(__nccwpck_require__(255));
}
else {}
}(this, function (CryptoJS) {
(function () {
// Shortcuts
var C = CryptoJS;
var C_lib = C.lib;
var WordArray = C_lib.WordArray;
var Hasher = C_lib.Hasher;
var C_algo = C.algo;
// Reusable object
var W = [];
/**
* SHA-1 hash algorithm.
*/
var SHA1 = C_algo.SHA1 = Hasher.extend({
_doReset: function () {
this._hash = new WordArray.init([
0x67452301, 0xefcdab89,
0x98badcfe, 0x10325476,
0xc3d2e1f0
]);
},
_doProcessBlock: function (M, offset) {
// Shortcut
var H = this._hash.words;
// Working variables
var a = H[0];
var b = H[1];
var c = H[2];
var d = H[3];
var e = H[4];
// Computation
for (var i = 0; i < 80; i++) {
if (i < 16) {
W[i] = M[offset + i] | 0;
} else {
var n = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
W[i] = (n << 1) | (n >>> 31);
}
var t = ((a << 5) | (a >>> 27)) + e + W[i];
if (i < 20) {
t += ((b & c) | (~b & d)) + 0x5a827999;
} else if (i < 40) {
t += (b ^ c ^ d) + 0x6ed9eba1;
} else if (i < 60) {
t += ((b & c) | (b & d) | (c & d)) - 0x70e44324;
} else /* if (i < 80) */ {
t += (b ^ c ^ d) - 0x359d3e2a;
}
e = d;
d = c;
c = (b << 30) | (b >>> 2);
b = a;
a = t;
}
// Intermediate hash value
H[0] = (H[0] + a) | 0;
H[1] = (H[1] + b) | 0;
H[2] = (H[2] + c) | 0;
H[3] = (H[3] + d) | 0;
H[4] = (H[4] + e) | 0;
},
_doFinalize: function () {
// Shortcuts
var data = this._data;
var dataWords = data.words;
var nBitsTotal = this._nDataBytes * 8;
var nBitsLeft = data.sigBytes * 8;
// Add padding
dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32);
dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000);
dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal;
data.sigBytes = dataWords.length * 4;
// Hash final blocks
this._process();
// Return final computed hash
return this._hash;
},
clone: function () {
var clone = Hasher.clone.call(this);
clone._hash = this._hash.clone();
return clone;
}
});
/**
* Shortcut function to the hasher's object interface.
*
* @param {WordArray|string} message The message to hash.
*
* @return {WordArray} The hash.
*
* @static
*
* @example
*
* var hash = CryptoJS.SHA1('message');
* var hash = CryptoJS.SHA1(wordArray);
*/
C.SHA1 = Hasher._createHelper(SHA1);
/**
* Shortcut function to the HMAC's object interface.
*
* @param {WordArray|string} message The message to hash.
* @param {WordArray|string} key The secret key.
*
* @return {WordArray} The HMAC.
*
* @static
*
* @example
*
* var hmac = CryptoJS.HmacSHA1(message, key);
*/
C.HmacSHA1 = Hasher._createHmacHelper(SHA1);
}());
return CryptoJS.SHA1;
}));
/***/ }),
/***/ 982:
/***/ ((module) => {
"use strict";
module.exports = require("crypto");
/***/ }),
/***/ 943:
/***/ ((module) => {
"use strict";
module.exports = require("fs/promises");
/***/ }),
/***/ 928:
/***/ ((module) => {
"use strict";
module.exports = require("path");
/***/ }),
/***/ 287:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({value: true}));// src/index.ts
var _redis = __nccwpck_require__(722);
var _kv = null;
process.env.UPSTASH_DISABLE_TELEMETRY = "1";
var VercelKV = class extends _redis.Redis {
// This API is based on https://github.com/redis/node-redis#scan-iterator which is not supported in @upstash/redis
/**
 * Same as `scan` but returns an AsyncIterator to allow iteration via `for await`.
 *
 * Repeatedly issues SCAN, yielding each returned key, until the server
 * hands back the terminal cursor "0".
 */
async *scanIterator(options) {
    let cursor = "0";
    for (;;) {
        const [nextCursor, keys] = await this.scan(cursor, options);
        yield* keys;
        if (nextCursor === "0") {
            return;
        }
        cursor = nextCursor;
    }
}
/**
 * Same as `hscan` but returns an AsyncIterator to allow iteration via `for await`.
 */
async *hscanIterator(key, options) {
    let cursor = "0";
    for (;;) {
        const [nextCursor, entries] = await this.hscan(key, cursor, options);
        yield* entries;
        if (nextCursor === "0") {
            return;
        }
        cursor = nextCursor;
    }
}
/**
 * Same as `sscan` but returns an AsyncIterator to allow iteration via `for await`.
 */
async *sscanIterator(key, options) {
    let cursor = "0";
    for (;;) {
        const [nextCursor, members] = await this.sscan(key, cursor, options);
        yield* members;
        if (nextCursor === "0") {
            return;
        }
        cursor = nextCursor;
    }
}
/**
 * Same as `zscan` but returns an AsyncIterator to allow iteration via `for await`.
 */
async *zscanIterator(key, options) {
    let cursor = "0";
    for (;;) {
        const [nextCursor, entries] = await this.zscan(key, cursor, options);
        yield* entries;
        if (nextCursor === "0") {
            return;
        }
        cursor = nextCursor;
    }
}
};
/**
 * Builds a VercelKV client. Defaults are applied before spreading the
 * caller's config so any option can be overridden.
 */
function createClient(config) {
  const options = {
    // The Next.js team recommends no value or `default` for fetch requests's `cache` option
    // upstash/redis defaults to `no-store`, so we enforce `default`
    cache: "default",
    enableAutoPipelining: true,
    ...config
  };
  return new VercelKV(options);
}
// Deprecated default export: a lazy proxy, so merely importing the module
// does not require KV env vars. The real client is created on first
// property access and a deprecation warning is printed once.
var src_default = new Proxy(
  {},
  {
    get(target, prop, receiver) {
      // Let promise/duck-typing probes ("then", "parse") through without
      // instantiating a client.
      if (prop === "then" || prop === "parse") {
        return Reflect.get(target, prop, receiver);
      }
      if (!_kv) {
        if (!process.env.KV_REST_API_URL || !process.env.KV_REST_API_TOKEN) {
          throw new Error(
            "@vercel/kv: Missing required environment variables KV_REST_API_URL and KV_REST_API_TOKEN"
          );
        }
        console.warn(
          '\x1B[33m"The default export has been moved to a named export and it will be removed in version 1, change to import { kv }\x1B[0m"'
        );
        _kv = createClient({
          url: process.env.KV_REST_API_URL,
          token: process.env.KV_REST_API_TOKEN
        });
      }
      // Delegate everything else to the lazily created client.
      return Reflect.get(_kv, prop);
    }
  }
);
// Named `kv` export: same lazy-initialization proxy as the default export,
// but without the deprecation warning. Shares the `_kv` singleton.
var kv = new Proxy(
  {},
  {
    get(target, prop) {
      if (!_kv) {
        if (!process.env.KV_REST_API_URL || !process.env.KV_REST_API_TOKEN) {
          throw new Error(
            "@vercel/kv: Missing required environment variables KV_REST_API_URL and KV_REST_API_TOKEN"
          );
        }
        _kv = createClient({
          url: process.env.KV_REST_API_URL,
          token: process.env.KV_REST_API_TOKEN
        });
      }
      return Reflect.get(_kv, prop);
    }
  }
);
exports.VercelKV = VercelKV; exports.createClient = createClient; exports["default"] = src_default; exports.kv = kv;
//# sourceMappingURL=index.cjs.map
/***/ })
/******/ });
/************************************************************************/
/******/ // The module cache
/******/ var __webpack_module_cache__ = {};
/******/
/******/ // The require function
/******/ function __nccwpck_require__(moduleId) {
/******/ // Check if module is in cache
/******/ var cachedModule = __webpack_module_cache__[moduleId];
/******/ if (cachedModule !== undefined) {
/******/ return cachedModule.exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = __webpack_module_cache__[moduleId] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ var threw = true;
/******/ try {
/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__);
/******/ threw = false;
/******/ } finally {
/******/ if(threw) delete __webpack_module_cache__[moduleId];
/******/ }
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/************************************************************************/
/******/ /* webpack/runtime/compat */
/******/
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";
/******/
/************************************************************************/
var __webpack_exports__ = {};
const fs = __nccwpck_require__(943)
const path = __nccwpck_require__(928)
const { createClient } = __nccwpck_require__(287)
/**
 * Reads an examples manifest (a JSON map of example name -> pass boolean)
 * and returns a tab-separated status line plus the minified raw data.
 */
async function collectExamplesResult(manifestFile) {
  const manifestPath = path.join(process.cwd(), manifestFile)
  const results = JSON.parse(await fs.readFile(manifestPath, 'utf-8'))

  // "YYYY-MM-DD HH:MM:SS" (UTC), derived from the ISO timestamp.
  const timestamp = new Date().toISOString().slice(0, 19).replace('T', ' ')

  const outcomes = Object.values(results)
  const passingCount = outcomes.filter(Boolean).length
  const failingCount = outcomes.length - passingCount

  const status = `${process.env.GITHUB_SHA}\t${timestamp}\t${passingCount}/${
    passingCount + failingCount
  }`
  return {
    status,
    // Uses JSON.stringify to create minified JSON, otherwise whitespace is preserved.
    data: JSON.stringify(results),
  }
}
/**
 * Folds one manifest's suites into markdown-style pass/fail lists.
 * Shared by both manifest formats (the loops were previously duplicated
 * verbatim for the v2 and legacy branches). Returns the lists plus counts
 * for the summary line.
 */
function summarizeSuites(suites) {
  let passingTests = ''
  let failingTests = ''
  let passCount = 0
  let failCount = 0
  for (const [testFileName, result] of Object.entries(suites)) {
    const suitePassCount = result.passed.length
    const suiteFailCount = result.failed.length
    if (suitePassCount > 0) {
      passingTests += `${testFileName}\n`
    }
    if (suiteFailCount > 0) {
      failingTests += `${testFileName}\n`
    }
    for (const passed of result.passed) {
      // Escape backticks so names are safe inside markdown code spans.
      const passedName = passed.replaceAll('`', '\\`')
      passingTests += `* ${passedName}\n`
    }
    // (was misleadingly named `passed` while iterating the failed list)
    for (const failed of result.failed) {
      const failedName = failed.replaceAll('`', '\\`')
      failingTests += `* ${failedName}\n`
    }
    passCount += suitePassCount
    failCount += suiteFailCount
    if (suitePassCount > 0) {
      passingTests += `\n`
    }
    if (suiteFailCount > 0) {
      failingTests += `\n`
    }
  }
  return { passingTests, failingTests, passCount, failCount }
}

/**
 * Reads a test manifest and returns the summary line plus the passing and
 * failing test lists. Version 2 manifests nest the suites under a
 * `suites` key; the legacy format keys suites at the top level.
 */
async function collectResults(manifestFile) {
  const file = path.join(process.cwd(), manifestFile)
  const contents = await fs.readFile(file, 'utf-8')
  const results = JSON.parse(contents)

  // "YYYY-MM-DD HH:MM:SS" (UTC).
  const timestamp = new Date().toISOString().slice(0, 19).replace('T', ' ')

  const suites = results.version === 2 ? results.suites : results
  const { passingTests, failingTests, passCount, failCount } =
    summarizeSuites(suites)

  const testRun = `${process.env.GITHUB_SHA}\t${timestamp}\t${passCount}/${
    passCount + failCount
  }`
  return { testRun, passingTests, failingTests }
}
/**
 * Collects the dev/build test manifests and the examples manifest for one
 * bundler (selected by `jsonPrefix`), logs the summaries, and persists
 * them into Vercel KV under `kvPrefix`-namespaced keys. When
 * `deploymentDomain` is given, asks that deployment to revalidate its
 * data cache afterwards (best-effort; failures are only logged).
 */
async function collectAndUpload(
  kv,
  { jsonPrefix, kvPrefix, deploymentDomain }
) {
  const developmentResult = await collectResults(
    `test/${jsonPrefix}dev-tests-manifest.json`
  )
  const productionResult = await collectResults(
    `test/${jsonPrefix}build-tests-manifest.json`
  )
  const developmentExamplesResult = await collectExamplesResult(
    `test/${jsonPrefix}dev-examples-manifest.json`
  )
  console.log('TEST RESULT DEVELOPMENT')
  console.log(developmentResult.testRun)
  console.log('TEST RESULT PRODUCTION')
  console.log(productionResult.testRun)
  console.log('EXAMPLES RESULT')
  console.log(developmentExamplesResult.status)
  // Summary lines are appended to history lists ...
  await kv.rpush(`${kvPrefix}test-runs`, developmentResult.testRun)
  await kv.rpush(`${kvPrefix}test-runs-production`, productionResult.testRun)
  await kv.rpush(`${kvPrefix}examples-runs`, developmentExamplesResult.status)
  console.log('SUCCESSFULLY SAVED RUNS')
  // ... while the detailed lists overwrite single keys.
  await kv.set(`${kvPrefix}passing-tests`, developmentResult.passingTests)
  await kv.set(
    `${kvPrefix}passing-tests-production`,
    productionResult.passingTests
  )
  console.log('SUCCESSFULLY SAVED PASSING')
  await kv.set(`${kvPrefix}failing-tests`, developmentResult.failingTests)
  await kv.set(
    `${kvPrefix}failing-tests-production`,
    productionResult.failingTests
  )
  console.log('SUCCESSFULLY SAVED FAILING')
  await kv.set(`${kvPrefix}examples-data`, developmentExamplesResult.data)
  console.log('SUCCESSFULLY SAVED EXAMPLES')
  if (deploymentDomain != null) {
    // Upstash does not provide strong consistency, so just wait a couple
    // seconds before invalidating the cache in case of replication lag.
    //
    // https://upstash.com/docs/redis/features/consistency
    await new Promise((resolve) => setTimeout(resolve, 2000))
    try {
      const response = await fetch(
        `https://${deploymentDomain}/api/revalidate`,
        {
          method: 'POST',
          headers: {
            'X-Auth-Token': process.env.TURBOYET_TOKEN,
            'Content-Type': 'application/json',
          },
        }
      )
      const responseJson = await response.json()
      if (!responseJson.revalidated) {
        throw new Error(responseJson.error)
      }
      console.log('SUCCESSFULLY REVALIDATED VERCEL DATA CACHE')
    } catch (error) {
      // non-fatal: the cache will eventually expire anyways
      console.error('FAILED TO REVALIDATE VERCEL DATA CACHE', error)
    }
  }
}
/**
 * Entry point: uploads Turbopack and rspack test data. Failures are
 * logged but never fail the CI job — this is best-effort reporting.
 */
async function main() {
  try {
    const client = createClient({
      url: process.env.TURBOYET_KV_REST_API_URL,
      token: process.env.TURBOYET_KV_REST_API_TOKEN,
    })
    const targets = [
      {
        label: 'TURBOPACK',
        jsonPrefix: 'turbopack-',
        kvPrefix: '',
        deploymentDomain: 'areweturboyet.com',
      },
      {
        label: 'RSPACK',
        jsonPrefix: 'rspack-',
        kvPrefix: 'rspack-',
        deploymentDomain: 'arewerspackyet.com',
      },
    ]
    for (const { label, ...config } of targets) {
      console.log(`### UPLOADING ${label} DATA`)
      await collectAndUpload(client, config)
    }
  } catch (error) {
    console.log(error)
  }
}

main()
module.exports = __webpack_exports__;
/******/ })()
;
//# sourceMappingURL=index.js.map | javascript | github | https://github.com/vercel/next.js | .github/actions/upload-turboyet-data/dist/index.js |
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "InefficientStringConcatenationCheck.h"
#include "clang/ASTMatchers/ASTMatchFinder.h"
using namespace clang::ast_matchers;
namespace clang::tidy::performance {
// Serializes the check's configuration so it round-trips through a
// .clang-tidy file (mirrors the Options.get in the constructor).
void InefficientStringConcatenationCheck::storeOptions(
    ClangTidyOptions::OptionMap &Opts) {
  Options.store(Opts, "StrictMode", StrictMode);
}
// StrictMode defaults to false: by default the check only warns about
// concatenation inside loops (see registerMatchers).
InefficientStringConcatenationCheck::InefficientStringConcatenationCheck(
    StringRef Name, ClangTidyContext *Context)
    : ClangTidyCheck(Name, Context),
      StrictMode(Options.get("StrictMode", false)) {}
// Registers matchers for the two inefficient-concatenation shapes:
//   s = ... s ... + ...;  (assignment whose RHS re-reads the LHS string)
//   a + b + ...           (nested operator+ chains on std::basic_string)
// In the default (non-strict) mode, matches are restricted to loop
// bodies, where the repeated temporaries are most costly.
void InefficientStringConcatenationCheck::registerMatchers(
    MatchFinder *Finder) {
  // A DeclRefExpr whose type desugars to std::basic_string (any char type).
  const auto BasicStringType =
      hasType(qualType(hasUnqualifiedDesugaredType(recordType(
          hasDeclaration(cxxRecordDecl(hasName("::std::basic_string")))))));
  // An overloaded operator+ with a basic_string DeclRef operand.
  const auto BasicStringPlusOperator = cxxOperatorCallExpr(
      hasOverloadedOperatorName("+"),
      hasAnyArgument(ignoringImpCasts(declRefExpr(BasicStringType))));
  // A '+' chain: a string operator+ containing another string operator+.
  const auto PlusOperator =
      cxxOperatorCallExpr(
          hasOverloadedOperatorName("+"),
          hasAnyArgument(ignoringImpCasts(declRefExpr(BasicStringType))),
          hasDescendant(BasicStringPlusOperator))
          .bind("plusOperator");
  // An assignment to a string whose RHS references the same declaration
  // (equalsBoundNode ties the two sides together) and involves operator+.
  const auto AssignOperator = cxxOperatorCallExpr(
      hasOverloadedOperatorName("="),
      hasArgument(0, declRefExpr(BasicStringType,
                                 hasDeclaration(decl().bind("lhsStrT")))
                         .bind("lhsStr")),
      hasArgument(1, stmt(hasDescendant(declRefExpr(
                         hasDeclaration(decl(equalsBoundNode("lhsStrT"))))))),
      hasDescendant(BasicStringPlusOperator));
  if (StrictMode) {
    Finder->addMatcher(cxxOperatorCallExpr(anyOf(AssignOperator, PlusOperator)),
                       this);
  } else {
    // Non-strict: only flag concatenation inside for/range-for/while loops.
    Finder->addMatcher(
        cxxOperatorCallExpr(anyOf(AssignOperator, PlusOperator),
                            hasAncestor(stmt(anyOf(cxxForRangeStmt(),
                                                   whileStmt(), forStmt())))),
        this);
  }
}
// Emits the diagnostic at whichever node the matchers bound: the assigned
// string ("lhsStr") if present, otherwise the '+' chain ("plusOperator").
void InefficientStringConcatenationCheck::check(
    const MatchFinder::MatchResult &Result) {
  static const char *const Description =
      "string concatenation results in allocation of unnecessary temporary "
      "strings; consider using 'operator+=' or 'string::append()' instead";
  if (const auto *LhsStr = Result.Nodes.getNodeAs<DeclRefExpr>("lhsStr")) {
    diag(LhsStr->getExprLoc(), Description);
    return;
  }
  if (const auto *PlusOp =
          Result.Nodes.getNodeAs<CXXOperatorCallExpr>("plusOperator"))
    diag(PlusOp->getExprLoc(), Description);
}
} // namespace clang::tidy::performance | cpp | github | https://github.com/llvm/llvm-project | clang-tools-extra/clang-tidy/performance/InefficientStringConcatenationCheck.cpp |
/*-------------------------------------------------------------------------
*
* pg_publication.h
* definition of the "publication" system catalog (pg_publication)
*
* Portions Copyright (c) 1996-2026, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* src/include/catalog/pg_publication.h
*
* NOTES
* The Catalog.pm module reads this file and derives schema
* information.
*
*-------------------------------------------------------------------------
*/
#ifndef PG_PUBLICATION_H
#define PG_PUBLICATION_H
#include "catalog/genbki.h"
#include "catalog/objectaddress.h"
#include "catalog/pg_publication_d.h" /* IWYU pragma: export */
/* ----------------
* pg_publication definition. cpp turns this into
* typedef struct FormData_pg_publication
* ----------------
*/
CATALOG(pg_publication,6104,PublicationRelationId)
{
Oid oid; /* oid */
NameData pubname; /* name of the publication */
Oid pubowner BKI_LOOKUP(pg_authid); /* publication owner */
/*
* indicates that this is special publication which should encompass all
* tables in the database (except for the unlogged and temp ones)
*/
bool puballtables;
/*
* indicates that this is special publication which should encompass all
* sequences in the database (except for the unlogged and temp ones)
*/
bool puballsequences;
/* true if inserts are published */
bool pubinsert;
/* true if updates are published */
bool pubupdate;
/* true if deletes are published */
bool pubdelete;
/* true if truncates are published */
bool pubtruncate;
/* true if partition changes are published using root schema */
bool pubviaroot;
/*
* 'n'(none) if generated column data should not be published. 's'(stored)
* if stored generated column data should be published.
*/
char pubgencols;
} FormData_pg_publication;
/* ----------------
* Form_pg_publication corresponds to a pointer to a tuple with
* the format of pg_publication relation.
* ----------------
*/
typedef FormData_pg_publication *Form_pg_publication;
DECLARE_UNIQUE_INDEX_PKEY(pg_publication_oid_index, 6110, PublicationObjectIndexId, pg_publication, btree(oid oid_ops));
DECLARE_UNIQUE_INDEX(pg_publication_pubname_index, 6111, PublicationNameIndexId, pg_publication, btree(pubname name_ops));
MAKE_SYSCACHE(PUBLICATIONOID, pg_publication_oid_index, 8);
MAKE_SYSCACHE(PUBLICATIONNAME, pg_publication_pubname_index, 8);
typedef struct PublicationActions
{
bool pubinsert;
bool pubupdate;
bool pubdelete;
bool pubtruncate;
} PublicationActions;
typedef struct PublicationDesc
{
PublicationActions pubactions;
/*
* true if the columns referenced in row filters which are used for UPDATE
* or DELETE are part of the replica identity or the publication actions
* do not include UPDATE or DELETE.
*/
bool rf_valid_for_update;
bool rf_valid_for_delete;
/*
* true if the columns are part of the replica identity or the publication
* actions do not include UPDATE or DELETE.
*/
bool cols_valid_for_update;
bool cols_valid_for_delete;
/*
* true if all generated columns that are part of replica identity are
* published or the publication actions do not include UPDATE or DELETE.
*/
bool gencols_valid_for_update;
bool gencols_valid_for_delete;
} PublicationDesc;
#ifdef EXPOSE_TO_CLIENT_CODE
typedef enum PublishGencolsType
{
/* Generated columns present should not be replicated. */
PUBLISH_GENCOLS_NONE = 'n',
/* Generated columns present should be replicated. */
PUBLISH_GENCOLS_STORED = 's',
} PublishGencolsType;
#endif /* EXPOSE_TO_CLIENT_CODE */
/*
 * In-memory representation of a pg_publication row, with the per-operation
 * publish flags collected into a PublicationActions struct.
 */
typedef struct Publication
{
	Oid			oid;			/* publication OID */
	char	   *name;			/* publication name */
	bool		alltables;		/* covers all tables in the database? */
	bool		allsequences;	/* covers all sequences in the database? */
	bool		pubviaroot;		/* publish partition changes via root? */
	PublishGencolsType pubgencols_type; /* how generated columns are handled */
	PublicationActions pubactions;	/* which DML operations are published */
} Publication;
/*
 * A relation being added to a publication, together with its optional
 * row filter and column list (see publication_add_relation below).
 */
typedef struct PublicationRelInfo
{
	Relation	relation;		/* the relation itself */
	Node	   *whereClause;	/* row-filter expression, if any */
	List	   *columns;		/* column list, if any */
} PublicationRelInfo;
extern Publication *GetPublication(Oid pubid);
extern Publication *GetPublicationByName(const char *pubname, bool missing_ok);
extern List *GetRelationPublications(Oid relid);
/*---------
* Expected values for pub_partopt parameter of GetPublicationRelations(),
* which allows callers to specify which partitions of partitioned tables
* mentioned in the publication they expect to see.
*
* ROOT: only the table explicitly mentioned in the publication
* LEAF: only leaf partitions in given tree
* ALL: all partitions in given tree
*/
typedef enum PublicationPartOpt
{
PUBLICATION_PART_ROOT,
PUBLICATION_PART_LEAF,
PUBLICATION_PART_ALL,
} PublicationPartOpt;
extern List *GetPublicationRelations(Oid pubid, PublicationPartOpt pub_partopt);
extern List *GetAllTablesPublications(void);
extern List *GetAllPublicationRelations(char relkind, bool pubviaroot);
extern List *GetPublicationSchemas(Oid pubid);
extern List *GetSchemaPublications(Oid schemaid);
extern List *GetSchemaPublicationRelations(Oid schemaid,
PublicationPartOpt pub_partopt);
extern List *GetAllSchemaPublicationRelations(Oid pubid,
PublicationPartOpt pub_partopt);
extern List *GetPubPartitionOptionRelations(List *result,
PublicationPartOpt pub_partopt,
Oid relid);
extern Oid GetTopMostAncestorInPublication(Oid puboid, List *ancestors,
int *ancestor_level);
extern bool is_publishable_relation(Relation rel);
extern bool is_schema_publication(Oid pubid);
extern bool check_and_fetch_column_list(Publication *pub, Oid relid,
MemoryContext mcxt, Bitmapset **cols);
extern ObjectAddress publication_add_relation(Oid pubid, PublicationRelInfo *pri,
bool if_not_exists);
extern Bitmapset *pub_collist_validate(Relation targetrel, List *columns);
extern ObjectAddress publication_add_schema(Oid pubid, Oid schemaid,
bool if_not_exists);
extern Bitmapset *pub_collist_to_bitmapset(Bitmapset *columns, Datum pubcols,
MemoryContext mcxt);
extern Bitmapset *pub_form_cols_map(Relation relation,
PublishGencolsType include_gencols_type);
#endif /* PG_PUBLICATION_H */ | c | github | https://github.com/postgres/postgres | src/include/catalog/pg_publication.h |
import pytest
from pandas.util._validators import validate_args_and_kwargs
@pytest.fixture
def _fname():
    # Dummy function name interpolated into the expected error messages.
    return "func"
def test_invalid_total_length_max_length_one(_fname):
    """Too many total arguments with a single compat arg -> TypeError."""
    compat_args = ("foo",)
    kwargs = {"foo": "FOO"}
    args = ("FoO", "BaZ")
    min_fname_arg_count = 0

    permitted = min_fname_arg_count + len(compat_args)
    supplied = min_fname_arg_count + len(args) + len(kwargs)
    # Singular "argument" because only one argument is permitted.
    expected_msg = (
        rf"{_fname}\(\) takes at most {permitted} "
        rf"argument \({supplied} given\)"
    )

    with pytest.raises(TypeError, match=expected_msg):
        validate_args_and_kwargs(_fname, args, kwargs, min_fname_arg_count, compat_args)
def test_invalid_total_length_max_length_multiple(_fname):
    # Same overflow scenario as the single-arg test, but with enough compat
    # args that the error message uses the plural "arguments".
    compat_args = ("foo", "bar", "baz")
    kwargs = {"foo": "FOO", "bar": "BAR"}
    args = ("FoO", "BaZ")
    min_fname_arg_count = 2

    max_length = len(compat_args) + min_fname_arg_count
    actual_length = len(kwargs) + len(args) + min_fname_arg_count
    msg = (
        rf"{_fname}\(\) takes at most {max_length} "
        rf"arguments \({actual_length} given\)"
    )

    with pytest.raises(TypeError, match=msg):
        validate_args_and_kwargs(_fname, args, kwargs, min_fname_arg_count, compat_args)
@pytest.mark.parametrize("args,kwargs", [((), {"foo": -5, "bar": 2}), ((-5, 2), {})])
def test_missing_args_or_kwargs(args, kwargs, _fname):
    # "bar" is overridden (positionally or by keyword) away from its compat
    # default, which pandas does not support -> ValueError.
    bad_arg = "bar"
    min_fname_arg_count = 2
    compat_args = {"foo": -5, bad_arg: 1}

    msg = (
        rf"the '{bad_arg}' parameter is not supported "
        rf"in the pandas implementation of {_fname}\(\)"
    )

    with pytest.raises(ValueError, match=msg):
        validate_args_and_kwargs(_fname, args, kwargs, min_fname_arg_count, compat_args)
def test_duplicate_argument(_fname):
    """A positional value that duplicates a keyword raises TypeError."""
    min_fname_arg_count = 2
    compat_args = {"foo": None, "bar": None, "baz": None}
    kwargs = {"foo": None, "bar": None}
    # One positional arg maps to "foo", which is also passed by keyword.
    args = (None,)

    expected = rf"{_fname}\(\) got multiple values for keyword argument 'foo'"
    with pytest.raises(TypeError, match=expected):
        validate_args_and_kwargs(_fname, args, kwargs, min_fname_arg_count, compat_args)
def test_validation(_fname):
# No exceptions should be raised.
compat_args = {"foo": 1, "bar": None, "baz": -2}
kwargs = {"baz": -2}
args = (1, None)
min_fname_arg_count = 2
validate_args_and_kwargs(_fname, args, kwargs, min_fname_arg_count, compat_args) | python | github | https://github.com/pandas-dev/pandas | pandas/tests/util/test_validate_args_and_kwargs.py |
<script src="../../dist/vue.global.js"></script>
<!-- item template -->
<script type="text/x-template" id="item-template">
<li>
<div
:class="{bold: isFolder}"
@click="toggle"
@dblclick="changeType">
{{model.name}}
<span v-if="isFolder">[{{open ? '-' : '+'}}]</span>
</div>
<ul v-if="isFolder" v-show="open">
<tree-item
class="item"
v-for="model in model.children"
:model="model">
</tree-item>
<li class="add" @click="addChild">+</li>
</ul>
</li>
</script>
<!-- item script -->
<script>
const { reactive, computed, toRefs } = Vue

// Recursive tree-node component. It can render itself for each child
// thanks to the self-reference enabled by `name: 'TreeItem'`.
const TreeItem = {
  name: 'TreeItem', // necessary for self-reference
  template: '#item-template',
  props: {
    model: Object,
  },
  setup(props) {
    const state = reactive({
      // Whether this folder's children are currently shown.
      open: false,
      // A node counts as a folder once it has a non-empty children array.
      isFolder: computed(() => {
        return props.model.children && props.model.children.length
      }),
    })

    // Expand/collapse a folder node (single click).
    function toggle() {
      state.open = !state.open
    }

    // Double click: turn a leaf into a folder — give it a children array,
    // seed it with one child, and open it.
    function changeType() {
      if (!state.isFolder) {
        props.model.children = []
        addChild()
        state.open = true
      }
    }

    function addChild() {
      props.model.children.push({ name: 'new stuff' })
    }

    return {
      ...toRefs(state),
      toggle,
      changeType,
      addChild,
    }
  },
}
</script>
<p>(You can double click on an item to turn it into a folder.)</p>
<!-- the app root element -->
<ul id="demo">
<tree-item class="item" :model="treeData"></tree-item>
</ul>
<script>
// Demo data: a small nested tree rendered by the recursive TreeItem.
const treeData = {
  name: 'My Tree',
  children: [
    { name: 'hello' },
    { name: 'wat' },
    {
      name: 'child folder',
      children: [
        {
          name: 'child folder',
          children: [{ name: 'hello' }, { name: 'wat' }],
        },
        { name: 'hello' },
        { name: 'wat' },
        {
          name: 'child folder',
          children: [{ name: 'hello' }, { name: 'wat' }],
        },
      ],
    },
  ],
}

// Root app: exposes `treeData` to the <tree-item> inside #demo.
Vue.createApp({
  components: {
    TreeItem,
  },
  data: () => ({
    treeData,
  }),
}).mount('#demo')
</script>
<style>
body {
  font-family: Menlo, Consolas, monospace;
  color: #444;
}
.item {
  cursor: pointer;
}
.bold {
  font-weight: bold;
}
ul {
  padding-left: 1em;
  line-height: 1.5em;
  /* Was `dot`, which is not a valid list-style-type keyword; browsers
     dropped the invalid declaration and fell back to the UA default
     (disc for ul), so spelling out `disc` preserves the rendering. */
  list-style-type: disc;
}
</style>
"""Test suite for the profile module."""
import sys
import pstats
import unittest
from difflib import unified_diff
from io import StringIO
from test.support import run_unittest
import profile
from test.profilee import testfunc, timer
class ProfileTest(unittest.TestCase):
    """Exercise the pure-Python profiler against pre-recorded stats output.

    The cProfile tests subclass this and override the class attributes
    below, so the same machinery checks both profilers.
    """

    # Profiler implementation under test (overridden by subclasses).
    profilerclass = profile.Profile
    # pstats.Stats reporting methods whose output is compared.
    methodnames = ['print_stats', 'print_callers', 'print_callees']
    # Substring expected in the stats of a profiled call to max().
    expected_max_output = ':0(max)'

    def get_expected_output(self):
        # _ProfileOutput is the auto-generated dict at the bottom of this
        # file (below the '#--cut' marker).
        return _ProfileOutput

    @classmethod
    def do_profiling(cls):
        """Profile testfunc() under the fake timer.

        Returns [elapsed, stats_output, callers_output, callees_output]
        with one output string per entry in ``methodnames``.
        """
        results = []
        # The deterministic fake timer makes the formatted stats
        # reproducible across runs/platforms.
        prof = cls.profilerclass(timer, 0.001)
        start_timer = timer()
        prof.runctx("testfunc()", globals(), locals())
        results.append(timer() - start_timer)
        for methodname in cls.methodnames:
            s = StringIO()
            stats = pstats.Stats(prof, stream=s)
            stats.strip_dirs().sort_stats("stdname")
            getattr(stats, methodname)()
            output = s.getvalue().splitlines()
            mod_name = testfunc.__module__.rsplit('.', 1)[1]
            # Only compare against stats originating from the test file.
            # Prevents outside code (e.g., the io module) from causing
            # unexpected output.
            output = [line.rstrip() for line in output if mod_name in line]
            results.append('\n'.join(output))
        return results

    def test_cprofile(self):
        results = self.do_profiling()
        expected = self.get_expected_output()
        # Elapsed fake-timer ticks for the profiled run.
        self.assertEqual(results[0], 1000)
        for i, method in enumerate(self.methodnames):
            # NOTE(review): mismatches are only printed (with a diff), not
            # failed -- confirm this lenient behaviour is still intended.
            if results[i+1] != expected[method]:
                print("Stats.%s output for %s doesn't fit expectation!" %
                      (method, self.profilerclass.__name__))
                print('\n'.join(unified_diff(
                    results[i+1].split('\n'),
                    expected[method].split('\n'))))

    def test_calling_conventions(self):
        # Issue #5330: profile and cProfile wouldn't report C functions called
        # with keyword arguments. We test all calling conventions.
        stmts = [
            "max([0])",
            "max([0], key=int)",
            "max([0], **dict(key=int))",
            "max(*([0],))",
            "max(*([0],), key=int)",
            "max(*([0],), **dict(key=int))",
        ]
        for stmt in stmts:
            s = StringIO()
            prof = self.profilerclass(timer, 0.001)
            prof.runctx(stmt, globals(), locals())
            stats = pstats.Stats(prof, stream=s)
            stats.print_stats()
            res = s.getvalue()
            self.assertIn(self.expected_max_output, res,
                          "Profiling {0!r} didn't report max:\n{1}".format(stmt, res))
def regenerate_expected_output(filename, cls):
    """Rewrite everything below the '#--cut' marker in *filename* with
    freshly profiled output from ``cls.do_profiling()``.

    Invoked via ``python <this file> -r`` (see main()) after intentional
    changes to the profiler's output format.
    """
    # Strip a trailing 'c'/'o' so the .py source is edited, not the
    # compiled .pyc/.pyo path that __file__ may point at.
    filename = filename.rstrip('co')
    print('Regenerating %s...' % filename)
    results = cls.do_profiling()

    # Keep the file verbatim up to and including the cut marker line.
    newfile = []
    with open(filename, 'r') as f:
        for line in f:
            newfile.append(line)
            if line.startswith('#--cut'):
                break

    # Rewrite the file: preserved head, then regenerated data and the
    # __main__ guard.
    with open(filename, 'w') as f:
        f.writelines(newfile)
        f.write("_ProfileOutput = {}\n")
        for i, method in enumerate(cls.methodnames):
            f.write('_ProfileOutput[%r] = """\\\n%s"""\n' % (
                method, results[i+1]))
        f.write('\nif __name__ == "__main__":\n main()\n')
def test_main():
    # Entry point used by the regression-test framework.
    run_unittest(ProfileTest)
def main():
    """Run the test suite, or regenerate expected output when '-r' is given."""
    if '-r' in sys.argv:
        regenerate_expected_output(__file__, ProfileTest)
    else:
        test_main()
# Don't remove this comment. Everything below it is auto-generated.
#--cut--------------------------------------------------------------------------
_ProfileOutput = {}
_ProfileOutput['print_stats'] = """\
28 27.972 0.999 27.972 0.999 profilee.py:110(__getattr__)
1 269.996 269.996 999.769 999.769 profilee.py:25(testfunc)
23/3 149.937 6.519 169.917 56.639 profilee.py:35(factorial)
20 19.980 0.999 19.980 0.999 profilee.py:48(mul)
2 39.986 19.993 599.830 299.915 profilee.py:55(helper)
4 115.984 28.996 119.964 29.991 profilee.py:73(helper1)
2 -0.006 -0.003 139.946 69.973 profilee.py:84(helper2_indirect)
8 311.976 38.997 399.912 49.989 profilee.py:88(helper2)
8 63.976 7.997 79.960 9.995 profilee.py:98(subhelper)"""
_ProfileOutput['print_callers'] = """\
:0(append) <- profilee.py:73(helper1)(4) 119.964
:0(exc_info) <- profilee.py:73(helper1)(4) 119.964
:0(hasattr) <- profilee.py:73(helper1)(4) 119.964
profilee.py:88(helper2)(8) 399.912
profilee.py:110(__getattr__) <- :0(hasattr)(12) 11.964
profilee.py:98(subhelper)(16) 79.960
profilee.py:25(testfunc) <- <string>:1(<module>)(1) 999.767
profilee.py:35(factorial) <- profilee.py:25(testfunc)(1) 999.769
profilee.py:35(factorial)(20) 169.917
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:48(mul) <- profilee.py:35(factorial)(20) 169.917
profilee.py:55(helper) <- profilee.py:25(testfunc)(2) 999.769
profilee.py:73(helper1) <- profilee.py:55(helper)(4) 599.830
profilee.py:84(helper2_indirect) <- profilee.py:55(helper)(2) 599.830
profilee.py:88(helper2) <- profilee.py:55(helper)(6) 599.830
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:98(subhelper) <- profilee.py:88(helper2)(8) 399.912"""
_ProfileOutput['print_callees'] = """\
:0(hasattr) -> profilee.py:110(__getattr__)(12) 27.972
<string>:1(<module>) -> profilee.py:25(testfunc)(1) 999.769
profilee.py:110(__getattr__) ->
profilee.py:25(testfunc) -> profilee.py:35(factorial)(1) 169.917
profilee.py:55(helper)(2) 599.830
profilee.py:35(factorial) -> profilee.py:35(factorial)(20) 169.917
profilee.py:48(mul)(20) 19.980
profilee.py:48(mul) ->
profilee.py:55(helper) -> profilee.py:73(helper1)(4) 119.964
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:88(helper2)(6) 399.912
profilee.py:73(helper1) -> :0(append)(4) -0.004
profilee.py:84(helper2_indirect) -> profilee.py:35(factorial)(2) 169.917
profilee.py:88(helper2)(2) 399.912
profilee.py:88(helper2) -> :0(hasattr)(8) 11.964
profilee.py:98(subhelper)(8) 79.960
profilee.py:98(subhelper) -> profilee.py:110(__getattr__)(16) 27.972"""
if __name__ == "__main__":
main() | unknown | codeparrot/codeparrot-clean | ||
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Author: darksky83
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
import urllib
import xbmcgui
import xbmcplugin
from common_variables import *
# Function to add a Show directory
def addprograma(name, url, mode, iconimage, number_of_items, information, fanart_image=''):
    """Add a TV-show folder item to the Kodi directory listing.

    name/url/mode/iconimage are round-tripped through the plugin URL;
    `information` is an infolabel dict whose optional "plot" is also
    forwarded. Fanart falls back to the icon, then to the bundled
    fanart.png. Returns the result of xbmcplugin.addDirectoryItem.
    """
    # Prefer explicit fanart, then the icon, then the add-on default art.
    if (fanart_image == ''):
        if iconimage:
            fanart_image = iconimage
    u = sys.argv[0] + "?url=" + urllib.quote_plus(url) + "&mode=" + str(mode) + "&name=" + urllib.quote_plus(
        name) + "&iconimage=" + urllib.quote_plus(fanart_image)
    try:
        u += "&plot=" + urllib.quote_plus(information["plot"])
    except (KeyError, TypeError):
        # "plot" is optional; `information` may not carry it (was a bare
        # except that hid every error).
        pass
    liz = xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    if fanart_image == '':
        liz.setProperty('fanart_image', os.path.join(artfolder, 'fanart.png'))
    else:
        liz.setProperty('fanart_image', fanart_image)
    liz.setInfo(type="Video", infoLabels=information)
    # The dead `ok = True` pre-assignment was removed; return the call
    # result directly.
    return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz, isFolder=True,
                                       totalItems=number_of_items)
# Function to add a Episode
def addepisode(name, url, mode, iconimage, number_of_items, information, fanart_image):
    """Add a playable episode entry (non-folder) to the directory listing.

    Mirrors addprograma() but marks the item as not a folder and requires
    an explicit fanart_image. Returns xbmcplugin.addDirectoryItem's result.
    """
    u = sys.argv[0] + "?url=" + urllib.quote_plus(url) + "&mode=" + str(mode) + "&name=" + urllib.quote_plus(
        name) + "&iconimage=" + urllib.quote_plus(iconimage)
    liz = xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    # Fall back to the add-on's bundled fanart when none is supplied.
    if fanart_image == '':
        liz.setProperty('fanart_image', os.path.join(artfolder, 'fanart.png'))
    else:
        liz.setProperty('fanart_image', fanart_image)
    liz.setInfo(type="Video", infoLabels=information)
    # Dead `ok = True` pre-assignment removed; return the call result.
    return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz, isFolder=False,
                                       totalItems=number_of_items)
# Function to add a video/audio Link
def addLink(name, url, iconimage, number_of_items):
    """Add a directly-playable media link (no plugin round-trip URL).

    Returns the result of xbmcplugin.addDirectoryItem.
    """
    liz = xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=iconimage)
    liz.setProperty('fanart_image', os.path.join(artfolder, 'fanart.png'))
    liz.setInfo(type="Video", infoLabels={"Title": name})
    # Dead `ok = True` pre-assignment removed; return the call result.
    return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=url, listitem=liz, isFolder=False,
                                       totalItems=number_of_items)
# Function to add a regular directory
def addDir(name, url, mode, iconimage, number_of_items, pasta=True, informacion=None):
    """Add a generic directory entry.

    `pasta` ("folder" in Portuguese) controls isFolder; `informacion` may
    carry an optional "plot" forwarded in the plugin URL. Returns the
    result of xbmcplugin.addDirectoryItem.
    """
    u = sys.argv[0] + "?url=" + urllib.quote_plus(url) + "&mode=" + str(mode) + "&name=" + urllib.quote_plus(name)
    try:
        u += "&plot=" + urllib.quote_plus(informacion["plot"])
    except (KeyError, TypeError):
        # "plot" is optional and `informacion` defaults to None (was a
        # bare except that hid every error).
        pass
    liz = xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    liz.setProperty('fanart_image', os.path.join(artfolder, 'fanart.png'))
    liz.setInfo(type="Video", infoLabels={"Title": name})
    # Dead `ok = True` pre-assignment removed; return the call result.
    return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz, isFolder=pasta,
                                       totalItems=number_of_items)
from django.dispatch.saferef import *
from django.utils import unittest
class Test1(object):
    """Carrier of a bound method (``x``) used as a safeRef target."""
    def x(self):
        """Do nothing; exists only so ``instance.x`` is a bound method."""
        return None
def test2(obj):
pass
class Test2(object):
    """Callable-instance target (implements ``__call__``) for safeRef."""
    def __call__(self, obj):
        """Do nothing when invoked."""
        return None
class Tester(unittest.TestCase):
    """Exercise django.dispatch.saferef.safeRef against bound methods,
    plain functions and callable instances (Python 2 test module)."""
    def setUp(self):
        # Parallel lists: ts keeps the referents alive for the test's
        # duration, ss holds safe references to each of them.
        ts = []
        ss = []
        for x in xrange(5000):
            t = Test1()
            ts.append(t)
            s = safeRef(t.x, self._closure)
            ss.append(s)
        # One plain (module-level) function referent.
        ts.append(test2)
        ss.append(safeRef(test2, self._closure))
        # Callable-instance referents (objects implementing __call__).
        for x in xrange(30):
            t = Test2()
            ts.append(t)
            s = safeRef(t, self._closure)
            ss.append(s)
        self.ts = ts
        self.ss = ss
        # Incremented by _closure whenever a referent is collected.
        self.closureCount = 0
    def tearDown(self):
        # Drop the referents so the weak references can die between tests.
        del self.ts
        del self.ss
    def testIn(self):
        """Test the "in" operator for safe references (cmp)"""
        for t in self.ts[:50]:
            self.assertTrue(safeRef(t.x) in self.ss)
    def testValid(self):
        """Test that the references are valid (return instance methods)"""
        for s in self.ss:
            self.assertTrue(s())
    def testShortCircuit (self):
        """Test that creation short-circuits to reuse existing references"""
        # If safeRef returned a fresh object per call, the dict lookups
        # below would miss; identity/equality must match the stored refs.
        sd = {}
        for s in self.ss:
            sd[s] = 1
        for t in self.ts:
            if hasattr(t, 'x'):
                self.assertTrue(sd.has_key(safeRef(t.x)))
                self.assertTrue(safeRef(t.x) in sd)
            else:
                self.assertTrue(sd.has_key(safeRef(t)))
                self.assertTrue(safeRef(t) in sd)
    def testRepresentation (self):
        """Test that the reference object's representation works
        XXX Doesn't currently check the results, just that no error
        is raised
        """
        repr(self.ss[-1])
    def _closure(self, ref):
        """Dumb utility mechanism to increment deletion counter"""
        self.closureCount +=1
def getSuite():
    # Collect every test* method from Tester into a single suite.
    return unittest.makeSuite(Tester,'test')
if __name__ == "__main__":
    # Allow running this module directly as a script.
    unittest.main()
# (C) British Crown Copyright 2014 - 2016, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Unit tests for `iris.fileformats.grib.message.Section`.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import gribapi
import numpy as np
from iris.fileformats.grib.message import Section
@tests.skip_data
class Test___getitem__(tests.IrisTest):
    """Key lookup (Section.__getitem__) against a real GRIB2 message."""
    def setUp(self):
        # Load the first message from the sample file; the resulting grib
        # handle id is what all the gribapi accessors operate on.
        filename = tests.get_data_path(('GRIB', 'uk_t', 'uk_t.grib2'))
        with open(filename, 'rb') as grib_fh:
            self.grib_id = gribapi.grib_new_from_file(grib_fh)
    def test_scalar(self):
        # Scalar keys come back as plain Python values.
        section = Section(self.grib_id, None, ['Ni'])
        self.assertEqual(section['Ni'], 47)
    def test_array(self):
        # Array keys come back as numpy arrays of the full coded field.
        section = Section(self.grib_id, None, ['codedValues'])
        codedValues = section['codedValues']
        self.assertEqual(codedValues.shape, (1551,))
        self.assertArrayAlmostEqual(codedValues[:3],
                                    [-1.78140259, -1.53140259, -1.28140259])
    def test_typeOfFirstFixedSurface(self):
        # This key is read as its raw integer code (100), not as
        # gribapi's string translation of it.
        section = Section(self.grib_id, None, ['typeOfFirstFixedSurface'])
        self.assertEqual(section['typeOfFirstFixedSurface'], 100)
    def test_numberOfSection(self):
        # 'numberOfSection' reflects the section number passed in.
        n = 4
        section = Section(self.grib_id, n, ['numberOfSection'])
        self.assertEqual(section['numberOfSection'], n)
    def test_invalid(self):
        # Unknown keys raise KeyError naming the missing key.
        section = Section(self.grib_id, None, ['Ni'])
        with self.assertRaisesRegexp(KeyError, 'Nii'):
            section['Nii']
@tests.skip_data
class Test__getitem___pdt_31(tests.IrisTest):
    """Key lookup on a message using product definition template 31
    (satellite channel keys), which yields length-1 arrays."""
    def setUp(self):
        filename = tests.get_data_path(('GRIB', 'umukv', 'ukv_chan9.grib2'))
        with open(filename, 'rb') as grib_fh:
            self.grib_id = gribapi.grib_new_from_file(grib_fh)
        self.keys = ['satelliteSeries', 'satelliteNumber', 'instrumentType',
                     'scaleFactorOfCentralWaveNumber',
                     'scaledValueOfCentralWaveNumber']
    def test_array(self):
        # Each per-channel key is returned as a 1-element numpy array,
        # not collapsed to a scalar.
        section = Section(self.grib_id, None, self.keys)
        for key in self.keys:
            value = section[key]
            self.assertIsInstance(value, np.ndarray)
            self.assertEqual(value.shape, (1,))
@tests.skip_data
class Test_get_computed_key(tests.IrisTest):
    """Section.get_computed_key for keys gribapi derives rather than stores."""
    def test_gdt40_computed(self):
        # Gaussian grid (grid definition template 40): latitudes are
        # computed from the grid definition, so only a loose range check
        # on the first value is meaningful here.
        fname = tests.get_data_path(('GRIB', 'gaussian', 'regular_gg.grib2'))
        with open(fname, 'rb') as grib_fh:
            self.grib_id = gribapi.grib_new_from_file(grib_fh)
        section = Section(self.grib_id, None, [])
        latitudes = section.get_computed_key('latitudes')
        self.assertTrue(88.55 < latitudes[0] < 88.59)
if __name__ == '__main__':
    tests.main()
from celery import Celery
from celery import Task
from flask import Flask
from flask import render_template
def create_app() -> Flask:
    """Application factory: configure Flask + Celery, register routes."""
    app = Flask(__name__)
    # Default Celery settings; FLASK_-prefixed environment variables
    # loaded below may override them.
    celery_settings = dict(
        broker_url="redis://localhost",
        result_backend="redis://localhost",
        task_ignore_result=True,
    )
    app.config.from_mapping(CELERY=celery_settings)
    app.config.from_prefixed_env()
    celery_init_app(app)

    @app.route("/")
    def index() -> str:
        return render_template("index.html")

    # Imported here (not at module top) to avoid a circular import with
    # the package's views module.
    from . import views

    app.register_blueprint(views.bp)
    return app
def celery_init_app(app: Flask) -> Celery:
    """Create a Celery app whose tasks execute inside the Flask app context."""

    class FlaskTask(Task):
        """Task subclass that pushes the Flask application context per call."""

        def __call__(self, *args: object, **kwargs: object) -> object:
            with app.app_context():
                return self.run(*args, **kwargs)

    celery = Celery(app.name, task_cls=FlaskTask)
    celery.config_from_object(app.config["CELERY"])
    celery.set_default()
    # Expose the Celery instance via the standard Flask extensions registry.
    app.extensions["celery"] = celery
    return celery
"""Config flow for NZBGet."""
import logging
from typing import Any, Dict, Optional
import voluptuous as vol
from homeassistant.config_entries import CONN_CLASS_LOCAL_POLL, ConfigFlow, OptionsFlow
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.core import callback
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .const import (
DEFAULT_NAME,
DEFAULT_PORT,
DEFAULT_SCAN_INTERVAL,
DEFAULT_SSL,
DEFAULT_VERIFY_SSL,
)
from .const import DOMAIN # pylint: disable=unused-import
from .coordinator import NZBGetAPI, NZBGetAPIException
_LOGGER = logging.getLogger(__name__)
def validate_input(hass: HomeAssistantType, data: dict) -> bool:
    """Validate that the user input allows us to connect.

    Data has the keys from DATA_SCHEMA with values provided by the user.

    Returns True on success; raises NZBGetAPIException when the
    connection probe fails.  (The previous -> Dict[str, Any] annotation
    was wrong: the function always returns a bare True.)
    """
    nzbget_api = NZBGetAPI(
        data[CONF_HOST],
        data.get(CONF_USERNAME),
        data.get(CONF_PASSWORD),
        data[CONF_SSL],
        data[CONF_VERIFY_SSL],
        data[CONF_PORT],
    )
    # The version value itself is irrelevant; a successful call proves
    # connectivity and credentials.
    nzbget_api.version()
    return True
class NZBGetConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for NZBGet."""
    VERSION = 1
    CONNECTION_CLASS = CONN_CLASS_LOCAL_POLL
    @staticmethod
    @callback
    def async_get_options_flow(config_entry):
        """Get the options flow for this handler."""
        return NZBGetOptionsFlowHandler(config_entry)
    async def async_step_import(
        self, user_input: Optional[ConfigType] = None
    ) -> Dict[str, Any]:
        """Handle a flow initiated by configuration file.

        YAML config supplies scan_interval as a timedelta, while the
        options flow stores plain seconds - normalise before delegating
        to the user step.
        """
        if CONF_SCAN_INTERVAL in user_input:
            user_input[CONF_SCAN_INTERVAL] = user_input[CONF_SCAN_INTERVAL].seconds
        return await self.async_step_user(user_input)
    async def async_step_user(
        self, user_input: Optional[ConfigType] = None
    ) -> Dict[str, Any]:
        """Handle a flow initiated by the user."""
        # Only a single NZBGet instance is supported per installation.
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")
        errors = {}
        if user_input is not None:
            # CONF_VERIFY_SSL is only shown when advanced options are
            # enabled, so default it when absent.
            if CONF_VERIFY_SSL not in user_input:
                user_input[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL
            try:
                # validate_input does blocking network I/O - run it in the
                # executor, not on the event loop.
                await self.hass.async_add_executor_job(
                    validate_input, self.hass, user_input
                )
            except NZBGetAPIException:
                errors["base"] = "cannot_connect"
            except Exception:  # pylint: disable=broad-except
                _LOGGER.exception("Unexpected exception")
                return self.async_abort(reason="unknown")
            else:
                return self.async_create_entry(
                    title=user_input[CONF_HOST],
                    data=user_input,
                )
        # First visit (or validation failed): (re)display the form.
        data_schema = {
            vol.Required(CONF_HOST): str,
            vol.Optional(CONF_NAME, default=DEFAULT_NAME): str,
            vol.Optional(CONF_USERNAME): str,
            vol.Optional(CONF_PASSWORD): str,
            vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
            vol.Optional(CONF_SSL, default=DEFAULT_SSL): bool,
        }
        if self.show_advanced_options:
            data_schema[
                vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL)
            ] = bool
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(data_schema),
            errors=errors or {},
        )
class NZBGetOptionsFlowHandler(OptionsFlow):
    """Handle NZBGet client options (currently just the scan interval)."""
    def __init__(self, config_entry):
        """Initialize options flow."""
        self.config_entry = config_entry
    async def async_step_init(self, user_input: Optional[ConfigType] = None):
        """Manage NZBGet options."""
        if user_input is not None:
            return self.async_create_entry(title="", data=user_input)
        # Pre-fill the form with the currently stored interval.
        current_interval = self.config_entry.options.get(
            CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
        )
        schema = vol.Schema(
            {vol.Optional(CONF_SCAN_INTERVAL, default=current_interval): int}
        )
        return self.async_show_form(step_id="init", data_schema=schema)
/*
* Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.engine.mock
import io.ktor.client.call.*
import io.ktor.client.engine.*
import io.ktor.client.request.*
import io.ktor.client.statement.*
import kotlinx.coroutines.*
import kotlin.coroutines.*
/**
 * Single [HttpClientCall] to [HttpResponse] mapper: given the request data,
 * produces the mocked response data.
 *
 * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.client.engine.mock.MockRequestHandler)
 */
public typealias MockRequestHandler = suspend MockRequestHandleScope.(request: HttpRequestData) -> HttpResponseData
/**
 * Receiver scope for [MockRequestHandler]; carries the call's coroutine context.
 *
 * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.client.engine.mock.MockRequestHandleScope)
 */
public class MockRequestHandleScope(internal val callContext: CoroutineContext)
/**
 * [HttpClientEngineConfig] for [MockEngine].
 *
 * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.client.engine.mock.MockEngineConfig)
 */
public class MockEngineConfig : HttpClientEngineConfig() {
    /**
     * Request handlers.
     * Responses are given back in the order handlers were added to [requestHandlers].
     *
     * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.client.engine.mock.MockEngineConfig.requestHandlers)
     */
    public val requestHandlers: MutableList<MockRequestHandler> = mutableListOf()
    /**
     * Should the engine reuse handlers once the list is exhausted.
     *
     * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.client.engine.mock.MockEngineConfig.reuseHandlers)
     */
    public var reuseHandlers: Boolean = true
    /**
     * Add a request handler to [MockEngine].
     *
     * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.client.engine.mock.MockEngineConfig.addHandler)
     */
    public fun addHandler(handler: MockRequestHandler) {
        requestHandlers += handler
    }
}
/* Generated file to emulate the evaluator namespace:
   re-exports everything from the harness implementation. */
export * from "../../harness/_namespaces/evaluator.js";
import {
type Target,
isReadonly,
isShallow,
reactive,
reactiveMap,
readonly,
readonlyMap,
shallowReactiveMap,
shallowReadonlyMap,
toRaw,
} from './reactive'
import { arrayInstrumentations } from './arrayInstrumentations'
import { ReactiveFlags, TrackOpTypes, TriggerOpTypes } from './constants'
import { ITERATE_KEY, track, trigger } from './dep'
import {
hasChanged,
hasOwn,
isArray,
isIntegerKey,
isObject,
isSymbol,
makeMap,
} from '@vue/shared'
import { isRef } from './ref'
import { warn } from './warning'
// Keys whose access must never be tracked as reactive dependencies
// (internal ref/Vue markers and prototype access).
const isNonTrackableKeys = /*@__PURE__*/ makeMap(`__proto__,__v_isRef,__isVue`)
// Well-known symbols (Symbol.iterator etc.); reads of these bypass
// tracking/unwrapping in the get trap below.
const builtInSymbols = new Set(
  /*@__PURE__*/
  Object.getOwnPropertyNames(Symbol)
    // ios10.x Object.getOwnPropertyNames(Symbol) can enumerate 'arguments' and 'caller'
    // but accessing them on Symbol leads to TypeError because Symbol is a strict mode
    // function
    .filter(key => key !== 'arguments' && key !== 'caller')
    .map(key => Symbol[key as keyof SymbolConstructor])
    .filter(isSymbol),
)
// Instrumented Object.prototype.hasOwnProperty: records a HAS dependency on
// the raw target before delegating, so `obj.hasOwnProperty(k)` is reactive.
function hasOwnProperty(this: object, key: unknown) {
  // #10455 hasOwnProperty may be called with non-string values
  if (!isSymbol(key)) key = String(key)
  const obj = toRaw(this)
  track(obj, TrackOpTypes.HAS, key)
  return obj.hasOwnProperty(key as string)
}
// Shared `get` trap for all reactive proxy variants; subclasses select
// readonly/shallow behaviour via the constructor flags.
class BaseReactiveHandler implements ProxyHandler<Target> {
  constructor(
    protected readonly _isReadonly = false,
    protected readonly _isShallow = false,
  ) {}
  get(target: Target, key: string | symbol, receiver: object): any {
    if (key === ReactiveFlags.SKIP) return target[ReactiveFlags.SKIP]
    const isReadonly = this._isReadonly,
      isShallow = this._isShallow
    // Internal flag keys answer identity questions without touching the target.
    if (key === ReactiveFlags.IS_REACTIVE) {
      return !isReadonly
    } else if (key === ReactiveFlags.IS_READONLY) {
      return isReadonly
    } else if (key === ReactiveFlags.IS_SHALLOW) {
      return isShallow
    } else if (key === ReactiveFlags.RAW) {
      // Only unwrap to the raw target for the proxy registered in the
      // matching variant map, or for a user proxy wrapping that proxy.
      if (
        receiver ===
          (isReadonly
            ? isShallow
              ? shallowReadonlyMap
              : readonlyMap
            : isShallow
              ? shallowReactiveMap
              : reactiveMap
          ).get(target) ||
        // receiver is not the reactive proxy, but has the same prototype
        // this means the receiver is a user proxy of the reactive proxy
        Object.getPrototypeOf(target) === Object.getPrototypeOf(receiver)
      ) {
        return target
      }
      // early return undefined
      return
    }
    const targetIsArray = isArray(target)
    if (!isReadonly) {
      let fn: Function | undefined
      // Instrumented array methods (identity-sensitive and length-mutating
      // ones) replace the native implementations on reactive arrays.
      if (targetIsArray && (fn = arrayInstrumentations[key])) {
        return fn
      }
      if (key === 'hasOwnProperty') {
        return hasOwnProperty
      }
    }
    const res = Reflect.get(
      target,
      key,
      // if this is a proxy wrapping a ref, return methods using the raw ref
      // as receiver so that we don't have to call `toRaw` on the ref in all
      // its class methods
      isRef(target) ? target : receiver,
    )
    // Built-in symbols and internal keys are returned untracked/unwrapped.
    if (isSymbol(key) ? builtInSymbols.has(key) : isNonTrackableKeys(key)) {
      return res
    }
    if (!isReadonly) {
      track(target, TrackOpTypes.GET, key)
    }
    if (isShallow) {
      return res
    }
    if (isRef(res)) {
      // ref unwrapping - skip unwrap for Array + integer key.
      const value = targetIsArray && isIntegerKey(key) ? res : res.value
      return isReadonly && isObject(value) ? readonly(value) : value
    }
    if (isObject(res)) {
      // Convert returned value into a proxy as well. we do the isObject check
      // here to avoid invalid value warning. Also need to lazy access readonly
      // and reactive here to avoid circular dependency.
      return isReadonly ? readonly(res) : reactive(res)
    }
    return res
  }
}
// Traps for mutable (reactive / shallowReactive) proxies: set, delete, has
// and ownKeys with dependency tracking and change triggering.
class MutableReactiveHandler extends BaseReactiveHandler {
  constructor(isShallow = false) {
    super(false, isShallow)
  }
  set(
    target: Record<string | symbol, unknown>,
    key: string | symbol,
    value: unknown,
    receiver: object,
  ): boolean {
    let oldValue = target[key]
    const isArrayWithIntegerKey = isArray(target) && isIntegerKey(key)
    if (!this._isShallow) {
      const isOldValueReadonly = isReadonly(oldValue)
      // Compare raw values so proxy wrapping doesn't mask real changes.
      if (!isShallow(value) && !isReadonly(value)) {
        oldValue = toRaw(oldValue)
        value = toRaw(value)
      }
      // Assigning a plain value over an existing ref writes through to
      // ref.value - except for array integer keys, and readonly refs.
      if (!isArrayWithIntegerKey && isRef(oldValue) && !isRef(value)) {
        if (isOldValueReadonly) {
          if (__DEV__) {
            warn(
              `Set operation on key "${String(key)}" failed: target is readonly.`,
              target[key],
            )
          }
          return true
        } else {
          oldValue.value = value
          return true
        }
      }
    } else {
      // in shallow mode, objects are set as-is regardless of reactive or not
    }
    // Determine add-vs-set before the write actually happens.
    const hadKey = isArrayWithIntegerKey
      ? Number(key) < target.length
      : hasOwn(target, key)
    const result = Reflect.set(
      target,
      key,
      value,
      isRef(target) ? target : receiver,
    )
    // don't trigger if target is something up in the prototype chain of original
    if (target === toRaw(receiver)) {
      if (!hadKey) {
        trigger(target, TriggerOpTypes.ADD, key, value)
      } else if (hasChanged(value, oldValue)) {
        trigger(target, TriggerOpTypes.SET, key, value, oldValue)
      }
    }
    return result
  }
  deleteProperty(
    target: Record<string | symbol, unknown>,
    key: string | symbol,
  ): boolean {
    const hadKey = hasOwn(target, key)
    const oldValue = target[key]
    const result = Reflect.deleteProperty(target, key)
    // Only notify subscribers when an own key was actually removed.
    if (result && hadKey) {
      trigger(target, TriggerOpTypes.DELETE, key, undefined, oldValue)
    }
    return result
  }
  has(target: Record<string | symbol, unknown>, key: string | symbol): boolean {
    const result = Reflect.has(target, key)
    // Built-in symbols (e.g. Symbol.iterator) are not tracked as deps.
    if (!isSymbol(key) || !builtInSymbols.has(key)) {
      track(target, TrackOpTypes.HAS, key)
    }
    return result
  }
  ownKeys(target: Record<string | symbol, unknown>): (string | symbol)[] {
    // Iteration depends on the whole key set; arrays track `length` instead.
    track(
      target,
      TrackOpTypes.ITERATE,
      isArray(target) ? 'length' : ITERATE_KEY,
    )
    return Reflect.ownKeys(target)
  }
}
// Traps for readonly proxies: every mutation is a silent no-op (dev builds
// warn). Returning true keeps strict-mode code from throwing a TypeError.
class ReadonlyReactiveHandler extends BaseReactiveHandler {
  constructor(isShallow = false) {
    super(true, isShallow)
  }

  set(target: object, key: string | symbol) {
    if (__DEV__) {
      const message = `Set operation on key "${String(key)}" failed: target is readonly.`
      warn(message, target)
    }
    return true
  }

  deleteProperty(target: object, key: string | symbol) {
    if (__DEV__) {
      const message = `Delete operation on key "${String(key)}" failed: target is readonly.`
      warn(message, target)
    }
    return true
  }
}
// Singleton trap objects shared by every proxy of the corresponding variant.
export const mutableHandlers: ProxyHandler<object> =
  /*@__PURE__*/ new MutableReactiveHandler()
export const readonlyHandlers: ProxyHandler<object> =
  /*@__PURE__*/ new ReadonlyReactiveHandler()
export const shallowReactiveHandlers: MutableReactiveHandler =
  /*@__PURE__*/ new MutableReactiveHandler(true)
// Props handlers are special in the sense that it should not unwrap top-level
// refs (in order to allow refs to be explicitly passed down), but should
// retain the reactivity of the normal readonly object.
export const shallowReadonlyHandlers: ReadonlyReactiveHandler =
  /*@__PURE__*/ new ReadonlyReactiveHandler(true)
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build amd64
package math
import "internal/cpu"
// useFMA reports whether this CPU exposes both the AVX and FMA feature
// bits, gating the FMA-based fast path at run time.
var useFMA = cpu.X86.HasAVX && cpu.X86.HasFMA
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
from __future__ import unicode_literals
# Display formats (Django date-template syntax; see the link in the file
# header) for the Swiss German (de_CH) locale.
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    '%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
    # '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
]
DATETIME_INPUT_FORMATS = [
    '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M', # '25.10.2006 14:30'
    '%d.%m.%Y', # '25.10.2006'
]
# these are the separators for non-monetary numbers. For monetary numbers,
# the DECIMAL_SEPARATOR is a . (decimal point) and the THOUSAND_SEPARATOR is a
# ' (single quote).
# For details, please refer to http://www.bk.admin.ch/dokumentation/sprachen/04915/05016/index.html?lang=de
# (in German) and the documentation
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
from ..._models import BaseModel
__all__ = ["ResponseAudioTranscriptDeltaEvent"]
class ResponseAudioTranscriptDeltaEvent(BaseModel):
    """Emitted when there is a partial transcript of audio."""
    # NOTE: generated from the OpenAPI spec (see the file header); field
    # docstrings below are emitted by the generator - edit the spec, not
    # this file.
    delta: str
    """The partial transcript of the audio response."""
    sequence_number: int
    """The sequence number of this event."""
    type: Literal["response.audio.transcript.delta"]
    """The type of the event. Always `response.audio.transcript.delta`."""
"""Misc os module tests
Made for Jython.
"""
import os
import unittest
from test import test_support
class OSTestCase(unittest.TestCase):
    """Regression tests for Jython-specific os-module bugs; each test is
    named after the tracker issue it covers (Python 2 module)."""
    def setUp(self):
        # Create an empty scratch file for the tests to operate on.
        open(test_support.TESTFN, 'w').close()
    def tearDown(self):
        if os.path.exists(test_support.TESTFN):
            os.remove(test_support.TESTFN)
    def test_issue1727(self):
        # os.stat must accept its argument via *args unpacking.
        os.stat(*(test_support.TESTFN,))
    def test_issue1755(self):
        # utime on a missing file must raise OSError, not succeed silently.
        os.remove(test_support.TESTFN)
        self.assertRaises(OSError, os.utime, test_support.TESTFN, None)
    def test_issue1824(self):
        # link with a missing source must raise OSError.
        os.remove(test_support.TESTFN)
        self.assertRaises(OSError, os.link,
                          test_support.TESTFN, test_support.TESTFN)
    def test_issue1825(self):
        # OSError.filename must preserve the unicode form of the path.
        os.remove(test_support.TESTFN)
        testfnu = unicode(test_support.TESTFN)
        try:
            os.open(testfnu, os.O_RDONLY)
        except OSError, e:
            self.assertTrue(isinstance(e.filename, unicode))
            self.assertEqual(e.filename, testfnu)
        else:
            self.assertTrue(False)
        # XXX: currently fail
        #for fn in os.chdir, os.listdir, os.rmdir:
        for fn in (os.rmdir,):
            try:
                fn(testfnu)
            except OSError, e:
                self.assertTrue(isinstance(e.filename, unicode))
                self.assertEqual(e.filename, testfnu)
            else:
                self.assertTrue(False)
def test_main():
    # Entry point used by the regression-test driver.
    test_support.run_unittest(OSTestCase)
if __name__ == '__main__':
    test_main()
/*
Copyright 2019 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1
import (
admissionv1 "k8s.io/api/admission/v1"
"k8s.io/apimachinery/pkg/runtime/schema"
)
// GroupName is the group name for this API.
const GroupName = "admission.k8s.io"
// SchemeGroupVersion is the group version used to register these objects.
var SchemeGroupVersion = schema.GroupVersion{Group: GroupName, Version: "v1"}
// Resource takes an unqualified resource and returns a Group qualified GroupResource
func Resource(resource string) schema.GroupResource {
	gvr := SchemeGroupVersion.WithResource(resource)
	return gvr.GroupResource()
}
var (
	// localSchemeBuilder aliases the upstream admission/v1 types' builder so
	// the manual registration below lands in the same scheme builder.
	localSchemeBuilder = &admissionv1.SchemeBuilder
	// AddToScheme is a common registration function for mapping packaged scoped group & version keys to a scheme
	AddToScheme = localSchemeBuilder.AddToScheme
)
func init() {
	// We only register manually written functions here. The registration of the
	// generated functions takes place in the generated files. The separation
	// makes the code compile even when the generated files are missing.
	localSchemeBuilder.Register(RegisterDefaults)
}
# Copyright 2011 OpenStack Foundation
# Copyright 2011 Ilya Alekseyev
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import StringIO
import sys
import fixtures
import mock
from nova.cmd import manage
from nova import context
from nova import db
from nova.db import migration
from nova.db.sqlalchemy import migration as sqla_migration
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.db import fakes as db_fakes
from nova.tests.unit import fake_instance
from nova.tests.unit.objects import test_network
from nova.tests.unit import test_flavors
class FixedIpCommandsTestCase(test.TestCase):
    """Tests for `nova-manage fixed` (reserve/unreserve/list)."""
    def setUp(self):
        super(FixedIpCommandsTestCase, self).setUp()
        # Replace the DB network API with in-memory fakes.
        db_fakes.stub_out_db_network_api(self.stubs)
        self.commands = manage.FixedIpCommands()
    def test_reserve(self):
        self.commands.reserve('192.168.0.100')
        address = db.fixed_ip_get_by_address(context.get_admin_context(),
                                             '192.168.0.100')
        self.assertEqual(address['reserved'], True)
    def test_reserve_nonexistent_address(self):
        # Unknown addresses make the command exit with status 2.
        self.assertEqual(2, self.commands.reserve('55.55.55.55'))
    def test_unreserve(self):
        self.commands.unreserve('192.168.0.100')
        address = db.fixed_ip_get_by_address(context.get_admin_context(),
                                             '192.168.0.100')
        self.assertEqual(address['reserved'], False)
    def test_unreserve_nonexistent_address(self):
        self.assertEqual(2, self.commands.unreserve('55.55.55.55'))
    def test_list(self):
        # `list` prints rather than returns, so capture stdout.
        # NOTE(review): `find()` returns -1 when absent; asserting != 1
        # looks like it was meant to be != -1 - verify against intent.
        self.useFixture(fixtures.MonkeyPatch('sys.stdout',
                                             StringIO.StringIO()))
        self.commands.list()
        self.assertNotEqual(1, sys.stdout.getvalue().find('192.168.0.100'))
    def test_list_just_one_host(self):
        def fake_fixed_ip_get_by_host(*args, **kwargs):
            return [db_fakes.fixed_ip_fields]
        self.useFixture(fixtures.MonkeyPatch(
            'nova.db.fixed_ip_get_by_host',
            fake_fixed_ip_get_by_host))
        self.useFixture(fixtures.MonkeyPatch('sys.stdout',
                                             StringIO.StringIO()))
        self.commands.list('banana')
        self.assertNotEqual(1, sys.stdout.getvalue().find('192.168.0.100'))
class FloatingIpCommandsTestCase(test.TestCase):
    """Tests for the floating-IP helpers of nova-manage."""
    def setUp(self):
        super(FloatingIpCommandsTestCase, self).setUp()
        db_fakes.stub_out_db_network_api(self.stubs)
        self.commands = manage.FloatingIpCommands()
    def test_address_to_hosts(self):
        """address_to_hosts expands a CIDR into its usable host addresses and
        rejects prefixes that are too small (/31, /32) or too large (>/13)."""
        def assert_loop(result, expected):
            # NOTE(review): `len(list(result))` above may already have
            # consumed `result` if it is a generator - confirm
            # address_to_hosts returns a re-iterable sequence.
            for ip in result:
                self.assertIn(str(ip), expected)
        address_to_hosts = self.commands.address_to_hosts
        # /32 and /31
        self.assertRaises(exception.InvalidInput, address_to_hosts,
                          '192.168.100.1/32')
        self.assertRaises(exception.InvalidInput, address_to_hosts,
                          '192.168.100.1/31')
        # /30
        expected = ["192.168.100.%s" % i for i in range(1, 3)]
        result = address_to_hosts('192.168.100.0/30')
        self.assertEqual(2, len(list(result)))
        assert_loop(result, expected)
        # /29
        expected = ["192.168.100.%s" % i for i in range(1, 7)]
        result = address_to_hosts('192.168.100.0/29')
        self.assertEqual(6, len(list(result)))
        assert_loop(result, expected)
        # /28
        expected = ["192.168.100.%s" % i for i in range(1, 15)]
        result = address_to_hosts('192.168.100.0/28')
        self.assertEqual(14, len(list(result)))
        assert_loop(result, expected)
        # /16
        result = address_to_hosts('192.168.100.0/16')
        self.assertEqual(65534, len(list(result)))
        # NOTE(dripton): I don't test /13 because it makes the test take 3s.
        # /12 gives over a million IPs, which is ridiculous.
        self.assertRaises(exception.InvalidInput, address_to_hosts,
                          '192.168.100.1/12')
class NetworkCommandsTestCase(test.TestCase):
def setUp(self):
super(NetworkCommandsTestCase, self).setUp()
self.commands = manage.NetworkCommands()
self.net = {'id': 0,
'label': 'fake',
'injected': False,
'cidr': '192.168.0.0/24',
'cidr_v6': 'dead:beef::/64',
'multi_host': False,
'gateway_v6': 'dead:beef::1',
'netmask_v6': '64',
'netmask': '255.255.255.0',
'bridge': 'fa0',
'bridge_interface': 'fake_fa0',
'gateway': '192.168.0.1',
'broadcast': '192.168.0.255',
'dns1': '8.8.8.8',
'dns2': '8.8.4.4',
'vlan': 200,
'vlan_start': 201,
'vpn_public_address': '10.0.0.2',
'vpn_public_port': '2222',
'vpn_private_address': '192.168.0.2',
'dhcp_start': '192.168.0.3',
'project_id': 'fake_project',
'host': 'fake_host',
'uuid': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}
def fake_network_get_by_cidr(context, cidr):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(cidr, self.fake_net['cidr'])
return db_fakes.FakeModel(dict(test_network.fake_network,
**self.fake_net))
def fake_network_get_by_uuid(context, uuid):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(uuid, self.fake_net['uuid'])
return db_fakes.FakeModel(dict(test_network.fake_network,
**self.fake_net))
def fake_network_update(context, network_id, values):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(network_id, self.fake_net['id'])
self.assertEqual(values, self.fake_update_value)
self.fake_network_get_by_cidr = fake_network_get_by_cidr
self.fake_network_get_by_uuid = fake_network_get_by_uuid
self.fake_network_update = fake_network_update
def test_create(self):
def fake_create_networks(obj, context, **kwargs):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(kwargs['label'], 'Test')
self.assertEqual(kwargs['cidr'], '10.2.0.0/24')
self.assertEqual(kwargs['multi_host'], False)
self.assertEqual(kwargs['num_networks'], 1)
self.assertEqual(kwargs['network_size'], 256)
self.assertEqual(kwargs['vlan'], 200)
self.assertEqual(kwargs['vlan_start'], 201)
self.assertEqual(kwargs['vpn_start'], 2000)
self.assertEqual(kwargs['cidr_v6'], 'fd00:2::/120')
self.assertEqual(kwargs['gateway'], '10.2.0.1')
self.assertEqual(kwargs['gateway_v6'], 'fd00:2::22')
self.assertEqual(kwargs['bridge'], 'br200')
self.assertEqual(kwargs['bridge_interface'], 'eth0')
self.assertEqual(kwargs['dns1'], '8.8.8.8')
self.assertEqual(kwargs['dns2'], '8.8.4.4')
self.flags(network_manager='nova.network.manager.VlanManager')
from nova.network import manager as net_manager
self.stubs.Set(net_manager.VlanManager, 'create_networks',
fake_create_networks)
self.commands.create(
label='Test',
cidr='10.2.0.0/24',
num_networks=1,
network_size=256,
multi_host='F',
vlan=200,
vlan_start=201,
vpn_start=2000,
cidr_v6='fd00:2::/120',
gateway='10.2.0.1',
gateway_v6='fd00:2::22',
bridge='br200',
bridge_interface='eth0',
dns1='8.8.8.8',
dns2='8.8.4.4',
uuid='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa')
def test_list(self):
def fake_network_get_all(context):
return [db_fakes.FakeModel(self.net)]
self.stubs.Set(db, 'network_get_all', fake_network_get_all)
output = StringIO.StringIO()
sys.stdout = output
self.commands.list()
sys.stdout = sys.__stdout__
result = output.getvalue()
_fmt = "\t".join(["%(id)-5s", "%(cidr)-18s", "%(cidr_v6)-15s",
"%(dhcp_start)-15s", "%(dns1)-15s", "%(dns2)-15s",
"%(vlan)-15s", "%(project_id)-15s", "%(uuid)-15s"])
head = _fmt % {'id': 'id',
'cidr': 'IPv4',
'cidr_v6': 'IPv6',
'dhcp_start': 'start address',
'dns1': 'DNS1',
'dns2': 'DNS2',
'vlan': 'VlanID',
'project_id': 'project',
'uuid': "uuid"}
body = _fmt % {'id': self.net['id'],
'cidr': self.net['cidr'],
'cidr_v6': self.net['cidr_v6'],
'dhcp_start': self.net['dhcp_start'],
'dns1': self.net['dns1'],
'dns2': self.net['dns2'],
'vlan': self.net['vlan'],
'project_id': self.net['project_id'],
'uuid': self.net['uuid']}
answer = '%s\n%s\n' % (head, body)
self.assertEqual(result, answer)
def test_delete(self):
self.fake_net = self.net
self.fake_net['project_id'] = None
self.fake_net['host'] = None
self.stubs.Set(db, 'network_get_by_uuid',
self.fake_network_get_by_uuid)
def fake_network_delete_safe(context, network_id):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(network_id, self.fake_net['id'])
self.stubs.Set(db, 'network_delete_safe', fake_network_delete_safe)
self.commands.delete(uuid=self.fake_net['uuid'])
def test_delete_by_cidr(self):
self.fake_net = self.net
self.fake_net['project_id'] = None
self.fake_net['host'] = None
self.stubs.Set(db, 'network_get_by_cidr',
self.fake_network_get_by_cidr)
def fake_network_delete_safe(context, network_id):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(network_id, self.fake_net['id'])
self.stubs.Set(db, 'network_delete_safe', fake_network_delete_safe)
self.commands.delete(fixed_range=self.fake_net['cidr'])
def _test_modify_base(self, update_value, project, host, dis_project=None,
                      dis_host=None):
    """Common driver for the modify tests.

    Stubs the db lookup/update calls, then runs ``modify`` against the
    fake network's cidr; ``fake_network_update`` (defined elsewhere in
    this class) asserts that ``update_value`` is what gets written.
    """
    self.fake_net = self.net
    self.fake_update_value = update_value
    self.stubs.Set(db, 'network_get_by_cidr',
                   self.fake_network_get_by_cidr)
    self.stubs.Set(db, 'network_update', self.fake_network_update)
    self.commands.modify(self.fake_net['cidr'], project=project, host=host,
                         dis_project=dis_project, dis_host=dis_host)
def test_modify_associate(self):
    """Associating a project and host writes both fields to the db."""
    expected_update = {'project_id': 'test_project',
                       'host': 'test_host'}
    self._test_modify_base(update_value=expected_update,
                           project='test_project',
                           host='test_host')
def test_modify_unchanged(self):
    """With nothing to (dis)associate, no db fields are updated."""
    self._test_modify_base(update_value={}, project=None, host=None)
def test_modify_disassociate(self):
    """Disassociating clears both project_id and host in the db."""
    self._test_modify_base(update_value={'project_id': None, 'host': None},
                           project=None, host=None, dis_project=True,
                           dis_host=True)
class NeutronV2NetworkCommandsTestCase(test.TestCase):
    """With the Neutron network API selected, the nova-manage network
    commands are unsupported and must all exit with status code 2.
    """

    def setUp(self):
        super(NeutronV2NetworkCommandsTestCase, self).setUp()
        # Switch the network backend to Neutron for the duration of the test.
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        self.commands = manage.NetworkCommands()

    def test_create(self):
        self.assertEqual(2, self.commands.create())

    def test_list(self):
        self.assertEqual(2, self.commands.list())

    def test_delete(self):
        self.assertEqual(2, self.commands.delete())

    def test_modify(self):
        self.assertEqual(2, self.commands.modify('192.168.0.1'))
class ProjectCommandsTestCase(test.TestCase):
    """Tests for the nova-manage project quota command."""

    def setUp(self):
        super(ProjectCommandsTestCase, self).setUp()
        self.commands = manage.ProjectCommands()

    def test_quota(self):
        """Updating a valid quota key prints the new value."""
        output = StringIO.StringIO()
        sys.stdout = output
        try:
            self.commands.quota(project_id='admin',
                                key='instances',
                                value='unlimited',
                                )
        finally:
            # Restore stdout even if the command raises, so later tests in
            # the process are unaffected.
            sys.stdout = sys.__stdout__
        result = output.getvalue()
        print_format = "%-36s %-10s" % ('instances', 'unlimited')
        self.assertIn(print_format, result)

    def test_quota_update_invalid_key(self):
        # Unknown quota keys are rejected with exit code 2.
        self.assertEqual(2, self.commands.quota('admin', 'volumes1', '10'))
class VmCommandsTestCase(test.TestCase):
    """Tests for the nova-manage vm list command."""

    def setUp(self):
        super(VmCommandsTestCase, self).setUp()
        self.commands = manage.VmCommands()
        self.fake_flavor = objects.Flavor(**test_flavors.DEFAULT_FLAVORS[0])

    def test_list_without_host(self):
        """Without a host filter the command lists all instances."""
        output = StringIO.StringIO()
        sys.stdout = output
        try:
            with mock.patch.object(objects.InstanceList, 'get_by_filters') as get:
                get.return_value = objects.InstanceList(
                    objects=[fake_instance.fake_instance_obj(
                        context.get_admin_context(), host='foo-host',
                        flavor=self.fake_flavor,
                        system_metadata={})])
                self.commands.list()
        finally:
            # Restore stdout even if the command raises.
            sys.stdout = sys.__stdout__
        result = output.getvalue()
        self.assertIn('node', result)  # check the header line
        self.assertIn('m1.tiny', result)  # flavor.name
        self.assertIn('foo-host', result)

    def test_list_with_host(self):
        """With a host filter the per-host lookup is used instead."""
        output = StringIO.StringIO()
        sys.stdout = output
        try:
            with mock.patch.object(objects.InstanceList, 'get_by_host') as get:
                get.return_value = objects.InstanceList(
                    objects=[fake_instance.fake_instance_obj(
                        context.get_admin_context(),
                        flavor=self.fake_flavor,
                        system_metadata={})])
                self.commands.list(host='fake-host')
        finally:
            # Restore stdout even if the command raises.
            sys.stdout = sys.__stdout__
        result = output.getvalue()
        self.assertIn('node', result)  # check the header line
        self.assertIn('m1.tiny', result)  # flavor.name
        self.assertIn('fake-host', result)
class DBCommandsTestCase(test.TestCase):
    """Tests for the nova-manage db command group."""

    def setUp(self):
        super(DBCommandsTestCase, self).setUp()
        self.commands = manage.DbCommands()

    def test_archive_deleted_rows_negative(self):
        # A negative max_rows is invalid input and exits with status 1.
        self.assertEqual(1, self.commands.archive_deleted_rows(-1))

    @mock.patch.object(migration, 'db_null_instance_uuid_scan',
                       return_value={'foo': 0})
    def test_null_instance_uuid_scan_no_records_found(self, mock_scan):
        # Capture stdout via a fixture so it is restored automatically.
        self.useFixture(fixtures.MonkeyPatch('sys.stdout',
                                             StringIO.StringIO()))
        self.commands.null_instance_uuid_scan()
        self.assertIn("There were no records found", sys.stdout.getvalue())

    @mock.patch.object(migration, 'db_null_instance_uuid_scan',
                       return_value={'foo': 1, 'bar': 0})
    def _test_null_instance_uuid_scan(self, mock_scan, delete):
        """Common driver: run the scan in read-only or delete mode and
        check the per-table reporting (tables with zero hits are elided).
        """
        self.useFixture(fixtures.MonkeyPatch('sys.stdout',
                                             StringIO.StringIO()))
        self.commands.null_instance_uuid_scan(delete)
        output = sys.stdout.getvalue()
        if delete:
            self.assertIn("Deleted 1 records from table 'foo'.", output)
            self.assertNotIn("Deleted 0 records from table 'bar'.", output)
        else:
            self.assertIn("1 records in the 'foo' table", output)
            self.assertNotIn("0 records in the 'bar' table", output)
        self.assertNotIn("There were no records found", output)

    def test_null_instance_uuid_scan_readonly(self):
        self._test_null_instance_uuid_scan(delete=False)

    def test_null_instance_uuid_scan_delete(self):
        self._test_null_instance_uuid_scan(delete=True)

    def test_migrate_flavor_data_negative(self):
        # A negative batch size is invalid input and exits with status 1.
        self.assertEqual(1, self.commands.migrate_flavor_data(-1))

    @mock.patch.object(sqla_migration, 'db_version', return_value=2)
    def test_version(self, sqla_migrate):
        self.commands.version()
        # The db commands operate on the 'main' database.
        sqla_migrate.assert_called_once_with(database='main')

    @mock.patch.object(sqla_migration, 'db_sync')
    def test_sync(self, sqla_sync):
        self.commands.sync(version=4)
        sqla_sync.assert_called_once_with(version=4, database='main')
class ApiDbCommandsTestCase(test.TestCase):
    """Tests for the nova-manage api_db command group; mirrors
    DBCommandsTestCase but targets the 'api' database.
    """

    def setUp(self):
        super(ApiDbCommandsTestCase, self).setUp()
        self.commands = manage.ApiDbCommands()

    @mock.patch.object(sqla_migration, 'db_version', return_value=2)
    def test_version(self, sqla_migrate):
        self.commands.version()
        sqla_migrate.assert_called_once_with(database='api')

    @mock.patch.object(sqla_migration, 'db_sync')
    def test_sync(self, sqla_sync):
        self.commands.sync(version=4)
        sqla_sync.assert_called_once_with(version=4, database='api')
class ServiceCommandsTestCase(test.TestCase):
    """Tests for the nova-manage service enable/disable commands."""

    def setUp(self):
        super(ServiceCommandsTestCase, self).setUp()
        self.commands = manage.ServiceCommands()

    def test_service_enable_invalid_params(self):
        # Unknown host/service combinations exit with status 2.
        self.assertEqual(2, self.commands.enable('nohost', 'noservice'))

    def test_service_disable_invalid_params(self):
        # Unknown host/service combinations exit with status 2.
        self.assertEqual(2, self.commands.disable('nohost', 'noservice'))
class CellCommandsTestCase(test.TestCase):
    """Tests for the nova-manage cell commands (transport-host parsing
    and cell creation).

    Note: the original final line carried dataset-extraction residue
    ("| unknown | codeparrot/...") which broke the syntax; it has been
    removed. Docstring typos ("sepcified") are also fixed.
    """

    def setUp(self):
        super(CellCommandsTestCase, self).setUp()
        self.commands = manage.CellCommands()

    def test_create_transport_hosts_multiple(self):
        """Test the _create_transport_hosts method
        when broker_hosts is set.
        """
        brokers = "127.0.0.1:5672,127.0.0.2:5671"
        thosts = self.commands._create_transport_hosts(
            'guest', 'devstack',
            broker_hosts=brokers)
        self.assertEqual(2, len(thosts))
        self.assertEqual('127.0.0.1', thosts[0].hostname)
        self.assertEqual(5672, thosts[0].port)
        self.assertEqual('127.0.0.2', thosts[1].hostname)
        self.assertEqual(5671, thosts[1].port)

    def test_create_transport_hosts_single(self):
        """Test the _create_transport_hosts method when hostname is passed."""
        thosts = self.commands._create_transport_hosts('guest', 'devstack',
                                                       hostname='127.0.0.1',
                                                       port=80)
        self.assertEqual(1, len(thosts))
        self.assertEqual('127.0.0.1', thosts[0].hostname)
        self.assertEqual(80, thosts[0].port)

    def test_create_transport_hosts_single_broker(self):
        """Test the _create_transport_hosts method for single broker_hosts."""
        thosts = self.commands._create_transport_hosts(
            'guest', 'devstack',
            broker_hosts='127.0.0.1:5672')
        self.assertEqual(1, len(thosts))
        self.assertEqual('127.0.0.1', thosts[0].hostname)
        self.assertEqual(5672, thosts[0].port)

    def test_create_transport_hosts_both(self):
        """Test the _create_transport_hosts method when both broker_hosts
        and hostname/port are passed.
        """
        # broker_hosts takes precedence over hostname/port.
        thosts = self.commands._create_transport_hosts(
            'guest', 'devstack',
            broker_hosts='127.0.0.1:5672',
            hostname='127.0.0.2', port=80)
        self.assertEqual(1, len(thosts))
        self.assertEqual('127.0.0.1', thosts[0].hostname)
        self.assertEqual(5672, thosts[0].port)

    def test_create_transport_hosts_wrong_val(self):
        """Test the _create_transport_hosts method when broker_hosts
        is wrongly specified
        """
        self.assertRaises(ValueError,
                          self.commands._create_transport_hosts,
                          'guest', 'devstack',
                          broker_hosts='127.0.0.1:5672,127.0.0.1')

    def test_create_transport_hosts_wrong_port_val(self):
        """Test the _create_transport_hosts method when port in
        broker_hosts is wrongly specified
        """
        self.assertRaises(ValueError,
                          self.commands._create_transport_hosts,
                          'guest', 'devstack',
                          broker_hosts='127.0.0.1:')

    def test_create_transport_hosts_wrong_port_arg(self):
        """Test the _create_transport_hosts method when port
        argument is wrongly specified
        """
        self.assertRaises(ValueError,
                          self.commands._create_transport_hosts,
                          'guest', 'devstack',
                          hostname='127.0.0.1', port='ab')

    @mock.patch.object(context, 'get_admin_context')
    @mock.patch.object(db, 'cell_create')
    def test_create_broker_hosts(self, mock_db_cell_create, mock_ctxt):
        """Test the create function when broker_hosts is
        passed
        """
        cell_tp_url = "fake://guest:devstack@127.0.0.1:5432"
        cell_tp_url += ",guest:devstack@127.0.0.2:9999/"
        # NOTE(review): mock.sentinel (the factory object itself) is used as
        # the fake context on both sides of the assertion; mock.sentinel.ctxt
        # would be more conventional, but identity comparison is equivalent.
        ctxt = mock.sentinel
        mock_ctxt.return_value = mock.sentinel
        self.commands.create("test",
                             broker_hosts='127.0.0.1:5432,127.0.0.2:9999',
                             woffset=0, wscale=0,
                             username="guest", password="devstack")
        exp_values = {'name': "test",
                      'is_parent': False,
                      'transport_url': cell_tp_url,
                      'weight_offset': 0.0,
                      'weight_scale': 0.0}
        mock_db_cell_create.assert_called_once_with(ctxt, exp_values)

    @mock.patch.object(context, 'get_admin_context')
    @mock.patch.object(db, 'cell_create')
    def test_create_broker_hosts_with_url_decoding_fix(self,
                                                       mock_db_cell_create,
                                                       mock_ctxt):
        """Test the create function when broker_hosts is
        passed
        """
        # Credentials containing '=' must survive URL encoding/decoding.
        cell_tp_url = "fake://the=user:the=password@127.0.0.1:5432/"
        ctxt = mock.sentinel
        mock_ctxt.return_value = mock.sentinel
        self.commands.create("test",
                             broker_hosts='127.0.0.1:5432',
                             woffset=0, wscale=0,
                             username="the=user",
                             password="the=password")
        exp_values = {'name': "test",
                      'is_parent': False,
                      'transport_url': cell_tp_url,
                      'weight_offset': 0.0,
                      'weight_scale': 0.0}
        mock_db_cell_create.assert_called_once_with(ctxt, exp_values)

    @mock.patch.object(context, 'get_admin_context')
    @mock.patch.object(db, 'cell_create')
    def test_create_hostname(self, mock_db_cell_create, mock_ctxt):
        """Test the create function when hostname and port is
        passed
        """
        cell_tp_url = "fake://guest:devstack@127.0.0.1:9999/"
        ctxt = mock.sentinel
        mock_ctxt.return_value = mock.sentinel
        self.commands.create("test",
                             hostname='127.0.0.1', port="9999",
                             woffset=0, wscale=0,
                             username="guest", password="devstack")
        exp_values = {'name': "test",
                      'is_parent': False,
                      'transport_url': cell_tp_url,
                      'weight_offset': 0.0,
                      'weight_scale': 0.0}
        mock_db_cell_create.assert_called_once_with(ctxt, exp_values)
[{"Limit":100,"ProximityPaths":["/usr/home/user/clang-tools-extra/clangd/benchmarks/IndexBenchmark.cpp"],"Query":"OMP","RestrictForCodeCompletion":true,"Scopes":["clang::"], "AnyScope":false, "PreferredTypes":[]},
{"Limit":100,"ProximityPaths":[],"Query":"s","RestrictForCodeCompletion":true,"Scopes":["llvm::", ""], "AnyScope":false, "PreferredTypes":[]},
{"Limit":100,"ProximityPaths":[],"Query":"sy","RestrictForCodeCompletion":true,"Scopes":["llvm::", ""], "AnyScope":false, "PreferredTypes":[]},
{"Limit":100,"ProximityPaths":[],"Query":"sys","RestrictForCodeCompletion":true,"Scopes":["llvm::", ""], "AnyScope":false, "PreferredTypes":[]},
{"Limit":100,"ProximityPaths":[],"Query":"sys","RestrictForCodeCompletion":true,"Scopes":["llvm::", ""], "AnyScope":false, "PreferredTypes":[]},
{"Limit":100,"ProximityPaths":[],"Query":"Dex","RestrictForCodeCompletion":true,"Scopes":["clang::clangd::", "clang::", "clang::clangd::dex::"],"AnyScope":false, "PreferredTypes":[]},
{"Limit":100,"ProximityPaths":[],"Query":"Variable","RestrictForCodeCompletion":true,"Scopes":[""], "AnyScope":false, "PreferredTypes":[]}]
#define TORCH_ASSERT_ONLY_METHOD_OPERATORS
#include <ATen/native/Cross.h>
#include <ATen/core/Tensor.h>
#include <ATen/Dispatch.h>
#include <ATen/TensorMeta.h>
#include <ATen/WrapDimUtils.h>
#include <ATen/ExpandUtils.h>
#include <ATen/native/Resize.h>
#include <ATen/MemoryOverlap.h>
#ifndef AT_PER_OPERATOR_HEADERS
#include <ATen/Functions.h>
#include <ATen/NativeFunctions.h>
#else
#include <ATen/ops/cross_native.h>
#include <ATen/ops/linalg_cross.h>
#include <ATen/ops/linalg_cross_native.h>
#endif
namespace at::meta {

// Structured-kernel meta function for linalg_cross: validates the inputs
// and allocates the (broadcast) output; no computation happens here.
TORCH_META_FUNC(linalg_cross)
(const Tensor & input, const Tensor & other, int64_t dim) {
  auto x_d = input.dim();
  auto y_d = other.dim();
  // This is to avoid things like
  // linalg.cross(torch.randn(2, 3), torch.randn(5, 2, 3), dim=2)
  TORCH_CHECK(x_d == y_d, "linalg.cross: inputs must have the same number of dimensions.");
  // The cross product is only defined for 3-vectors along `dim`.
  TORCH_CHECK(input.size(dim) == 3 && other.size(dim) == 3, "linalg.cross: inputs dimension ", dim, " must have length 3. Got ", input.size(dim), " and ", other.size(dim));
  // Broadcast the batch dimension of input and other.
  // Since the non-batch dimensions agree, this is the same as broadcast all the inputs
  auto out_size = infer_size(input.sizes(), other.sizes());
  set_output_raw_strided(0, out_size, {}, input.options());
}

} // namespace at::meta
namespace at::native {

DEFINE_DISPATCH(cross_stub);

// Resolve the cross-product dimension: use the explicit `dimension` when
// given, otherwise default to the first dimension of length 3.
static int64_t _default_cross_dim(const std::optional<int64_t> &dimension, SymIntArrayRef sizes) {
  // If dimension is not given, it defaults to the first dimension found with the size 3.
  // Note that this behaviour might be unexpected.
  // _default_cross_dim is called internally inside the cross implementation to calculate
  // the dim and finally cross delegates to the linalg_cross implementation with this dim
  if(dimension.has_value()) {
    return *dimension;
  }

  for(auto i : c10::irange(sizes.size())) {
    if(sizes[i] == 3) {
      return i;
    }
  }
  TORCH_CHECK(false, "no dimension of size 3 in input");
}

// torch.cross: legacy wrapper around linalg_cross with deprecated
// dim-defaulting behavior (warns when dim is omitted).
Tensor cross(const Tensor & input, const Tensor & other, const std::optional<int64_t> dimension) {
  if (!dimension) {
    TORCH_WARN_ONCE(
      "Using torch.cross without specifying the dim arg is deprecated.\n",
      "Please either pass the dim explicitly or simply use torch.linalg.cross.\n",
      "The default value of dim will change to agree with that of linalg.cross in a future release."
    );
  }
  auto dim = _default_cross_dim(dimension, input.sym_sizes());
  return at::linalg_cross(input, other, dim);
}

// Out-variant of torch.cross; same dim-defaulting rules as above.
Tensor & cross_out(const Tensor & input, const Tensor & other, const std::optional<int64_t> dimension, Tensor & out) {
  auto dim = _default_cross_dim(dimension, input.sym_sizes());
  return at::linalg_cross_out(out, input, other, dim);
}

// Structured kernel body for linalg_cross: expand both inputs to the
// pre-allocated output shape and dispatch to the device-specific stub.
// NOTE: the closing line of this namespace originally carried
// dataset-extraction residue ("| cpp | github | ..."); removed here.
TORCH_IMPL_FUNC(linalg_cross_out)
(const Tensor & input, const Tensor & other, int64_t dim, const Tensor & out) {
  at::assert_no_internal_overlap(out);
  at::assert_no_overlap(out, input);
  at::assert_no_overlap(out, other);
  dim = maybe_wrap_dim(dim, input.dim());
  auto out_size = out.sizes();
  // expand() produces broadcast views; no data is copied here.
  Tensor input_broadcasted = input.expand(out_size);
  Tensor other_broadcasted = other.expand(out_size);
  cross_stub(input.device().type(), out, input_broadcasted, other_broadcasted, dim);
}

} // namespace at::native
# Copyright (c) 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Weighers that weigh hosts by volume number in backends:
1. Volume Number Weigher. Weigh hosts by their volume number.
The default is to spread volumes across all hosts evenly. If you prefer
stacking, you can set the 'volume_number_multiplier' option to a positive
number and the weighing has the opposite effect of the default.
"""
from oslo.config import cfg
from cinder import db
from cinder.openstack.common import log as logging
from cinder.openstack.common.scheduler import weights
LOG = logging.getLogger(__name__)
# Weigher tunable: a negative multiplier (the default) spreads volumes
# across hosts; a positive value stacks them instead.
volume_number_weight_opts = [
    cfg.FloatOpt('volume_number_multiplier',
                 default=-1.0,
                 help='Multiplier used for weighing volume number. '
                      'Negative numbers mean to spread vs stack.'),
]

# Register the option on the global config object at import time so it is
# available as CONF.volume_number_multiplier.
CONF = cfg.CONF
CONF.register_opts(volume_number_weight_opts)
class VolumeNumberWeigher(weights.BaseHostWeigher):
    """Weigh hosts by the number of volumes they currently host.

    Combined with the (default) negative multiplier, hosts with fewer
    volumes win, spreading new volumes across backends.

    Note: the original final line carried dataset-extraction residue
    ("| unknown | codeparrot/..."), which has been removed.
    """

    def _weight_multiplier(self):
        """Override the weight multiplier."""
        return CONF.volume_number_multiplier

    def _weigh_object(self, host_state, weight_properties):
        """Less volume number weights win.

        We want spreading to be the default.
        """
        context = weight_properties['context']
        # count_only=True makes the db call return just the volume count.
        volume_number = db.volume_data_get_for_host(context=context,
                                                    host=host_state.host,
                                                    count_only=True)
        return volume_number
#!/usr/bin/env bash
# Copyright 2022 The Cockroach Authors.
#
# Use of this software is governed by the CockroachDB Software License
# included in the /LICENSE file.

# Build patched Go toolchains in a container and publish the resulting
# tarballs to the public bazel-artifacts GCS bucket.

set -euo pipefail

# Presumably consumed by log_into_gcloud from teamcity-support.sh — confirm.
google_credentials="$GOOGLE_EPHEMERAL_CREDENTIALS"
dir="$(dirname "$(dirname "$(dirname "$(dirname "${0}")")")")"
source "$dir/teamcity-support.sh"  # for log_into_gcloud
log_into_gcloud
set -x

tc_start_block "Build Go toolchains"
this_dir="$(cd "$(dirname "${0}")"; pwd)"
toplevel="$(dirname "$(dirname "$(dirname "$(dirname "$this_dir")")")")"
mkdir -p "${toplevel}"/artifacts
# We use a docker image mirror to avoid pulling from 3rd party repos, which sometimes have reliability issues.
# See https://cockroachlabs.atlassian.net/wiki/spaces/devinf/pages/3462594561/Docker+image+sync for the details.
docker run --rm -i ${tty-} -v "$this_dir"/build-and-publish-patched-go:/bootstrap \
  -v "${toplevel}"/artifacts:/artifacts \
  us-east1-docker.pkg.dev/crl-docker-sync/docker-io/library/ubuntu:focal /bootstrap/impl.sh
tc_end_block "Build Go toolchains"

tc_start_block "Publish artifacts"
loc=$(date +%Y%m%d-%H%M%S)
echo "$loc" > "${toplevel}"/artifacts/TIMESTAMP.txt
# BUG FIX: the loop originally searched "$root/artifacts", but $root is never
# defined in this script — the artifacts live under $toplevel.
for FILE in $(find "${toplevel}"/artifacts -name '*.tar.gz'); do
  BASE=$(basename "$FILE")
  # Darwin toolchains are signed/published through a separate pipeline.
  if [[ "$BASE" != *"darwin"* ]]; then
    gsutil cp "$FILE" gs://public-bazel-artifacts/go/"$loc"/"$BASE"
  fi
done
tc_end_block "Publish artifacts"
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""This module contains implementations of various third-party
authentication schemes.
All the classes in this file are class mixins designed to be used with
the `tornado.web.RequestHandler` class. They are used in two ways:
* On a login handler, use methods such as ``authenticate_redirect()``,
``authorize_redirect()``, and ``get_authenticated_user()`` to
establish the user's identity and store authentication tokens to your
database and/or cookies.
* In non-login handlers, use methods such as ``facebook_request()``
or ``twitter_request()`` to use the authentication tokens to make
requests to the respective services.
They all take slightly different arguments due to the fact all these
services implement authentication and authorization slightly differently.
See the individual service classes below for complete documentation.
Example usage for Google OAuth:
.. testcode::
class GoogleOAuth2LoginHandler(tornado.web.RequestHandler,
tornado.auth.GoogleOAuth2Mixin):
@tornado.gen.coroutine
def get(self):
if self.get_argument('code', False):
user = yield self.get_authenticated_user(
redirect_uri='http://your.site.com/auth/google',
code=self.get_argument('code'))
# Save the user with e.g. set_secure_cookie
else:
yield self.authorize_redirect(
redirect_uri='http://your.site.com/auth/google',
client_id=self.settings['google_oauth']['key'],
scope=['profile', 'email'],
response_type='code',
extra_params={'approval_prompt': 'auto'})
.. testoutput::
:hide:
.. versionchanged:: 4.0
All of the callback interfaces in this module are now guaranteed
to run their callback with an argument of ``None`` on error.
Previously some functions would do this while others would simply
terminate the request on their own. This change also ensures that
errors are more consistently reported through the ``Future`` interfaces.
"""
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
import functools
import hashlib
import hmac
import time
import uuid
from tornado.concurrent import TracebackFuture, return_future
from tornado import gen
from tornado import httpclient
from tornado import escape
from tornado.httputil import url_concat
from tornado.log import gen_log
from tornado.stack_context import ExceptionStackContext
from tornado.util import u, unicode_type, ArgReplacer
try:
import urlparse # py2
except ImportError:
import urllib.parse as urlparse # py3
try:
import urllib.parse as urllib_parse # py3
except ImportError:
import urllib as urllib_parse # py2
try:
long # py2
except NameError:
long = int # py3
class AuthError(Exception):
    """Raised when a third-party authentication or authorization step
    fails (e.g. an invalid OpenID response or a missing OAuth cookie).
    """
    pass
def _auth_future_to_callback(callback, future):
    """Bridge a resolved auth ``Future`` onto a legacy callback.

    An ``AuthError`` is logged and converted into a ``None`` result,
    matching this module's historical callback contract; any other
    exception propagates.
    """
    value = None
    try:
        value = future.result()
    except AuthError as err:
        gen_log.warning(str(err))
    callback(value)
def _auth_return_future(f):
    """Similar to tornado.concurrent.return_future, but uses the auth
    module's legacy callback interface.

    Note that when using this decorator the ``callback`` parameter
    inside the function will actually be a future.
    """
    replacer = ArgReplacer(f, 'callback')

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        future = TracebackFuture()
        # Swap any caller-supplied callback for the future; the original
        # callback is then invoked from the future's done-callback instead.
        callback, args, kwargs = replacer.replace(future, args, kwargs)
        if callback is not None:
            future.add_done_callback(
                functools.partial(_auth_future_to_callback, callback))

        def handle_exception(typ, value, tb):
            # Route exceptions raised inside f into the future, unless the
            # future has already completed (then let them propagate).
            if future.done():
                return False
            else:
                future.set_exc_info((typ, value, tb))
                return True
        # ExceptionStackContext catches errors raised asynchronously within f.
        with ExceptionStackContext(handle_exception):
            f(*args, **kwargs)
        return future
    return wrapper
class OpenIdMixin(object):
    """Abstract implementation of OpenID and Attribute Exchange.

    Class attributes:

    * ``_OPENID_ENDPOINT``: the identity provider's URI.
    """
    @return_future
    def authenticate_redirect(self, callback_uri=None,
                              ax_attrs=["name", "email", "language", "username"],
                              callback=None):
        """Redirects to the authentication URL for this service.

        After authentication, the service will redirect back to the given
        callback URI with additional parameters including ``openid.mode``.

        We request the given attributes for the authenticated user by
        default (name, email, language, and username). If you don't need
        all those attributes for your app, you can request fewer with
        the ax_attrs keyword argument.

        .. versionchanged:: 3.1
           Returns a `.Future` and takes an optional callback.  These are
           not strictly necessary as this method is synchronous,
           but they are supplied for consistency with
           `OAuthMixin.authorize_redirect`.
        """
        # NOTE(review): the list default for ax_attrs is shared across calls,
        # but it is never mutated here or in _openid_args, so this is safe.
        callback_uri = callback_uri or self.request.uri
        args = self._openid_args(callback_uri, ax_attrs=ax_attrs)
        self.redirect(self._OPENID_ENDPOINT + "?" + urllib_parse.urlencode(args))
        callback()

    @_auth_return_future
    def get_authenticated_user(self, callback, http_client=None):
        """Fetches the authenticated user data upon redirect.

        This method should be called by the handler that receives the
        redirect from the `authenticate_redirect()` method (which is
        often the same as the one that calls it; in that case you would
        call `get_authenticated_user` if the ``openid.mode`` parameter
        is present and `authenticate_redirect` if it is not).

        The result of this method will generally be used to set a cookie.
        """
        # Verify the OpenID response via direct request to the OP
        args = dict((k, v[-1]) for k, v in self.request.arguments.items())
        args["openid.mode"] = u("check_authentication")
        url = self._OPENID_ENDPOINT
        if http_client is None:
            http_client = self.get_auth_http_client()
        http_client.fetch(url, functools.partial(
            self._on_authentication_verified, callback),
            method="POST", body=urllib_parse.urlencode(args))

    def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None):
        """Build the OpenID 2.0 ``checkid_setup`` query arguments, including
        Attribute Exchange and (optionally) OAuth-extension parameters.
        """
        url = urlparse.urljoin(self.request.full_url(), callback_uri)
        args = {
            "openid.ns": "http://specs.openid.net/auth/2.0",
            "openid.claimed_id":
            "http://specs.openid.net/auth/2.0/identifier_select",
            "openid.identity":
            "http://specs.openid.net/auth/2.0/identifier_select",
            "openid.return_to": url,
            "openid.realm": urlparse.urljoin(url, '/'),
            "openid.mode": "checkid_setup",
        }
        if ax_attrs:
            args.update({
                "openid.ns.ax": "http://openid.net/srv/ax/1.0",
                "openid.ax.mode": "fetch_request",
            })
            ax_attrs = set(ax_attrs)
            required = []
            if "name" in ax_attrs:
                # "name" expands to the three AX name attributes.
                ax_attrs -= set(["name", "firstname", "fullname", "lastname"])
                required += ["firstname", "fullname", "lastname"]
                args.update({
                    "openid.ax.type.firstname":
                    "http://axschema.org/namePerson/first",
                    "openid.ax.type.fullname":
                    "http://axschema.org/namePerson",
                    "openid.ax.type.lastname":
                    "http://axschema.org/namePerson/last",
                })
            known_attrs = {
                "email": "http://axschema.org/contact/email",
                "language": "http://axschema.org/pref/language",
                "username": "http://axschema.org/namePerson/friendly",
            }
            for name in ax_attrs:
                args["openid.ax.type." + name] = known_attrs[name]
                required.append(name)
            args["openid.ax.required"] = ",".join(required)
        if oauth_scope:
            args.update({
                "openid.ns.oauth":
                "http://specs.openid.net/extensions/oauth/1.0",
                "openid.oauth.consumer": self.request.host.split(":")[0],
                "openid.oauth.scope": oauth_scope,
            })
        return args

    def _on_authentication_verified(self, future, response):
        """Parse the OP's check_authentication response and resolve the
        future with a user dict assembled from the AX attributes.
        """
        if response.error or b"is_valid:true" not in response.body:
            future.set_exception(AuthError(
                "Invalid OpenID response: %s" % (response.error or
                                                 response.body)))
            return

        # Make sure we got back at least an email from attribute exchange
        ax_ns = None
        for name in self.request.arguments:
            if name.startswith("openid.ns.") and \
                    self.get_argument(name) == u("http://openid.net/srv/ax/1.0"):
                ax_ns = name[10:]  # strip the "openid.ns." prefix
                break

        def get_ax_arg(uri):
            # Look up an AX value by its type URI within the detected namespace.
            if not ax_ns:
                return u("")
            prefix = "openid." + ax_ns + ".type."
            ax_name = None
            for name in self.request.arguments.keys():
                if self.get_argument(name) == uri and name.startswith(prefix):
                    part = name[len(prefix):]
                    ax_name = "openid." + ax_ns + ".value." + part
                    break
            if not ax_name:
                return u("")
            return self.get_argument(ax_name, u(""))

        email = get_ax_arg("http://axschema.org/contact/email")
        name = get_ax_arg("http://axschema.org/namePerson")
        first_name = get_ax_arg("http://axschema.org/namePerson/first")
        last_name = get_ax_arg("http://axschema.org/namePerson/last")
        username = get_ax_arg("http://axschema.org/namePerson/friendly")
        locale = get_ax_arg("http://axschema.org/pref/language").lower()
        user = dict()
        name_parts = []
        if first_name:
            user["first_name"] = first_name
            name_parts.append(first_name)
        if last_name:
            user["last_name"] = last_name
            name_parts.append(last_name)
        if name:
            user["name"] = name
        elif name_parts:
            user["name"] = u(" ").join(name_parts)
        elif email:
            # Fall back to the mailbox part of the email address.
            user["name"] = email.split("@")[0]
        if email:
            user["email"] = email
        if locale:
            user["locale"] = locale
        if username:
            user["username"] = username
        claimed_id = self.get_argument("openid.claimed_id", None)
        if claimed_id:
            user["claimed_id"] = claimed_id
        future.set_result(user)

    def get_auth_http_client(self):
        """Returns the `.AsyncHTTPClient` instance to be used for auth requests.

        May be overridden by subclasses to use an HTTP client other than
        the default.
        """
        return httpclient.AsyncHTTPClient()
class OAuthMixin(object):
"""Abstract implementation of OAuth 1.0 and 1.0a.
See `TwitterMixin` below for an example implementation.
Class attributes:
* ``_OAUTH_AUTHORIZE_URL``: The service's OAuth authorization url.
* ``_OAUTH_ACCESS_TOKEN_URL``: The service's OAuth access token url.
* ``_OAUTH_VERSION``: May be either "1.0" or "1.0a".
* ``_OAUTH_NO_CALLBACKS``: Set this to True if the service requires
advance registration of callbacks.
Subclasses must also override the `_oauth_get_user_future` and
`_oauth_consumer_token` methods.
"""
@return_future
def authorize_redirect(self, callback_uri=None, extra_params=None,
                       http_client=None, callback=None):
    """Redirects the user to obtain OAuth authorization for this service.

    The ``callback_uri`` may be omitted if you have previously
    registered a callback URI with the third-party service. For
    some services (including Friendfeed), you must use a
    previously-registered callback URI and cannot specify a
    callback via this method.

    This method sets a cookie called ``_oauth_request_token`` which is
    subsequently used (and cleared) in `get_authenticated_user` for
    security purposes.

    Note that this method is asynchronous, although it calls
    `.RequestHandler.finish` for you so it may not be necessary
    to pass a callback or use the `.Future` it returns. However,
    if this method is called from a function decorated with
    `.gen.coroutine`, you must call it with ``yield`` to keep the
    response from being closed prematurely.

    .. versionchanged:: 3.1
       Now returns a `.Future` and takes an optional callback, for
       compatibility with `.gen.coroutine`.
    """
    if callback_uri and getattr(self, "_OAUTH_NO_CALLBACKS", False):
        raise Exception("This service does not support oauth_callback")
    if http_client is None:
        http_client = self.get_auth_http_client()
    if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
        # OAuth 1.0a: callback_uri/extra_params ride along with the
        # request-token request.
        http_client.fetch(
            self._oauth_request_token_url(callback_uri=callback_uri,
                                          extra_params=extra_params),
            functools.partial(
                self._on_request_token,
                self._OAUTH_AUTHORIZE_URL,
                callback_uri,
                callback))
    else:
        # OAuth 1.0: the request-token request carries no callback.
        http_client.fetch(
            self._oauth_request_token_url(),
            functools.partial(
                self._on_request_token, self._OAUTH_AUTHORIZE_URL,
                callback_uri,
                callback))
@_auth_return_future
def get_authenticated_user(self, callback, http_client=None):
    """Gets the OAuth authorized user and access token.

    This method should be called from the handler for your
    OAuth callback URL to complete the registration process. We run the
    callback with the authenticated user dictionary.  This dictionary
    will contain an ``access_key`` which can be used to make authorized
    requests to this service on behalf of the user.  The dictionary will
    also contain other fields such as ``name``, depending on the service
    used.
    """
    # Due to the _auth_return_future decorator, `callback` is a future here.
    future = callback
    request_key = escape.utf8(self.get_argument("oauth_token"))
    oauth_verifier = self.get_argument("oauth_verifier", None)
    request_cookie = self.get_cookie("_oauth_request_token")
    if not request_cookie:
        future.set_exception(AuthError(
            "Missing OAuth request token cookie"))
        return
    self.clear_cookie("_oauth_request_token")
    # The cookie stores "<b64 key>|<b64 secret>" as set by _on_request_token.
    cookie_key, cookie_secret = [base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")]
    if cookie_key != request_key:
        # The token returned by the service must match what we handed out.
        future.set_exception(AuthError(
            "Request token does not match cookie"))
        return
    token = dict(key=cookie_key, secret=cookie_secret)
    if oauth_verifier:
        token["verifier"] = oauth_verifier
    if http_client is None:
        http_client = self.get_auth_http_client()
    http_client.fetch(self._oauth_access_token_url(token),
                      functools.partial(self._on_access_token, callback))
def _oauth_request_token_url(self, callback_uri=None, extra_params=None):
    """Build the signed URL used to obtain an OAuth request token."""
    consumer_token = self._oauth_consumer_token()
    url = self._OAUTH_REQUEST_TOKEN_URL
    args = dict(
        oauth_consumer_key=escape.to_basestring(consumer_token["key"]),
        oauth_signature_method="HMAC-SHA1",
        oauth_timestamp=str(int(time.time())),
        oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)),
        oauth_version="1.0",
    )
    if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
        # 1.0a sends oauth_callback with the request-token request;
        # "oob" (out-of-band) means no redirect is possible.
        if callback_uri == "oob":
            args["oauth_callback"] = "oob"
        elif callback_uri:
            args["oauth_callback"] = urlparse.urljoin(
                self.request.full_url(), callback_uri)
        if extra_params:
            args.update(extra_params)
        signature = _oauth10a_signature(consumer_token, "GET", url, args)
    else:
        signature = _oauth_signature(consumer_token, "GET", url, args)

    args["oauth_signature"] = signature
    return url + "?" + urllib_parse.urlencode(args)
    def _on_request_token(self, authorize_url, callback_uri, callback,
                          response):
        """Handle the request-token response and send the user to authorize.

        Stores the token as base64 ``key|secret`` in the
        ``_oauth_request_token`` cookie so ``get_authenticated_user`` can
        verify it on the way back, then redirects to ``authorize_url``.
        """
        if response.error:
            raise Exception("Could not get request token: %s" % response.error)
        request_token = _oauth_parse_response(response.body)
        data = (base64.b64encode(escape.utf8(request_token["key"])) + b"|" +
                base64.b64encode(escape.utf8(request_token["secret"])))
        # NOTE(review): cookie is set unsigned/plain here — confirm this is
        # acceptable for the deployment (it only holds a short-lived token).
        self.set_cookie("_oauth_request_token", data)
        args = dict(oauth_token=request_token["key"])
        if callback_uri == "oob":
            # Out-of-band flow: return the authorize URL to the client
            # instead of redirecting the browser.
            self.finish(authorize_url + "?" + urllib_parse.urlencode(args))
            callback()
            return
        elif callback_uri:
            args["oauth_callback"] = urlparse.urljoin(
                self.request.full_url(), callback_uri)
        self.redirect(authorize_url + "?" + urllib_parse.urlencode(args))
        callback()
    def _oauth_access_token_url(self, request_token):
        """Build the signed URL used to exchange ``request_token`` for an
        access token, including the 1.0a ``oauth_verifier`` when present.
        """
        consumer_token = self._oauth_consumer_token()
        url = self._OAUTH_ACCESS_TOKEN_URL
        args = dict(
            oauth_consumer_key=escape.to_basestring(consumer_token["key"]),
            oauth_token=escape.to_basestring(request_token["key"]),
            oauth_signature_method="HMAC-SHA1",
            oauth_timestamp=str(int(time.time())),
            oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)),
            oauth_version="1.0",
        )
        if "verifier" in request_token:
            args["oauth_verifier"] = request_token["verifier"]
        # Signature algorithm depends on the declared OAuth version.
        if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
            signature = _oauth10a_signature(consumer_token, "GET", url, args,
                                            request_token)
        else:
            signature = _oauth_signature(consumer_token, "GET", url, args,
                                         request_token)
        args["oauth_signature"] = signature
        return url + "?" + urllib_parse.urlencode(args)
def _on_access_token(self, future, response):
if response.error:
future.set_exception(AuthError("Could not fetch access token"))
return
access_token = _oauth_parse_response(response.body)
self._oauth_get_user_future(access_token).add_done_callback(
functools.partial(self._on_oauth_get_user, access_token, future))
    def _oauth_consumer_token(self):
        """Subclasses must override this to return their OAuth consumer keys.
        The return value should be a `dict` with keys ``key`` and ``secret``.
        """
        # Abstract hook; see e.g. TwitterMixin._oauth_consumer_token below.
        raise NotImplementedError()
    @return_future
    def _oauth_get_user_future(self, access_token, callback):
        """Subclasses must override this to get basic information about the
        user.
        Should return a `.Future` whose result is a dictionary
        containing information about the user, which may have been
        retrieved by using ``access_token`` to make a request to the
        service.
        The access token will be added to the returned dictionary to make
        the result of `get_authenticated_user`.
        For backwards compatibility, the callback-based ``_oauth_get_user``
        method is also supported.
        """
        # By default, call the old-style _oauth_get_user, but new code
        # should override this method instead.
        # (``@return_future`` supplies ``callback`` and wraps this in a
        # Future for callers.)
        self._oauth_get_user(access_token, callback)
    def _oauth_get_user(self, access_token, callback):
        # Legacy callback-style hook; new code should override
        # _oauth_get_user_future instead.
        raise NotImplementedError()
def _on_oauth_get_user(self, access_token, future, user_future):
if user_future.exception() is not None:
future.set_exception(user_future.exception())
return
user = user_future.result()
if not user:
future.set_exception(AuthError("Error getting user"))
return
user["access_token"] = access_token
future.set_result(user)
def _oauth_request_parameters(self, url, access_token, parameters={},
method="GET"):
"""Returns the OAuth parameters as a dict for the given request.
parameters should include all POST arguments and query string arguments
that will be sent with the request.
"""
consumer_token = self._oauth_consumer_token()
base_args = dict(
oauth_consumer_key=escape.to_basestring(consumer_token["key"]),
oauth_token=escape.to_basestring(access_token["key"]),
oauth_signature_method="HMAC-SHA1",
oauth_timestamp=str(int(time.time())),
oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)),
oauth_version="1.0",
)
args = {}
args.update(base_args)
args.update(parameters)
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
signature = _oauth10a_signature(consumer_token, method, url, args,
access_token)
else:
signature = _oauth_signature(consumer_token, method, url, args,
access_token)
base_args["oauth_signature"] = escape.to_basestring(signature)
return base_args
    def get_auth_http_client(self):
        """Returns the `.AsyncHTTPClient` instance to be used for auth requests.
        May be overridden by subclasses to use an HTTP client other than
        the default.
        """
        # NOTE(review): AsyncHTTPClient() typically returns a shared
        # per-IOLoop instance rather than a fresh client — confirm against
        # the tornado.httpclient docs for the version in use.
        return httpclient.AsyncHTTPClient()
class OAuth2Mixin(object):
    """Abstract implementation of OAuth 2.0.
    See `FacebookGraphMixin` or `GoogleOAuth2Mixin` below for example
    implementations.
    Class attributes:
    * ``_OAUTH_AUTHORIZE_URL``: The service's authorization url.
    * ``_OAUTH_ACCESS_TOKEN_URL``: The service's access token url.
    """
    @return_future
    def authorize_redirect(self, redirect_uri=None, client_id=None,
                           client_secret=None, extra_params=None,
                           callback=None, scope=None, response_type="code"):
        """Redirects the user to obtain OAuth authorization for this service.
        Some providers require that you register a redirect URL with
        your application instead of passing one via this method. You
        should call this method to log the user in, and then call
        ``get_authenticated_user`` in the handler for your
        redirect URL to complete the authorization process.
        .. versionchanged:: 3.1
           Returns a `.Future` and takes an optional callback. These are
           not strictly necessary as this method is synchronous,
           but they are supplied for consistency with
           `OAuthMixin.authorize_redirect`.
        """
        args = {
            "redirect_uri": redirect_uri,
            "client_id": client_id,
            "response_type": response_type
        }
        if extra_params:
            args.update(extra_params)
        if scope:
            args['scope'] = ' '.join(scope)
        self.redirect(
            url_concat(self._OAUTH_AUTHORIZE_URL, args))
        # Synchronous method: the redirect has been issued, so resolve the
        # Future immediately via the @return_future-supplied callback.
        callback()
    def _oauth_request_token_url(self, redirect_uri=None, client_id=None,
                                 client_secret=None, code=None,
                                 extra_params=None):
        # Despite the name (kept for symmetry with OAuthMixin), this builds
        # the OAuth2 *access token* URL from the authorization code.
        url = self._OAUTH_ACCESS_TOKEN_URL
        args = dict(
            redirect_uri=redirect_uri,
            code=code,
            client_id=client_id,
            client_secret=client_secret,
        )
        if extra_params:
            args.update(extra_params)
        return url_concat(url, args)
    @_auth_return_future
    def oauth2_request(self, url, callback, access_token=None,
                       post_args=None, **args):
        """Fetches the given URL with an OAuth2 access token.
        If the request is a POST, ``post_args`` should be provided. Query
        string arguments should be given as keyword arguments.
        Example usage:
        ..testcode::
            class MainHandler(tornado.web.RequestHandler,
                              tornado.auth.FacebookGraphMixin):
                @tornado.web.authenticated
                @tornado.gen.coroutine
                def get(self):
                    new_entry = yield self.oauth2_request(
                        "https://graph.facebook.com/me/feed",
                        post_args={"message": "I am posting from my Tornado application!"},
                        access_token=self.current_user["access_token"])
                    if not new_entry:
                        # Call failed; perhaps missing permission?
                        yield self.authorize_redirect()
                        return
                    self.finish("Posted a message!")
        .. testoutput::
           :hide:
        .. versionadded:: 4.3
        """
        all_args = {}
        if access_token:
            all_args["access_token"] = access_token
        all_args.update(args)
        if all_args:
            url += "?" + urllib_parse.urlencode(all_args)
        callback = functools.partial(self._on_oauth2_request, callback)
        http = self.get_auth_http_client()
        # POST bodies are form-encoded; everything else is a plain GET.
        if post_args is not None:
            http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args),
                       callback=callback)
        else:
            http.fetch(url, callback=callback)
    def _on_oauth2_request(self, future, response):
        # Resolve the request Future with the decoded JSON body, or an
        # AuthError carrying the HTTP error and URL.
        if response.error:
            future.set_exception(AuthError("Error response %s fetching %s" %
                                           (response.error, response.request.url)))
            return
        future.set_result(escape.json_decode(response.body))
    def get_auth_http_client(self):
        """Returns the `.AsyncHTTPClient` instance to be used for auth requests.
        May be overridden by subclasses to use an HTTP client other than
        the default.
        .. versionadded:: 4.3
        """
        return httpclient.AsyncHTTPClient()
class TwitterMixin(OAuthMixin):
    """Twitter OAuth authentication.
    To authenticate with Twitter, register your application with
    Twitter at http://twitter.com/apps. Then copy your Consumer Key
    and Consumer Secret to the application
    `~tornado.web.Application.settings` ``twitter_consumer_key`` and
    ``twitter_consumer_secret``. Use this mixin on the handler for the
    URL you registered as your application's callback URL.
    When your application is set up, you can use this mixin like this
    to authenticate the user with Twitter and get access to their stream:
    .. testcode::
        class TwitterLoginHandler(tornado.web.RequestHandler,
                                  tornado.auth.TwitterMixin):
            @tornado.gen.coroutine
            def get(self):
                if self.get_argument("oauth_token", None):
                    user = yield self.get_authenticated_user()
                    # Save the user using e.g. set_secure_cookie()
                else:
                    yield self.authorize_redirect()
    .. testoutput::
       :hide:
    The user object returned by `~OAuthMixin.get_authenticated_user`
    includes the attributes ``username``, ``name``, ``access_token``,
    and all of the custom Twitter user attributes described at
    https://dev.twitter.com/docs/api/1.1/get/users/show
    """
    _OAUTH_REQUEST_TOKEN_URL = "https://api.twitter.com/oauth/request_token"
    _OAUTH_ACCESS_TOKEN_URL = "https://api.twitter.com/oauth/access_token"
    _OAUTH_AUTHORIZE_URL = "https://api.twitter.com/oauth/authorize"
    _OAUTH_AUTHENTICATE_URL = "https://api.twitter.com/oauth/authenticate"
    _OAUTH_NO_CALLBACKS = False
    _TWITTER_BASE_URL = "https://api.twitter.com/1.1"
    @return_future
    def authenticate_redirect(self, callback_uri=None, callback=None):
        """Just like `~OAuthMixin.authorize_redirect`, but
        auto-redirects if authorized.
        This is generally the right interface to use if you are using
        Twitter for single-sign on.
        .. versionchanged:: 3.1
           Now returns a `.Future` and takes an optional callback, for
           compatibility with `.gen.coroutine`.
        """
        http = self.get_auth_http_client()
        # Same flow as authorize_redirect, but sends the user to the
        # "authenticate" endpoint which skips the prompt when already
        # authorized.
        http.fetch(self._oauth_request_token_url(callback_uri=callback_uri),
                   functools.partial(
                       self._on_request_token, self._OAUTH_AUTHENTICATE_URL,
                       None, callback))
    @_auth_return_future
    def twitter_request(self, path, callback=None, access_token=None,
                        post_args=None, **args):
        """Fetches the given API path, e.g., ``statuses/user_timeline/btaylor``
        The path should not include the format or API version number.
        (we automatically use JSON format and API version 1).
        If the request is a POST, ``post_args`` should be provided. Query
        string arguments should be given as keyword arguments.
        All the Twitter methods are documented at http://dev.twitter.com/
        Many methods require an OAuth access token which you can
        obtain through `~OAuthMixin.authorize_redirect` and
        `~OAuthMixin.get_authenticated_user`. The user returned through that
        process includes an 'access_token' attribute that can be used
        to make authenticated requests via this method. Example
        usage:
        .. testcode::
            class MainHandler(tornado.web.RequestHandler,
                              tornado.auth.TwitterMixin):
                @tornado.web.authenticated
                @tornado.gen.coroutine
                def get(self):
                    new_entry = yield self.twitter_request(
                        "/statuses/update",
                        post_args={"status": "Testing Tornado Web Server"},
                        access_token=self.current_user["access_token"])
                    if not new_entry:
                        # Call failed; perhaps missing permission?
                        yield self.authorize_redirect()
                        return
                    self.finish("Posted a message!")
        .. testoutput::
           :hide:
        """
        if path.startswith('http:') or path.startswith('https:'):
            # Raw urls are useful for e.g. search which doesn't follow the
            # usual pattern: http://search.twitter.com/search.json
            url = path
        else:
            url = self._TWITTER_BASE_URL + path + ".json"
        # Add the OAuth resource request signature if we have credentials
        if access_token:
            all_args = {}
            all_args.update(args)
            all_args.update(post_args or {})
            method = "POST" if post_args is not None else "GET"
            # Sign over query + POST args; the oauth_* params go in the
            # query string.
            oauth = self._oauth_request_parameters(
                url, access_token, all_args, method=method)
            args.update(oauth)
        if args:
            url += "?" + urllib_parse.urlencode(args)
        http = self.get_auth_http_client()
        http_callback = functools.partial(self._on_twitter_request, callback)
        if post_args is not None:
            http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args),
                       callback=http_callback)
        else:
            http.fetch(url, callback=http_callback)
    def _on_twitter_request(self, future, response):
        # Resolve with decoded JSON, or an AuthError naming the failed URL.
        if response.error:
            future.set_exception(AuthError(
                "Error response %s fetching %s" % (response.error,
                                                   response.request.url)))
            return
        future.set_result(escape.json_decode(response.body))
    def _oauth_consumer_token(self):
        # Consumer credentials come from application settings; both are
        # required.
        self.require_setting("twitter_consumer_key", "Twitter OAuth")
        self.require_setting("twitter_consumer_secret", "Twitter OAuth")
        return dict(
            key=self.settings["twitter_consumer_key"],
            secret=self.settings["twitter_consumer_secret"])
    @gen.coroutine
    def _oauth_get_user_future(self, access_token):
        user = yield self.twitter_request(
            "/account/verify_credentials",
            access_token=access_token)
        if user:
            # Twitter reports the handle as "screen_name"; mirror it under
            # the generic "username" key used by this module.
            user["username"] = user["screen_name"]
        raise gen.Return(user)
class GoogleOAuth2Mixin(OAuth2Mixin):
    """Google authentication using OAuth2.
    In order to use, register your application with Google and copy the
    relevant parameters to your application settings.
    * Go to the Google Dev Console at http://console.developers.google.com
    * Select a project, or create a new one.
    * In the sidebar on the left, select APIs & Auth.
    * In the list of APIs, find the Google+ API service and set it to ON.
    * In the sidebar on the left, select Credentials.
    * In the OAuth section of the page, select Create New Client ID.
    * Set the Redirect URI to point to your auth handler
    * Copy the "Client secret" and "Client ID" to the application settings as
      {"google_oauth": {"key": CLIENT_ID, "secret": CLIENT_SECRET}}
    .. versionadded:: 3.2
    """
    _OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/auth"
    _OAUTH_ACCESS_TOKEN_URL = "https://accounts.google.com/o/oauth2/token"
    _OAUTH_USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo"
    _OAUTH_NO_CALLBACKS = False
    _OAUTH_SETTINGS_KEY = 'google_oauth'
    @_auth_return_future
    def get_authenticated_user(self, redirect_uri, code, callback):
        """Handles the login for the Google user, returning an access token.
        The result is a dictionary containing an ``access_token`` field
        ([among others](https://developers.google.com/identity/protocols/OAuth2WebServer#handlingtheresponse)).
        Unlike other ``get_authenticated_user`` methods in this package,
        this method does not return any additional information about the user.
        The returned access token can be used with `OAuth2Mixin.oauth2_request`
        to request additional information (perhaps from
        ``https://www.googleapis.com/oauth2/v2/userinfo``)
        Example usage:
        .. testcode::
            class GoogleOAuth2LoginHandler(tornado.web.RequestHandler,
                                           tornado.auth.GoogleOAuth2Mixin):
                @tornado.gen.coroutine
                def get(self):
                    if self.get_argument('code', False):
                        access = yield self.get_authenticated_user(
                            redirect_uri='http://your.site.com/auth/google',
                            code=self.get_argument('code'))
                        user = yield self.oauth2_request(
                            "https://www.googleapis.com/oauth2/v1/userinfo",
                            access_token=access["access_token"])
                        # Save the user and access token with
                        # e.g. set_secure_cookie.
                    else:
                        yield self.authorize_redirect(
                            redirect_uri='http://your.site.com/auth/google',
                            client_id=self.settings['google_oauth']['key'],
                            scope=['profile', 'email'],
                            response_type='code',
                            extra_params={'approval_prompt': 'auto'})
        .. testoutput::
           :hide:
        """
        http = self.get_auth_http_client()
        # Standard authorization-code exchange, POSTed as a form body.
        body = urllib_parse.urlencode({
            "redirect_uri": redirect_uri,
            "code": code,
            "client_id": self.settings[self._OAUTH_SETTINGS_KEY]['key'],
            "client_secret": self.settings[self._OAUTH_SETTINGS_KEY]['secret'],
            "grant_type": "authorization_code",
        })
        http.fetch(self._OAUTH_ACCESS_TOKEN_URL,
                   functools.partial(self._on_access_token, callback),
                   method="POST", headers={'Content-Type': 'application/x-www-form-urlencoded'}, body=body)
    def _on_access_token(self, future, response):
        """Callback function for the exchange to the access token."""
        if response.error:
            # The whole response repr goes into the error message.
            future.set_exception(AuthError('Google auth error: %s' % str(response)))
            return
        args = escape.json_decode(response.body)
        future.set_result(args)
class FacebookGraphMixin(OAuth2Mixin):
    """Facebook authentication using the new Graph API and OAuth2."""
    _OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?"
    _OAUTH_AUTHORIZE_URL = "https://www.facebook.com/dialog/oauth?"
    _OAUTH_NO_CALLBACKS = False
    _FACEBOOK_BASE_URL = "https://graph.facebook.com"
    @_auth_return_future
    def get_authenticated_user(self, redirect_uri, client_id, client_secret,
                               code, callback, extra_fields=None):
        """Handles the login for the Facebook user, returning a user object.
        Example usage:
        .. testcode::
            class FacebookGraphLoginHandler(tornado.web.RequestHandler,
                                            tornado.auth.FacebookGraphMixin):
                @tornado.gen.coroutine
                def get(self):
                    if self.get_argument("code", False):
                        user = yield self.get_authenticated_user(
                            redirect_uri='/auth/facebookgraph/',
                            client_id=self.settings["facebook_api_key"],
                            client_secret=self.settings["facebook_secret"],
                            code=self.get_argument("code"))
                        # Save the user with e.g. set_secure_cookie
                    else:
                        yield self.authorize_redirect(
                            redirect_uri='/auth/facebookgraph/',
                            client_id=self.settings["facebook_api_key"],
                            extra_params={"scope": "read_stream,offline_access"})
        .. testoutput::
           :hide:
        """
        http = self.get_auth_http_client()
        args = {
            "redirect_uri": redirect_uri,
            "code": code,
            "client_id": client_id,
            "client_secret": client_secret,
        }
        # Baseline profile fields, extended by the caller's extra_fields.
        fields = set(['id', 'name', 'first_name', 'last_name',
                      'locale', 'picture', 'link'])
        if extra_fields:
            fields.update(extra_fields)
        http.fetch(self._oauth_request_token_url(**args),
                   functools.partial(self._on_access_token, redirect_uri, client_id,
                                     client_secret, callback, fields))
    def _on_access_token(self, redirect_uri, client_id, client_secret,
                         future, fields, response):
        if response.error:
            future.set_exception(AuthError('Facebook auth error: %s' % str(response)))
            return
        # parse_qs yields lists; [-1] takes the last access_token value.
        # NOTE(review): "expires" is kept as the raw parse_qs list (or None),
        # not a scalar — confirm downstream consumers expect that.
        args = escape.parse_qs_bytes(escape.native_str(response.body))
        session = {
            "access_token": args["access_token"][-1],
            "expires": args.get("expires")
        }
        self.facebook_request(
            path="/me",
            callback=functools.partial(
                self._on_get_user_info, future, session, fields),
            access_token=session["access_token"],
            fields=",".join(fields)
        )
    def _on_get_user_info(self, future, session, fields, user):
        if user is None:
            future.set_result(None)
            return
        # Copy only the requested fields (missing ones become None), then
        # attach the session's token and expiry.
        fieldmap = {}
        for field in fields:
            fieldmap[field] = user.get(field)
        fieldmap.update({"access_token": session["access_token"], "session_expires": session.get("expires")})
        future.set_result(fieldmap)
    @_auth_return_future
    def facebook_request(self, path, callback, access_token=None,
                         post_args=None, **args):
        """Fetches the given relative API path, e.g., "/btaylor/picture"
        If the request is a POST, ``post_args`` should be provided. Query
        string arguments should be given as keyword arguments.
        An introduction to the Facebook Graph API can be found at
        http://developers.facebook.com/docs/api
        Many methods require an OAuth access token which you can
        obtain through `~OAuth2Mixin.authorize_redirect` and
        `get_authenticated_user`. The user returned through that
        process includes an ``access_token`` attribute that can be
        used to make authenticated requests via this method.
        Example usage:
        ..testcode::
            class MainHandler(tornado.web.RequestHandler,
                              tornado.auth.FacebookGraphMixin):
                @tornado.web.authenticated
                @tornado.gen.coroutine
                def get(self):
                    new_entry = yield self.facebook_request(
                        "/me/feed",
                        post_args={"message": "I am posting from my Tornado application!"},
                        access_token=self.current_user["access_token"])
                    if not new_entry:
                        # Call failed; perhaps missing permission?
                        yield self.authorize_redirect()
                        return
                    self.finish("Posted a message!")
        .. testoutput::
           :hide:
        The given path is relative to ``self._FACEBOOK_BASE_URL``,
        by default "https://graph.facebook.com".
        This method is a wrapper around `OAuth2Mixin.oauth2_request`;
        the only difference is that this method takes a relative path,
        while ``oauth2_request`` takes a complete url.
        .. versionchanged:: 3.1
           Added the ability to override ``self._FACEBOOK_BASE_URL``.
        """
        url = self._FACEBOOK_BASE_URL + path
        return self.oauth2_request(url, callback, access_token,
                                   post_args, **args)
def _oauth_signature(consumer_token, method, url, parameters=None, token=None):
    """Calculates the HMAC-SHA1 OAuth signature for the given request.
    See http://oauth.net/core/1.0/#signing_process

    Returns the base64-encoded signature as bytes (no trailing newline).
    """
    # None default instead of a shared mutable {}; the dict is only read
    # here, but a mutable default is an accident waiting to happen.
    if parameters is None:
        parameters = {}
    parts = urlparse.urlparse(url)
    scheme, netloc, path = parts[:3]
    normalized_url = scheme.lower() + "://" + netloc.lower() + path
    # Signature base string: METHOD & normalized-url & sorted params,
    # each component percent-escaped.
    base_elems = []
    base_elems.append(method.upper())
    base_elems.append(normalized_url)
    base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
                               for k, v in sorted(parameters.items())))
    base_string = "&".join(_oauth_escape(e) for e in base_elems)
    # Key is consumer-secret & token-secret (empty if no token).
    key_elems = [escape.utf8(consumer_token["secret"])]
    key_elems.append(escape.utf8(token["secret"] if token else ""))
    key = b"&".join(key_elems)
    mac = hmac.new(key, escape.utf8(base_string), hashlib.sha1)  # renamed from `hash` (shadowed builtin)
    # b2a_base64 appends a newline; strip it.
    return binascii.b2a_base64(mac.digest())[:-1]
def _oauth10a_signature(consumer_token, method, url, parameters=None, token=None):
    """Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request.
    See http://oauth.net/core/1.0a/#signing_process

    Differs from `_oauth_signature` in that the key secrets are
    percent-encoded (with ``~`` left unescaped) before joining.
    Returns the base64-encoded signature as bytes (no trailing newline).
    """
    # None default instead of a shared mutable {}; the dict is only read.
    if parameters is None:
        parameters = {}
    parts = urlparse.urlparse(url)
    scheme, netloc, path = parts[:3]
    normalized_url = scheme.lower() + "://" + netloc.lower() + path
    base_elems = []
    base_elems.append(method.upper())
    base_elems.append(normalized_url)
    base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
                               for k, v in sorted(parameters.items())))
    base_string = "&".join(_oauth_escape(e) for e in base_elems)
    # 1.0a: secrets are quoted before being joined into the HMAC key.
    key_elems = [escape.utf8(urllib_parse.quote(consumer_token["secret"], safe='~'))]
    key_elems.append(escape.utf8(urllib_parse.quote(token["secret"], safe='~') if token else ""))
    key = b"&".join(key_elems)
    mac = hmac.new(key, escape.utf8(base_string), hashlib.sha1)  # renamed from `hash` (shadowed builtin)
    return binascii.b2a_base64(mac.digest())[:-1]
def _oauth_escape(val):
    """Percent-encode *val* for OAuth, leaving only ``~`` unescaped."""
    encoded = val.encode("utf-8") if isinstance(val, unicode_type) else val
    return urllib_parse.quote(encoded, safe="~")
def _oauth_parse_response(body):
    """Parse an OAuth token response body into a dict.

    The result always has ``key`` and ``secret`` entries; any extra
    parameters the provider returned are merged in under their own names.
    """
    # No official encoding is defined for oauth responses and non-ascii is
    # essentially unseen in practice: native_str leaves bytes alone on
    # python 2 and decodes as utf8 on python 3.
    text = escape.native_str(body)
    params = urlparse.parse_qs(text, keep_blank_values=False)
    token = {
        "key": params["oauth_token"][0],
        "secret": params["oauth_token_secret"][0],
    }
    # Merge the provider's extra parameters into the token dict.
    reserved = ("oauth_token", "oauth_token_secret")
    extras = dict((name, values[0]) for name, values in params.items()
                  if name not in reserved)
    token.update(extras)
    return token
import unittest
from biicode.server.model.social_account import SocialAccount, SocialAccountToken
from biicode.server.model.epoch.utc_datetime import UtcDatetime
import datetime
class SocialAccountTest(unittest.TestCase):
    def setUp(self):
        # Shared timestamp fixture, built from the current time and passed
        # through UtcDatetime.deserialize so it matches the model's type.
        self.utc_datetime = UtcDatetime.deserialize(datetime.datetime.now())
def test_social_token_serialization(self):
social_token = SocialAccountToken("xxzc", "zxcc", self.utc_datetime)
serialized_social_token = social_token.serialize()
self.assertEquals(SocialAccountToken.deserialize(serialized_social_token), social_token)
def test_social_token_no_secret_serialization(self):
social_token = SocialAccountToken("xxzc", "", self.utc_datetime)
serialized_social_token = social_token.serialize()
self.assertEquals(SocialAccountToken.deserialize(serialized_social_token), social_token)
def test_social_account_serialization(self):
tokens = [SocialAccountToken("xxzc", "zxcc", self.utc_datetime),
SocialAccountToken("xxzc", "zxcc", self.utc_datetime)]
social_account = SocialAccount("zcas",
self.utc_datetime,
self.utc_datetime,
tokens,
"zcc")
serialized_social_account = social_account.serialize()
self.assertEquals(SocialAccount.deserialize(serialized_social_account), social_account)
def test_social_account_without_token_serialization(self):
tokens = []
social_account = SocialAccount("zcas",
self.utc_datetime,
self.utc_datetime,
tokens,
"zcc")
serialized_social_account = social_account.serialize()
self.assertEquals(SocialAccount.deserialize(serialized_social_account), social_account) | unknown | codeparrot/codeparrot-clean | ||
// SPDX-License-Identifier: GPL-2.0
#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/fs.h>
#include <linux/sysfs.h>
#include <linux/kobject.h>
#include <linux/memory_hotplug.h>
#include <linux/mm.h>
#include <linux/mmzone.h>
#include <linux/pagemap.h>
#include <linux/rmap.h>
#include <linux/mmu_notifier.h>
#include <linux/page_ext.h>
#include <linux/page_idle.h>
#include "internal.h"
#define BITMAP_CHUNK_SIZE sizeof(u64)
#define BITMAP_CHUNK_BITS (BITMAP_CHUNK_SIZE * BITS_PER_BYTE)
/*
 * Idle page tracking only considers user memory pages, for other types of
 * pages the idle flag is always unset and an attempt to set it is silently
 * ignored.
 *
 * We treat a page as a user memory page if it is on an LRU list, because it is
 * always safe to pass such a page to rmap_walk(), which is essential for idle
 * page tracking. With such an indicator of user pages we can skip isolated
 * pages, but since there are not usually many of them, it will hardly affect
 * the overall result.
 *
 * This function tries to get a user memory page by pfn as described above.
 *
 * Returns the folio with an elevated reference count, or NULL if the pfn
 * is not online, is a tail page, or the folio is not on an LRU list.
 * The caller must folio_put() the returned folio.
 */
static struct folio *page_idle_get_folio(unsigned long pfn)
{
	struct page *page = pfn_to_online_page(pfn);
	struct folio *folio;
	if (!page || PageTail(page))
		return NULL;
	folio = page_folio(page);
	if (!folio_test_lru(folio) || !folio_try_get(folio))
		return NULL;
	if (unlikely(page_folio(page) != folio || !folio_test_lru(folio))) {
		/*
		 * Re-validate after taking the reference: back out if the
		 * page no longer belongs to this folio or the folio left
		 * the LRU in the meantime.
		 */
		folio_put(folio);
		folio = NULL;
	}
	return folio;
}
/*
 * rmap_walk() callback: clear the young/accessed state for every mapping
 * of @folio within @vma, and mark the folio young (and not idle) if any
 * mapping had been referenced.  Always returns true to keep walking.
 */
static bool page_idle_clear_pte_refs_one(struct folio *folio,
					struct vm_area_struct *vma,
					unsigned long addr, void *arg)
{
	DEFINE_FOLIO_VMA_WALK(pvmw, folio, vma, addr, 0);
	bool referenced = false;
	while (page_vma_mapped_walk(&pvmw)) {
		addr = pvmw.address;
		if (pvmw.pte) {
			/*
			 * For PTE-mapped THP, one sub page is referenced,
			 * the whole THP is referenced.
			 *
			 * PFN swap PTEs, such as device-exclusive ones, that
			 * actually map pages are "old" from a CPU perspective.
			 * The MMU notifier takes care of any device aspects.
			 */
			if (likely(pte_present(ptep_get(pvmw.pte))))
				referenced |= ptep_test_and_clear_young(vma, addr, pvmw.pte);
			referenced |= mmu_notifier_clear_young(vma->vm_mm, addr, addr + PAGE_SIZE);
		} else if (IS_ENABLED(CONFIG_TRANSPARENT_HUGEPAGE)) {
			pmd_t pmdval = pmdp_get(pvmw.pmd);
			if (likely(pmd_present(pmdval)))
				referenced |= pmdp_clear_young_notify(vma, addr, pvmw.pmd);
			referenced |= mmu_notifier_clear_young(vma->vm_mm, addr, addr + PMD_SIZE);
		} else {
			/* unexpected pmd-mapped page? */
			WARN_ON_ONCE(1);
		}
	}
	if (referenced) {
		folio_clear_idle(folio);
		/*
		 * We cleared the referenced bit in a mapping to this page. To
		 * avoid interference with page reclaim, mark it young so that
		 * folio_referenced() will return > 0.
		 */
		folio_set_young(folio);
	}
	return true;
}
/*
 * Clear the young bits in all mappings of @folio so that a later idle
 * test reflects only accesses made after this call.  Best effort: bails
 * out if the folio is unmapped, has no rmap, or its lock is contended.
 */
static void page_idle_clear_pte_refs(struct folio *folio)
{
	/*
	 * Since rwc.try_lock is unused, rwc is effectively immutable, so we
	 * can make it static to save some cycles and stack.
	 */
	static struct rmap_walk_control rwc = {
		.rmap_one = page_idle_clear_pte_refs_one,
		.anon_lock = folio_lock_anon_vma_read,
	};
	if (!folio_mapped(folio) || !folio_raw_mapping(folio))
		return;
	if (!folio_trylock(folio))
		return;
	rmap_walk(folio, &rwc);
	folio_unlock(folio);
}
/*
 * Read handler for the sysfs "bitmap" file.  Bit N of the output maps to
 * pfn (pos * BITS_PER_BYTE + N); a set bit means the page is still idle.
 * Both @pos and @count must be multiples of the 8-byte bitmap chunk.
 */
static ssize_t page_idle_bitmap_read(struct file *file, struct kobject *kobj,
				     const struct bin_attribute *attr, char *buf,
				     loff_t pos, size_t count)
{
	u64 *out = (u64 *)buf;
	struct folio *folio;
	unsigned long pfn, end_pfn;
	int bit;
	if (pos % BITMAP_CHUNK_SIZE || count % BITMAP_CHUNK_SIZE)
		return -EINVAL;
	pfn = pos * BITS_PER_BYTE;
	if (pfn >= max_pfn)
		return 0;
	end_pfn = pfn + count * BITS_PER_BYTE;
	if (end_pfn > max_pfn)
		end_pfn = max_pfn;
	for (; pfn < end_pfn; pfn++) {
		bit = pfn % BITMAP_CHUNK_BITS;
		if (!bit)
			*out = 0ULL;	/* start each 64-bit chunk cleared */
		folio = page_idle_get_folio(pfn);
		if (folio) {
			if (folio_test_idle(folio)) {
				/*
				 * The page might have been referenced via a
				 * pte, in which case it is not idle. Clear
				 * refs and recheck.
				 */
				page_idle_clear_pte_refs(folio);
				if (folio_test_idle(folio))
					*out |= 1ULL << bit;
			}
			folio_put(folio);
		}
		if (bit == BITMAP_CHUNK_BITS - 1)
			out++;
		cond_resched();
	}
	return (char *)out - buf;
}
/*
 * Write handler for the sysfs "bitmap" file.  For every set bit, clear
 * the young references of the corresponding pfn's folio and mark it idle.
 * Same pfn mapping and 8-byte chunk alignment rules as the read side.
 */
static ssize_t page_idle_bitmap_write(struct file *file, struct kobject *kobj,
				      const struct bin_attribute *attr, char *buf,
				      loff_t pos, size_t count)
{
	const u64 *in = (u64 *)buf;
	struct folio *folio;
	unsigned long pfn, end_pfn;
	int bit;
	if (pos % BITMAP_CHUNK_SIZE || count % BITMAP_CHUNK_SIZE)
		return -EINVAL;
	pfn = pos * BITS_PER_BYTE;
	if (pfn >= max_pfn)
		return -ENXIO;
	end_pfn = pfn + count * BITS_PER_BYTE;
	if (end_pfn > max_pfn)
		end_pfn = max_pfn;
	for (; pfn < end_pfn; pfn++) {
		bit = pfn % BITMAP_CHUNK_BITS;
		if ((*in >> bit) & 1) {
			folio = page_idle_get_folio(pfn);
			if (folio) {
				/* Drop young bits first so "idle" starts fresh. */
				page_idle_clear_pte_refs(folio);
				folio_set_idle(folio);
				folio_put(folio);
			}
		}
		if (bit == BITMAP_CHUNK_BITS - 1)
			in++;
		cond_resched();
	}
	return (char *)in - buf;
}
/*
 * sysfs plumbing: a single root-only (0600) binary file named "bitmap"
 * inside a "page_idle" attribute group registered on the mm kobject.
 */
static const struct bin_attribute page_idle_bitmap_attr =
		__BIN_ATTR(bitmap, 0600,
			   page_idle_bitmap_read, page_idle_bitmap_write, 0);
static const struct bin_attribute *const page_idle_bin_attrs[] = {
	&page_idle_bitmap_attr,
	NULL,
};
static const struct attribute_group page_idle_attr_group = {
	.bin_attrs = page_idle_bin_attrs,
	.name = "page_idle",
};
/* Register the page_idle sysfs group on the mm kobject at boot. */
static int __init page_idle_init(void)
{
	int err;
	err = sysfs_create_group(mm_kobj, &page_idle_attr_group);
	if (err) {
		pr_err("page_idle: register sysfs failed\n");
		return err;
	}
	return 0;
}
subsys_initcall(page_idle_init);
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkElementIndex;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.System.arraycopy;
import static java.util.Collections.emptyMap;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.collect.Maps.IteratorBasedAbstractMap;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.DoNotCall;
import com.google.errorprone.annotations.concurrent.LazyInit;
import com.google.j2objc.annotations.WeakOuter;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.jspecify.annotations.Nullable;
/**
* Fixed-size {@link Table} implementation backed by a two-dimensional array.
*
* <p><b>Warning:</b> {@code ArrayTable} is rarely the {@link Table} implementation you want. First,
* it requires that the complete universe of rows and columns be specified at construction time.
* Second, it is always backed by an array large enough to hold a value for every possible
* combination of row and column keys. (This is rarely optimal unless the table is extremely dense.)
* Finally, every possible combination of row and column keys is always considered to have a value
* associated with it: It is not possible to "remove" a value, only to replace it with {@code null},
* which will still appear when iterating over the table's contents in a foreach loop or a call to a
* null-hostile method like {@link ImmutableTable#copyOf}. For alternatives, please see <a
* href="https://github.com/google/guava/wiki/NewCollectionTypesExplained#table">the wiki</a>.
*
* <p>The allowed row and column keys must be supplied when the table is created. The table always
* contains a mapping for every row key / column pair. The value corresponding to a given row and
* column is null unless another value is provided.
*
* <p>The table's size is constant: the product of the number of supplied row keys and the number of
* supplied column keys. The {@code remove} and {@code clear} methods are not supported by the table
* or its views. The {@link #erase} and {@link #eraseAll} methods may be used instead.
*
* <p>The ordering of the row and column keys provided when the table is constructed determines the
* iteration ordering across rows and columns in the table's views. None of the view iterators
* support {@link Iterator#remove}. If the table is modified after an iterator is created, the
* iterator remains valid.
*
* <p>This class requires less memory than the {@link HashBasedTable} and {@link TreeBasedTable}
* implementations, except when the table is sparse.
*
* <p>Null row keys or column keys are not permitted.
*
* <p>This class provides methods involving the underlying array structure, where the array indices
* correspond to the position of a row or column in the lists of allowed keys and values. See the
* {@link #at}, {@link #set}, {@link #toArray}, {@link #rowKeyList}, and {@link #columnKeyList}
* methods for more details.
*
* <p>Note that this implementation is not synchronized. If multiple threads access the same cell of
* an {@code ArrayTable} concurrently and one of the threads modifies its value, there is no
* guarantee that the new value will be fully visible to the other threads. To guarantee that
* modifications are visible, synchronize access to the table. Unlike other {@code Table}
* implementations, synchronization is unnecessary between a thread that writes to one cell and a
* thread that reads from another.
*
* <p>See the Guava User Guide article on <a href=
* "https://github.com/google/guava/wiki/NewCollectionTypesExplained#table">{@code Table}</a>.
*
* @author Jared Levy
* @since 10.0
*/
// We explicitly list `implements Table<...>` so that its `@Nullable V` appears in Javadoc.
@SuppressWarnings("RedundancyRemover")
@GwtCompatible
public final class ArrayTable<R, C, V> extends AbstractTable<R, C, @Nullable V>
    implements Table<R, C, @Nullable V>, Serializable {

  /**
   * Creates an {@code ArrayTable} filled with {@code null}.
   *
   * @param rowKeys row keys that may be stored in the generated table
   * @param columnKeys column keys that may be stored in the generated table
   * @throws NullPointerException if any of the provided keys is null
   * @throws IllegalArgumentException if {@code rowKeys} or {@code columnKeys} contains duplicates
   *     or if exactly one of {@code rowKeys} or {@code columnKeys} is empty.
   */
  public static <R, C, V> ArrayTable<R, C, V> create(
      Iterable<? extends R> rowKeys, Iterable<? extends C> columnKeys) {
    return new ArrayTable<>(rowKeys, columnKeys);
  }

  /*
   * TODO(jlevy): Add factory methods taking an Enum class, instead of an
   * iterable, to specify the allowed row keys and/or column keys. Note that
   * custom serialization logic is needed to support different enum sizes during
   * serialization and deserialization.
   */

  /**
   * Creates an {@code ArrayTable} with the mappings in the provided table.
   *
   * <p>If {@code table} includes a mapping with row key {@code r} and a separate mapping with
   * column key {@code c}, the returned table contains a mapping with row key {@code r} and column
   * key {@code c}. If that row key / column key pair in not in {@code table}, the pair maps to
   * {@code null} in the generated table.
   *
   * <p>The returned table allows subsequent {@code put} calls with the row keys in {@code
   * table.rowKeySet()} and the column keys in {@code table.columnKeySet()}. Calling {@link #put}
   * with other keys leads to an {@code IllegalArgumentException}.
   *
   * <p>The ordering of {@code table.rowKeySet()} and {@code table.columnKeySet()} determines the
   * row and column iteration ordering of the returned table.
   *
   * @throws NullPointerException if {@code table} has a null key
   */
  @SuppressWarnings("unchecked") // TODO(cpovirk): Make constructor accept wildcard types?
  public static <R, C, V> ArrayTable<R, C, V> create(Table<R, C, ? extends @Nullable V> table) {
    // An ArrayTable source takes the cheaper array-copy path.
    return (table instanceof ArrayTable)
        ? new ArrayTable<R, C, V>((ArrayTable<R, C, V>) table)
        : new ArrayTable<R, C, V>(table);
  }

  // Immutable key lists fix the iteration order of rows and columns.
  private final ImmutableList<R> rowList;
  private final ImmutableList<C> columnList;

  // TODO(jlevy): Add getters returning rowKeyToIndex and columnKeyToIndex?
  // Key -> array-index maps; positions match rowList/columnList.
  private final ImmutableMap<R, Integer> rowKeyToIndex;
  private final ImmutableMap<C, Integer> columnKeyToIndex;
  // Backing storage: array[rowIndex][columnIndex].
  private final @Nullable V[][] array;

  private ArrayTable(Iterable<? extends R> rowKeys, Iterable<? extends C> columnKeys) {
    this.rowList = ImmutableList.copyOf(rowKeys);
    this.columnList = ImmutableList.copyOf(columnKeys);
    checkArgument(rowList.isEmpty() == columnList.isEmpty());

    /*
     * TODO(jlevy): Support only one of rowKey / columnKey being empty? If we
     * do, when columnKeys is empty but rowKeys isn't, rowKeyList() can contain
     * elements but rowKeySet() will be empty and containsRow() won't
     * acknowledge them.
     */
    rowKeyToIndex = Maps.indexMap(rowList);
    columnKeyToIndex = Maps.indexMap(columnList);

    @SuppressWarnings("unchecked")
    @Nullable V[][] tmpArray = (@Nullable V[][]) new Object[rowList.size()][columnList.size()];
    array = tmpArray;
    // Necessary because in GWT the arrays are initialized with "undefined" instead of null.
    eraseAll();
  }

  // Builds the table structure from the source table's key sets, then copies its values.
  private ArrayTable(Table<R, C, ? extends @Nullable V> table) {
    this(table.rowKeySet(), table.columnKeySet());
    putAll(table);
  }

  // Copy constructor: shares the immutable key structures, deep-copies the value array.
  private ArrayTable(ArrayTable<R, C, V> table) {
    rowList = table.rowList;
    columnList = table.columnList;
    rowKeyToIndex = table.rowKeyToIndex;
    columnKeyToIndex = table.columnKeyToIndex;
    @SuppressWarnings("unchecked")
    @Nullable V[][] copy = (@Nullable V[][]) new Object[rowList.size()][columnList.size()];
    array = copy;
    for (int i = 0; i < rowList.size(); i++) {
      arraycopy(table.array[i], 0, copy[i], 0, table.array[i].length);
    }
  }

  /**
   * Shared base for the map views (row, column, rowMap, columnMap): keys come from an
   * index map, and values are read/written at the corresponding array position.
   */
  private abstract static class ArrayMap<K, V extends @Nullable Object>
      extends IteratorBasedAbstractMap<K, V> {
    private final ImmutableMap<K, Integer> keyIndex;

    private ArrayMap(ImmutableMap<K, Integer> keyIndex) {
      this.keyIndex = keyIndex;
    }

    @Override
    public Set<K> keySet() {
      return keyIndex.keySet();
    }

    K getKey(int index) {
      return keyIndex.keySet().asList().get(index);
    }

    // "Row" or "Column"; used only for put()'s error message.
    abstract String getKeyRole();

    @ParametricNullness
    abstract V getValue(int index);

    @ParametricNullness
    abstract V setValue(int index, @ParametricNullness V newValue);

    @Override
    public int size() {
      return keyIndex.size();
    }

    @Override
    public boolean isEmpty() {
      return keyIndex.isEmpty();
    }

    // Live entry view: getValue/setValue delegate back to the table at this index.
    Entry<K, V> getEntry(int index) {
      checkElementIndex(index, size());
      return new AbstractMapEntry<K, V>() {
        @Override
        public K getKey() {
          return ArrayMap.this.getKey(index);
        }

        @Override
        @ParametricNullness
        public V getValue() {
          return ArrayMap.this.getValue(index);
        }

        @Override
        @ParametricNullness
        public V setValue(@ParametricNullness V value) {
          return ArrayMap.this.setValue(index, value);
        }
      };
    }

    @Override
    Iterator<Entry<K, V>> entryIterator() {
      return new AbstractIndexedListIterator<Entry<K, V>>(size()) {
        @Override
        protected Entry<K, V> get(int index) {
          return getEntry(index);
        }
      };
    }

    // TODO(lowasser): consider an optimized values() implementation

    @Override
    public boolean containsKey(@Nullable Object key) {
      return keyIndex.containsKey(key);
    }

    @Override
    public @Nullable V get(@Nullable Object key) {
      Integer index = keyIndex.get(key);
      if (index == null) {
        return null;
      } else {
        return getValue(index);
      }
    }

    @Override
    public @Nullable V put(K key, @ParametricNullness V value) {
      Integer index = keyIndex.get(key);
      if (index == null) {
        throw new IllegalArgumentException(
            getKeyRole() + " " + key + " not in " + keyIndex.keySet());
      }
      return setValue(index, value);
    }

    @Override
    public @Nullable V remove(@Nullable Object key) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void clear() {
      throw new UnsupportedOperationException();
    }
  }

  /**
   * Returns, as an immutable list, the row keys provided when the table was constructed, including
   * those that are mapped to null values only.
   */
  public ImmutableList<R> rowKeyList() {
    return rowList;
  }

  /**
   * Returns, as an immutable list, the column keys provided when the table was constructed,
   * including those that are mapped to null values only.
   */
  public ImmutableList<C> columnKeyList() {
    return columnList;
  }

  /**
   * Returns the value corresponding to the specified row and column indices. The same value is
   * returned by {@code get(rowKeyList().get(rowIndex), columnKeyList().get(columnIndex))}, but this
   * method runs more quickly.
   *
   * @param rowIndex position of the row key in {@link #rowKeyList()}
   * @param columnIndex position of the row key in {@link #columnKeyList()}
   * @return the value with the specified row and column
   * @throws IndexOutOfBoundsException if either index is negative, {@code rowIndex} is greater than
   *     or equal to the number of allowed row keys, or {@code columnIndex} is greater than or equal
   *     to the number of allowed column keys
   */
  public @Nullable V at(int rowIndex, int columnIndex) {
    // In GWT array access never throws IndexOutOfBoundsException.
    checkElementIndex(rowIndex, rowList.size());
    checkElementIndex(columnIndex, columnList.size());
    return array[rowIndex][columnIndex];
  }

  /**
   * Associates {@code value} with the specified row and column indices. The logic {@code
   * put(rowKeyList().get(rowIndex), columnKeyList().get(columnIndex), value)} has the same
   * behavior, but this method runs more quickly.
   *
   * @param rowIndex position of the row key in {@link #rowKeyList()}
   * @param columnIndex position of the row key in {@link #columnKeyList()}
   * @param value value to store in the table
   * @return the previous value with the specified row and column
   * @throws IndexOutOfBoundsException if either index is negative, {@code rowIndex} is greater than
   *     or equal to the number of allowed row keys, or {@code columnIndex} is greater than or equal
   *     to the number of allowed column keys
   */
  @CanIgnoreReturnValue
  public @Nullable V set(int rowIndex, int columnIndex, @Nullable V value) {
    // In GWT array access never throws IndexOutOfBoundsException.
    checkElementIndex(rowIndex, rowList.size());
    checkElementIndex(columnIndex, columnList.size());
    V oldValue = array[rowIndex][columnIndex];
    array[rowIndex][columnIndex] = value;
    return oldValue;
  }

  /**
   * Returns a two-dimensional array with the table contents. The row and column indices correspond
   * to the positions of the row and column in the iterables provided during table construction. If
   * the table lacks a mapping for a given row and column, the corresponding array element is null.
   *
   * <p>Subsequent table changes will not modify the array, and vice versa.
   *
   * @param valueClass class of values stored in the returned array
   */
  @GwtIncompatible // reflection
  public @Nullable V[][] toArray(Class<V> valueClass) {
    @SuppressWarnings("unchecked") // TODO: safe?
    @Nullable V[][] copy =
        (@Nullable V[][]) Array.newInstance(valueClass, rowList.size(), columnList.size());
    for (int i = 0; i < rowList.size(); i++) {
      arraycopy(array[i], 0, copy[i], 0, array[i].length);
    }
    return copy;
  }

  /**
   * Not supported. Use {@link #eraseAll} instead.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Use {@link #eraseAll}
   */
  @DoNotCall("Always throws UnsupportedOperationException")
  @Override
  @Deprecated
  public void clear() {
    throw new UnsupportedOperationException();
  }

  /** Associates the value {@code null} with every pair of allowed row and column keys. */
  public void eraseAll() {
    for (@Nullable V[] row : array) {
      Arrays.fill(row, null);
    }
  }

  /**
   * Returns {@code true} if the provided keys are among the keys provided when the table was
   * constructed.
   */
  @Override
  public boolean contains(@Nullable Object rowKey, @Nullable Object columnKey) {
    return containsRow(rowKey) && containsColumn(columnKey);
  }

  /**
   * Returns {@code true} if the provided column key is among the column keys provided when the
   * table was constructed.
   */
  @Override
  public boolean containsColumn(@Nullable Object columnKey) {
    return columnKeyToIndex.containsKey(columnKey);
  }

  /**
   * Returns {@code true} if the provided row key is among the row keys provided when the table was
   * constructed.
   */
  @Override
  public boolean containsRow(@Nullable Object rowKey) {
    return rowKeyToIndex.containsKey(rowKey);
  }

  @Override
  public boolean containsValue(@Nullable Object value) {
    // Linear scan of the whole backing array.
    for (@Nullable V[] row : array) {
      for (V element : row) {
        if (Objects.equals(value, element)) {
          return true;
        }
      }
    }
    return false;
  }

  @Override
  public @Nullable V get(@Nullable Object rowKey, @Nullable Object columnKey) {
    Integer rowIndex = rowKeyToIndex.get(rowKey);
    Integer columnIndex = columnKeyToIndex.get(columnKey);
    return (rowIndex == null || columnIndex == null) ? null : at(rowIndex, columnIndex);
  }

  /**
   * Returns {@code true} if {@code rowKeyList().size == 0} or {@code columnKeyList().size() == 0}.
   */
  @Override
  public boolean isEmpty() {
    return rowList.isEmpty() || columnList.isEmpty();
  }

  /**
   * {@inheritDoc}
   *
   * @throws IllegalArgumentException if {@code rowKey} is not in {@link #rowKeySet()} or {@code
   *     columnKey} is not in {@link #columnKeySet()}.
   */
  @CanIgnoreReturnValue
  @Override
  public @Nullable V put(R rowKey, C columnKey, @Nullable V value) {
    checkNotNull(rowKey);
    checkNotNull(columnKey);
    Integer rowIndex = rowKeyToIndex.get(rowKey);
    checkArgument(rowIndex != null, "Row %s not in %s", rowKey, rowList);
    Integer columnIndex = columnKeyToIndex.get(columnKey);
    checkArgument(columnIndex != null, "Column %s not in %s", columnKey, columnList);
    return set(rowIndex, columnIndex, value);
  }

  /*
   * TODO(jlevy): Consider creating a merge() method, similar to putAll() but
   * copying non-null values only.
   */

  /**
   * {@inheritDoc}
   *
   * <p>If {@code table} is an {@code ArrayTable}, its null values will be stored in this table,
   * possibly replacing values that were previously non-null.
   *
   * @throws NullPointerException if {@code table} has a null key
   * @throws IllegalArgumentException if any of the provided table's row keys or column keys is not
   *     in {@link #rowKeySet()} or {@link #columnKeySet()}
   */
  @Override
  public void putAll(Table<? extends R, ? extends C, ? extends @Nullable V> table) {
    super.putAll(table);
  }

  /**
   * Not supported. Use {@link #erase} instead.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Use {@link #erase}
   */
  @DoNotCall("Always throws UnsupportedOperationException")
  @CanIgnoreReturnValue
  @Override
  @Deprecated
  public @Nullable V remove(@Nullable Object rowKey, @Nullable Object columnKey) {
    throw new UnsupportedOperationException();
  }

  /**
   * Associates the value {@code null} with the specified keys, assuming both keys are valid. If
   * either key is null or isn't among the keys provided during construction, this method has no
   * effect.
   *
   * <p>This method is equivalent to {@code put(rowKey, columnKey, null)} when both provided keys
   * are valid.
   *
   * @param rowKey row key of mapping to be erased
   * @param columnKey column key of mapping to be erased
   * @return the value previously associated with the keys, or {@code null} if no mapping existed
   *     for the keys
   */
  @CanIgnoreReturnValue
  public @Nullable V erase(@Nullable Object rowKey, @Nullable Object columnKey) {
    Integer rowIndex = rowKeyToIndex.get(rowKey);
    Integer columnIndex = columnKeyToIndex.get(columnKey);
    if (rowIndex == null || columnIndex == null) {
      return null;
    }
    return set(rowIndex, columnIndex, null);
  }

  // TODO(jlevy): Add eraseRow and eraseColumn methods?

  @Override
  public int size() {
    return rowList.size() * columnList.size();
  }

  /**
   * Returns an unmodifiable set of all row key / column key / value triplets. Changes to the table
   * will update the returned set.
   *
   * <p>The returned set's iterator traverses the mappings with the first row key, the mappings with
   * the second row key, and so on.
   *
   * <p>The value in the returned cells may change if the table subsequently changes.
   *
   * @return set of table cells consisting of row key / column key / value triplets
   */
  @Override
  public Set<Cell<R, C, @Nullable V>> cellSet() {
    return super.cellSet();
  }

  @Override
  Iterator<Cell<R, C, @Nullable V>> cellIterator() {
    return new AbstractIndexedListIterator<Cell<R, C, @Nullable V>>(size()) {
      @Override
      protected Cell<R, C, @Nullable V> get(int index) {
        return getCell(index);
      }
    };
  }

  // Live cell view for a linear index in row-major order (index = row * columns + column).
  private Cell<R, C, @Nullable V> getCell(int index) {
    return new Tables.AbstractCell<R, C, @Nullable V>() {
      final int rowIndex = index / columnList.size();
      final int columnIndex = index % columnList.size();

      @Override
      public R getRowKey() {
        return rowList.get(rowIndex);
      }

      @Override
      public C getColumnKey() {
        return columnList.get(columnIndex);
      }

      @Override
      public @Nullable V getValue() {
        return at(rowIndex, columnIndex);
      }
    };
  }

  // Value at a linear row-major index; used by valuesIterator().
  private @Nullable V getValue(int index) {
    int rowIndex = index / columnList.size();
    int columnIndex = index % columnList.size();
    return at(rowIndex, columnIndex);
  }

  /**
   * Returns a view of all mappings that have the given column key. If the column key isn't in
   * {@link #columnKeySet()}, an empty immutable map is returned.
   *
   * <p>Otherwise, for each row key in {@link #rowKeySet()}, the returned map associates the row key
   * with the corresponding value in the table. Changes to the returned map will update the
   * underlying table, and vice versa.
   *
   * @param columnKey key of column to search for in the table
   * @return the corresponding map from row keys to values
   */
  @Override
  public Map<R, @Nullable V> column(C columnKey) {
    checkNotNull(columnKey);
    Integer columnIndex = columnKeyToIndex.get(columnKey);
    if (columnIndex == null) {
      return emptyMap();
    } else {
      return new Column(columnIndex);
    }
  }

  /** Live map view of a single column: row key -> value at (row, columnIndex). */
  private final class Column extends ArrayMap<R, @Nullable V> {
    final int columnIndex;

    Column(int columnIndex) {
      super(rowKeyToIndex);
      this.columnIndex = columnIndex;
    }

    @Override
    String getKeyRole() {
      return "Row";
    }

    @Override
    @Nullable V getValue(int index) {
      return at(index, columnIndex);
    }

    @Override
    @Nullable V setValue(int index, @Nullable V newValue) {
      return set(index, columnIndex, newValue);
    }
  }

  /**
   * Returns an immutable set of the valid column keys, including those that are associated with
   * null values only.
   *
   * @return immutable set of column keys
   */
  @Override
  public ImmutableSet<C> columnKeySet() {
    return columnKeyToIndex.keySet();
  }

  // Lazily created; racy initialization is benign because ColumnMap is stateless.
  @LazyInit private transient @Nullable ColumnMap columnMap;

  @Override
  public Map<C, Map<R, @Nullable V>> columnMap() {
    ColumnMap map = columnMap;
    return (map == null) ? columnMap = new ColumnMap() : map;
  }

  /** Live map view: column key -> Column map view. Mutation through put() is unsupported. */
  @WeakOuter
  private final class ColumnMap extends ArrayMap<C, Map<R, @Nullable V>> {
    private ColumnMap() {
      super(columnKeyToIndex);
    }

    @Override
    String getKeyRole() {
      return "Column";
    }

    @Override
    Map<R, @Nullable V> getValue(int index) {
      return new Column(index);
    }

    @Override
    Map<R, @Nullable V> setValue(int index, Map<R, @Nullable V> newValue) {
      throw new UnsupportedOperationException();
    }

    @Override
    public @Nullable Map<R, @Nullable V> put(C key, Map<R, @Nullable V> value) {
      throw new UnsupportedOperationException();
    }
  }

  /**
   * Returns a view of all mappings that have the given row key. If the row key isn't in {@link
   * #rowKeySet()}, an empty immutable map is returned.
   *
   * <p>Otherwise, for each column key in {@link #columnKeySet()}, the returned map associates the
   * column key with the corresponding value in the table. Changes to the returned map will update
   * the underlying table, and vice versa.
   *
   * @param rowKey key of row to search for in the table
   * @return the corresponding map from column keys to values
   */
  @Override
  public Map<C, @Nullable V> row(R rowKey) {
    checkNotNull(rowKey);
    Integer rowIndex = rowKeyToIndex.get(rowKey);
    if (rowIndex == null) {
      return emptyMap();
    } else {
      return new Row(rowIndex);
    }
  }

  /** Live map view of a single row: column key -> value at (rowIndex, column). */
  private final class Row extends ArrayMap<C, @Nullable V> {
    final int rowIndex;

    Row(int rowIndex) {
      super(columnKeyToIndex);
      this.rowIndex = rowIndex;
    }

    @Override
    String getKeyRole() {
      return "Column";
    }

    @Override
    @Nullable V getValue(int index) {
      return at(rowIndex, index);
    }

    @Override
    @Nullable V setValue(int index, @Nullable V newValue) {
      return set(rowIndex, index, newValue);
    }
  }

  /**
   * Returns an immutable set of the valid row keys, including those that are associated with null
   * values only.
   *
   * @return immutable set of row keys
   */
  @Override
  public ImmutableSet<R> rowKeySet() {
    return rowKeyToIndex.keySet();
  }

  // Lazily created; racy initialization is benign because RowMap is stateless.
  @LazyInit private transient @Nullable RowMap rowMap;

  @Override
  public Map<R, Map<C, @Nullable V>> rowMap() {
    RowMap map = rowMap;
    return (map == null) ? rowMap = new RowMap() : map;
  }

  /** Live map view: row key -> Row map view. Mutation through put() is unsupported. */
  @WeakOuter
  private final class RowMap extends ArrayMap<R, Map<C, @Nullable V>> {
    private RowMap() {
      super(rowKeyToIndex);
    }

    @Override
    String getKeyRole() {
      return "Row";
    }

    @Override
    Map<C, @Nullable V> getValue(int index) {
      return new Row(index);
    }

    @Override
    Map<C, @Nullable V> setValue(int index, Map<C, @Nullable V> newValue) {
      throw new UnsupportedOperationException();
    }

    @Override
    public @Nullable Map<C, @Nullable V> put(R key, Map<C, @Nullable V> value) {
      throw new UnsupportedOperationException();
    }
  }

  /**
   * Returns an unmodifiable collection of all values, which may contain duplicates. Changes to the
   * table will update the returned collection.
   *
   * <p>The returned collection's iterator traverses the values of the first row key, the values of
   * the second row key, and so on.
   *
   * @return collection of values
   */
  @Override
  public Collection<@Nullable V> values() {
    return super.values();
  }

  @Override
  Iterator<@Nullable V> valuesIterator() {
    return new AbstractIndexedListIterator<@Nullable V>(size()) {
      @Override
      protected @Nullable V get(int index) {
        return getValue(index);
      }
    };
  }

  // Java serialization only; excluded from GWT and J2KT builds.
  @GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from TProtocol import *
from struct import pack, unpack
class TBinaryProtocol(TProtocolBase):
    """Binary implementation of the Thrift protocol driver.

    All integers and doubles are serialized big-endian via struct
    pack/unpack.  In strict mode a message starts with a version word
    (VERSION_1 | message type); in non-strict (legacy) mode it starts
    with the raw method-name length.
    """

    # NastyHaxx. Python 2.4+ on 32-bit machines forces hex constants to be
    # positive, converting this into a long. If we hardcode the int value
    # instead it'll stay in 32 bit-land.

    # VERSION_MASK = 0xffff0000
    VERSION_MASK = -65536

    # VERSION_1 = 0x80010000
    VERSION_1 = -2147418112

    TYPE_MASK = 0x000000ff

    def __init__(self, trans, strictRead=False, strictWrite=True):
        TProtocolBase.__init__(self, trans)
        self.strictRead = strictRead
        self.strictWrite = strictWrite

    def writeMessageBegin(self, name, type, seqid):
        """Write a message header (version word first in strict mode)."""
        if self.strictWrite:
            self.writeI32(TBinaryProtocol.VERSION_1 | type)
            self.writeString(name)
            self.writeI32(seqid)
        else:
            self.writeString(name)
            self.writeByte(type)
            self.writeI32(seqid)

    def writeMessageEnd(self):
        pass

    def writeStructBegin(self, name):
        # Struct boundaries carry no bytes in the binary protocol.
        pass

    def writeStructEnd(self):
        pass

    def writeFieldBegin(self, name, type, id):
        # Field names are not serialized; only the type tag and numeric id.
        self.writeByte(type)
        self.writeI16(id)

    def writeFieldEnd(self):
        pass

    def writeFieldStop(self):
        self.writeByte(TType.STOP)

    def writeMapBegin(self, ktype, vtype, size):
        self.writeByte(ktype)
        self.writeByte(vtype)
        self.writeI32(size)

    def writeMapEnd(self):
        pass

    def writeListBegin(self, etype, size):
        self.writeByte(etype)
        self.writeI32(size)

    def writeListEnd(self):
        pass

    def writeSetBegin(self, etype, size):
        self.writeByte(etype)
        self.writeI32(size)

    def writeSetEnd(self):
        pass

    def writeBool(self, bool):
        # Booleans are encoded as a single 0/1 byte.
        if bool:
            self.writeByte(1)
        else:
            self.writeByte(0)

    def writeByte(self, byte):
        buff = pack("!b", byte)
        self.trans.write(buff)

    def writeI16(self, i16):
        buff = pack("!h", i16)
        self.trans.write(buff)

    def writeI32(self, i32):
        buff = pack("!i", i32)
        self.trans.write(buff)

    def writeI64(self, i64):
        buff = pack("!q", i64)
        self.trans.write(buff)

    def writeDouble(self, dub):
        buff = pack("!d", dub)
        self.trans.write(buff)

    def writeString(self, str):
        # Length-prefixed: 4-byte size followed by the raw bytes.
        self.writeI32(len(str))
        self.trans.write(str)

    def readMessageBegin(self):
        """Read a message header; returns (name, type, seqid).

        Raises TProtocolException on a bad version word, or on a
        version-less message when strictRead is enabled.
        """
        sz = self.readI32()
        if sz < 0:
            # A negative leading i32 is the strict-format version word.
            version = sz & TBinaryProtocol.VERSION_MASK
            if version != TBinaryProtocol.VERSION_1:
                raise TProtocolException(TProtocolException.BAD_VERSION, 'Bad version in readMessageBegin: %d' % (sz))
            type = sz & TBinaryProtocol.TYPE_MASK
            name = self.readString()
            seqid = self.readI32()
        else:
            if self.strictRead:
                raise TProtocolException(TProtocolException.BAD_VERSION, 'No protocol version header')
            # Legacy format: sz is the method-name length.
            name = self.trans.readAll(sz)
            type = self.readByte()
            seqid = self.readI32()
        return (name, type, seqid)

    def readMessageEnd(self):
        pass

    def readStructBegin(self):
        pass

    def readStructEnd(self):
        pass

    def readFieldBegin(self):
        """Read a field header; returns (None, type, id) -- names are not on the wire."""
        type = self.readByte()
        if type == TType.STOP:
            return (None, type, 0)
        id = self.readI16()
        return (None, type, id)

    def readFieldEnd(self):
        pass

    def readMapBegin(self):
        ktype = self.readByte()
        vtype = self.readByte()
        size = self.readI32()
        return (ktype, vtype, size)

    def readMapEnd(self):
        pass

    def readListBegin(self):
        etype = self.readByte()
        size = self.readI32()
        return (etype, size)

    def readListEnd(self):
        pass

    def readSetBegin(self):
        etype = self.readByte()
        size = self.readI32()
        return (etype, size)

    def readSetEnd(self):
        pass

    def readBool(self):
        byte = self.readByte()
        if byte == 0:
            return False
        return True

    def readByte(self):
        buff = self.trans.readAll(1)
        val, = unpack('!b', buff)
        return val

    def readI16(self):
        buff = self.trans.readAll(2)
        val, = unpack('!h', buff)
        return val

    def readI32(self):
        buff = self.trans.readAll(4)
        val, = unpack('!i', buff)
        return val

    def readI64(self):
        buff = self.trans.readAll(8)
        val, = unpack('!q', buff)
        return val

    def readDouble(self):
        buff = self.trans.readAll(8)
        val, = unpack('!d', buff)
        return val

    def readString(self):
        # Renamed locals: the originals shadowed the len() and str() builtins.
        size = self.readI32()
        return self.trans.readAll(size)
class TBinaryProtocolFactory:
    """Factory producing TBinaryProtocol instances with fixed strictness flags."""

    def __init__(self, strictRead=False, strictWrite=True):
        self.strictRead = strictRead
        self.strictWrite = strictWrite

    def getProtocol(self, trans):
        """Build a TBinaryProtocol bound to the given transport."""
        return TBinaryProtocol(trans, self.strictRead, self.strictWrite)
class TBinaryProtocolAccelerated(TBinaryProtocol):
    """C-Accelerated version of TBinaryProtocol.

    This class does not override any of TBinaryProtocol's methods,
    but the generated code recognizes it directly and will call into
    our C module to do the encoding, bypassing this object entirely.
    We inherit from TBinaryProtocol so that the normal TBinaryProtocol
    encoding can happen if the fastbinary module doesn't work for some
    reason. (TODO(dreiss): Make this happen sanely in more cases.)

    In order to take advantage of the C module, just use
    TBinaryProtocolAccelerated instead of TBinaryProtocol.

    NOTE: This code was contributed by an external developer.
    The internal Thrift team has reviewed and tested it,
    but we cannot guarantee that it is production-ready.
    Please feel free to report bugs and/or success stories
    to the public mailing list.
    """
    # Intentionally empty: behavior is inherited; the C fastpath keys off the type.
    pass
class TBinaryProtocolAcceleratedFactory:
    """Factory producing TBinaryProtocolAccelerated instances."""

    def getProtocol(self, trans):
        """Build an accelerated binary protocol bound to the given transport."""
        protocol = TBinaryProtocolAccelerated(trans)
        return protocol
import urllib2
from StringIO import StringIO
import gzip
import cookielib
import time
class NZBDownloader(object):
    """urllib2-based downloader that throttles requests to one per 10 seconds."""

    def __init__(self):
        self.cj = cookielib.CookieJar()
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
        # Epoch seconds of the most recent request, or None before the first one.
        self.lastRequestTime = None

    def waitBeforeNextRequest(self):
        # Bug fix: the original stored time.gmtime() (a struct_time) but
        # compared it against epoch seconds from time.mktime(time.localtime());
        # in Python 2 that cross-type comparison made the throttle misfire.
        # Store and compare plain epoch seconds instead.
        if self.lastRequestTime and self.lastRequestTime > time.time() - 10:
            time.sleep(10)
        self.lastRequestTime = time.time()

    def open(self, request):
        """Open the request through the cookie-aware opener, honoring the throttle."""
        self.waitBeforeNextRequest()
        return self.opener.open(request)
class NZBSearchResult(object):
    """Metadata for one NZB search hit, plus shared download plumbing."""

    def __init__(self, downloader, sizeInMegs, refererURL, age, nzbid):
        self.downloader = downloader
        self.refererURL = refererURL
        self.sizeInMegs = sizeInMegs
        self.age = age
        self.nzbid = nzbid

    def readRequest(self, request):
        """Issue the request via the downloader and return the (decompressed) body."""
        # Bug fix: the original added 'Accept-encoding' and then
        # 'Accept-Encoding'; urllib2 capitalizes header names, so the second
        # silently overwrote the first.  One header is enough.
        request.add_header('Referer', self.refererURL)
        request.add_header('Accept-Encoding', 'gzip')
        request.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.57 Safari/537.17')
        response = self.downloader.open(request)
        if response.info().get('Content-Encoding') == 'gzip':
            # Transparently decompress gzip-encoded bodies.
            buf = StringIO(response.read())
            f = gzip.GzipFile(fileobj=buf)
            return f.read()
        else:
            return response.read()

    def getNZB(self):
        # Subclasses override this to return the NZB payload.
        pass
class NZBGetURLSearchResult(NZBSearchResult):
    """Search result whose NZB payload is fetched with a plain GET request."""

    def __init__(self, downloader, nzburl, sizeInMegs, refererURL, age, nzbid):
        NZBSearchResult.__init__(self, downloader, sizeInMegs, refererURL, age, nzbid)
        self.nzburl = nzburl

    def getNZB(self):
        """Download the NZB from self.nzburl, cache it, and return it."""
        req = urllib2.Request(self.nzburl)
        self.nzbdata = NZBSearchResult.readRequest(self, req)
        return self.nzbdata
class NZBPostURLSearchResult(NZBSearchResult):
    """Search result whose NZB payload must be fetched with an HTTP POST."""

    def __init__(self, downloader, nzburl, postData, sizeInMegs, refererURL, age, nzbid):
        NZBSearchResult.__init__(self, downloader, sizeInMegs, refererURL, age, nzbid)
        self.nzburl = nzburl
        self.postData = postData

    def getNZB(self):
        """POST self.postData to self.nzburl, cache the response, and return it."""
        req = urllib2.Request(self.nzburl, self.postData)
        self.nzbdata = NZBSearchResult.readRequest(self, req)
        return self.nzbdata
class NZBDataSearchResult(NZBSearchResult):
    """Search result that already carries its NZB payload in memory."""

    def __init__(self, nzbdata, sizeInMegs, refererURL, age, nzbid):
        # Bug fix: the original forwarded only four of the five required
        # arguments to NZBSearchResult.__init__ (dropping sizeInMegs), which
        # raised a TypeError and would have mis-assigned every attribute.
        NZBSearchResult.__init__(self, None, sizeInMegs, refererURL, age, nzbid)
        self.nzbdata = nzbdata

    def getNZB(self):
        """Return the in-memory NZB payload."""
        return self.nzbdata
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
try:
    # Prefer the Display instance created by the ansible CLI entry point.
    from __main__ import display
except ImportError:
    # Running outside the CLI (tests, library use): build a standalone one.
    from ansible.utils.display import Display
    display = Display()
__all__ = ['LookupBase']
class LookupBase:
    """Base class for lookup plugins: holds the loader/templar handles and
    provides small list/dict manipulation helpers shared by subclasses."""
    def __init__(self, loader=None, templar=None, **kwargs):
        self._loader = loader
        self._templar = templar
        self._display = display
    def get_basedir(self, variables):
        """Return the current role's path if set, else the loader's basedir."""
        if 'role_path' in variables:
            return variables['role_path']
        return self._loader.get_basedir()
    def _flatten(self, terms):
        """Flatten one level: list/tuple items are spliced in, others appended."""
        flat = []
        for item in terms:
            if isinstance(item, (list, tuple)):
                flat.extend(item)
            else:
                flat.append(item)
        return flat
    def _combine(self, a, b):
        """Return the cartesian product of ``a`` and ``b`` as flattened pairs."""
        return [self._flatten([x, y]) for x in a for y in b]
    def _flatten_hash_to_list(self, terms):
        """Convert a mapping into a list of ``{'key': k, 'value': v}`` dicts."""
        return [{'key': k, 'value': terms[k]} for k in terms]
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import uuid
import warnings
if sys.version > '3':
    # Python 3: alias the Python 2 names used below for isinstance checks
    # (`basestring`) and text construction (`unicode`).
    # NOTE(review): lexicographic version-string compare is upstream's idiom;
    # it would misfire only for a hypothetical major version >= 30.
    basestring = str
    unicode = str
from pyspark import SparkContext, since
from pyspark.ml.common import inherit_doc
def _jvm():
    """
    Return the JVM view associated with SparkContext. Must be called
    after SparkContext is initialized.

    :raises AttributeError: if no SparkContext (and hence no JVM) exists yet.
    """
    jvm = SparkContext._jvm
    if not jvm:
        raise AttributeError("Cannot load _jvm from SparkContext. Is SparkContext initialized?")
    return jvm
class Identifiable(object):
    """
    Object with a unique ID.
    """
    def __init__(self):
        #: A unique id for the object.
        self.uid = self._randomUID()
    def __repr__(self):
        return self.uid
    @classmethod
    def _randomUID(cls):
        """
        Generate a unique unicode id for the object. The default implementation
        concatenates the class name, "_", and the last 20 hex characters of a
        random UUID (``uuid4().hex[12:]`` drops the first 12 of 32 hex digits).
        """
        return unicode(cls.__name__ + "_" + uuid.uuid4().hex[12:])
@inherit_doc
class MLWriter(object):
    """
    Utility class that can save ML instances.
    .. versionadded:: 2.0.0
    """
    def _fail(self):
        # Common failure path: every operation on this abstract writer raises
        # until a subclass provides a real implementation.
        raise NotImplementedError("MLWriter is not yet implemented for type: %s" % type(self))
    def save(self, path):
        """Save the ML instance to the input path."""
        self._fail()
    def overwrite(self):
        """Overwrites if the output path already exists."""
        self._fail()
    def context(self, sqlContext):
        """
        Sets the SQL context to use for saving.
        .. note:: Deprecated in 2.1 and will be removed in 2.2, use session instead.
        """
        self._fail()
    def session(self, sparkSession):
        """Sets the Spark Session to use for saving."""
        self._fail()
@inherit_doc
class JavaMLWriter(MLWriter):
    """
    (Private) Specialization of :py:class:`MLWriter` for :py:class:`JavaParams` types
    """
    def __init__(self, instance):
        super(JavaMLWriter, self).__init__()
        # Convert the Python instance to its peer Java object and keep the
        # Java-side writer; all operations below delegate to it via Py4J.
        _java_obj = instance._to_java()
        self._jwrite = _java_obj.write()
    def save(self, path):
        """Save the ML instance to the input path."""
        # basestring is the py2/py3 shim defined at module level.
        if not isinstance(path, basestring):
            raise TypeError("path should be a basestring, got type %s" % type(path))
        self._jwrite.save(path)
    def overwrite(self):
        """Overwrites if the output path already exists."""
        self._jwrite.overwrite()
        return self
    def context(self, sqlContext):
        """
        Sets the SQL context to use for saving.
        .. note:: Deprecated in 2.1 and will be removed in 2.2, use session instead.
        """
        # Relies on the module-level ``import warnings`` (top of file).
        warnings.warn("Deprecated in 2.1 and will be removed in 2.2, use session instead.")
        self._jwrite.context(sqlContext._ssql_ctx)
        return self
    def session(self, sparkSession):
        """Sets the Spark Session to use for saving."""
        self._jwrite.session(sparkSession._jsparkSession)
        return self
@inherit_doc
class MLWritable(object):
    """
    Mixin for ML instances that provide :py:class:`MLWriter`.
    .. versionadded:: 2.0.0
    """
    def write(self):
        """Returns an MLWriter instance for this ML instance."""
        raise NotImplementedError("MLWritable is not yet implemented for type: %r" % type(self))
    def save(self, path):
        """Save this ML instance to the given path, a shortcut of `write().save(path)`."""
        writer = self.write()
        writer.save(path)
@inherit_doc
class JavaMLWritable(MLWritable):
    """
    (Private) Mixin for ML instances that provide :py:class:`JavaMLWriter`.
    """
    def write(self):
        """Returns an MLWriter instance for this ML instance."""
        writer = JavaMLWriter(self)
        return writer
@inherit_doc
class MLReader(object):
    """
    Utility class that can load ML instances.
    .. versionadded:: 2.0.0
    """
    def _fail(self):
        # Shared failure path for all abstract operations on this reader.
        raise NotImplementedError("MLReader is not yet implemented for type: %s" % type(self))
    def load(self, path):
        """Load the ML instance from the input path."""
        self._fail()
    def context(self, sqlContext):
        """
        Sets the SQL context to use for loading.
        .. note:: Deprecated in 2.1 and will be removed in 2.2, use session instead.
        """
        self._fail()
    def session(self, sparkSession):
        """Sets the Spark Session to use for loading."""
        self._fail()
@inherit_doc
class JavaMLReader(MLReader):
    """
    (Private) Specialization of :py:class:`MLReader` for :py:class:`JavaParams` types
    """
    def __init__(self, clazz):
        # Python class to instantiate on load, plus its peer Java reader.
        self._clazz = clazz
        self._jread = self._load_java_obj(clazz).read()
    def load(self, path):
        """Load the ML instance from the input path."""
        # basestring is the py2/py3 shim defined at module level.
        if not isinstance(path, basestring):
            raise TypeError("path should be a basestring, got type %s" % type(path))
        java_obj = self._jread.load(path)
        if not hasattr(self._clazz, "_from_java"):
            raise NotImplementedError("This Java ML type cannot be loaded into Python currently: %r"
                                      % self._clazz)
        return self._clazz._from_java(java_obj)
    def context(self, sqlContext):
        """
        Sets the SQL context to use for loading.
        .. note:: Deprecated in 2.1 and will be removed in 2.2, use session instead.
        """
        # Relies on the module-level ``import warnings`` (top of file).
        warnings.warn("Deprecated in 2.1 and will be removed in 2.2, use session instead.")
        self._jread.context(sqlContext._ssql_ctx)
        return self
    def session(self, sparkSession):
        """Sets the Spark Session to use for loading."""
        self._jread.session(sparkSession._jsparkSession)
        return self
    @classmethod
    def _java_loader_class(cls, clazz):
        """
        Returns the full class name of the Java ML instance. The default
        implementation replaces "pyspark" by "org.apache.spark" in
        the Python full class name.
        """
        java_package = clazz.__module__.replace("pyspark", "org.apache.spark")
        if clazz.__name__ in ("Pipeline", "PipelineModel"):
            # Remove the last package name "pipeline" for Pipeline and PipelineModel.
            java_package = ".".join(java_package.split(".")[0:-1])
        return java_package + "." + clazz.__name__
    @classmethod
    def _load_java_obj(cls, clazz):
        """Load the peer Java object of the ML instance."""
        java_class = cls._java_loader_class(clazz)
        # Walk the dotted name one attribute at a time through the JVM view.
        java_obj = _jvm()
        for name in java_class.split("."):
            java_obj = getattr(java_obj, name)
        return java_obj
@inherit_doc
class MLReadable(object):
    """
    Mixin for instances that provide :py:class:`MLReader`.
    .. versionadded:: 2.0.0
    """
    @classmethod
    def read(cls):
        """Returns an MLReader instance for this class."""
        raise NotImplementedError("MLReadable.read() not implemented for type: %r" % cls)
    @classmethod
    def load(cls, path):
        """Reads an ML instance from the input path, a shortcut of `read().load(path)`."""
        reader = cls.read()
        return reader.load(path)
@inherit_doc
class JavaMLReadable(MLReadable):
    """
    (Private) Mixin for instances that provide JavaMLReader.
    """
    @classmethod
    def read(cls):
        """Returns an MLReader instance for this class."""
        reader = JavaMLReader(cls)
        return reader
@inherit_doc
class JavaPredictionModel():
    """
    (Private) Java Model for prediction tasks (regression and classification).
    To be mixed in with class:`pyspark.ml.JavaModel`
    """
    @property
    @since("2.1.0")
    def numFeatures(self):
        """
        Returns the number of features the model was trained on. If unknown, returns -1
        """
        # _call_java is provided by the JavaModel mixin (see class docstring);
        # it forwards the call to the peer Java model object.
        return self._call_java("numFeatures")
# Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for the pybind11 bindings of format_converter."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
import numpy as np
from tensorflow.lite.tools.optimize.sparsity.python import format_converter_extension as format_converter
class FormatConverterTest(absltest.TestCase):
  def test_bcsr_fp32(self):
    """Same as FormatConverterTest::BlockTestD0S1 but via pybind11."""
    # 4x4 matrix viewed as four 2x2 blocks; block rows are dense and block
    # columns use CSR, so only the two non-empty blocks per row are stored.
    # pyformat: disable
    dense_matrix = [1.0, 0.0, 2.0, 3.0,
                    0.0, 4.0, 0.0, 0.0,
                    0.0, 0.0, 5.0, 0.0,
                    0.0, 0.0, 0.0, 6.0]
    # pyformat: enable
    dense_shape = [4, 4]
    traversal_order = [0, 1, 2, 3]
    dim_types = [
        format_converter.TfLiteDimensionType.TF_LITE_DIM_DENSE,
        format_converter.TfLiteDimensionType.TF_LITE_DIM_SPARSE_CSR
    ]
    block_size = [2, 2]
    block_map = [0, 1]
    converter = format_converter.FormatConverterFp32(dense_shape,
                                                     traversal_order, dim_types,
                                                     block_size, block_map)
    converter.DenseToSparse(np.asarray(dense_matrix, dtype=np.float32).data)
    dim_metadata = converter.GetDimMetadata()
    self.assertEqual([2], dim_metadata[0])
    self.assertEmpty(dim_metadata[1])  # rows are dense.
    self.assertEqual([0, 2, 3], dim_metadata[2])  # array segments.
    self.assertEqual([0, 1, 1], dim_metadata[3])  # array indices.
    self.assertEqual([2], dim_metadata[4])
    self.assertEmpty(dim_metadata[5])  # sub block rows are dense.
    self.assertEqual([2], dim_metadata[6])
    self.assertEmpty(dim_metadata[7])  # sub block columns are dense.
    # Values are laid out block by block, each 2x2 block in row-major order.
    expected_data = [1.0, 0.0, 0.0, 4.0, 2.0, 3.0, 0.0, 0.0, 5.0, 0.0, 0.0, 6.0]
    sparse_data = converter.GetData()
    self.assertTrue(np.allclose(expected_data, sparse_data))
    # Round-trip back to dense must reproduce the original matrix.
    converter.SparseToDense(np.asarray(sparse_data, dtype=np.float32).data)
    self.assertTrue(np.allclose(dense_matrix, converter.GetData()))
if __name__ == '__main__':
  absltest.main()
#!/usr/bin/env bash
# Copyright 2023 The Cockroach Authors.
#
# Use of this software is governed by the CockroachDB Software License
# included in the /LICENSE file.
# This script is the third step of the "Publish Coverage" build.
#
# It takes the HTML mini-websites produced by the previous step and uploads them
# to GCE. It also updates the index.html file listing all uploaded profiles.
# The index can be accessed at:
# https://storage.googleapis.com/crl-codecover-public/cockroach/index.html
BUCKET=crl-codecover-public
set -euo pipefail
source "build/teamcity-support.sh" # for log_into_gcloud
google_credentials="$GOOGLE_CREDENTIALS"
log_into_gcloud
# Early sanity check: under `set -e` this fails the build fast if the
# credentials cannot list the target bucket.
gsutil ls gs://$BUCKET/
TIMESTAMP=$(date -u '+%Y-%m-%d %H:%MZ')
# publish <profile>: upload output/html/<profile> to a timestamped,
# SHA-stamped directory in the bucket, gzip-compressing (-Z) in transit.
publish() {
  PROFILE="$1"
  DIR="$TIMESTAMP $(git rev-parse --short=8 HEAD) - $PROFILE"
  echo "Uploading to $DIR.."
  gsutil -m cp -Z -r "output/html/$PROFILE" "gs://$BUCKET/cockroach/$DIR" > "output/logs/upload-$PROFILE.log" 2>&1
}
# Upload every profile directory produced by the previous build step.
for dir in $(find output/html -mindepth 1 -maxdepth 1 -type d); do
publish $(basename "$dir")
done
# Regenerate index.html.
INDEX=$(mktemp)
trap "rm -f $INDEX" EXIT
echo '<title>Cockroach coverage</title><body><h2>Cockroach coverage runs:</h2><ul>' > "$INDEX"
# List every uploaded run (newest first, thanks to the timestamp prefix and
# `sort -r`), stripping the bucket prefix and trailing slash from gsutil output.
gsutil ls "gs://$BUCKET/cockroach" |
sed "s#gs://$BUCKET/cockroach/##" |
sed 's#/$##' |
grep -v index.html |
sort -r |
while read -r d; do
echo "<li><a href=\"$d/index.html\">$d</a>" >> "$INDEX"
done
echo '</ul></body>' >> "$INDEX"
# Short cache TTL so a newly added run becomes visible quickly.
gsutil \
  -h "Cache-Control:public, max-age=300, no-transform" \
  -h "Content-Type:text/html" \
  cp "$INDEX" "gs://$BUCKET/cockroach/index.html"
# vi: ts=4 expandtab
#
# Copyright (C) 2009-2010 Canonical Ltd.
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
#
# Author: Scott Moser <scott.moser@canonical.com>
# Author: Juerg Haefliger <juerg.haefliger@hp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Ensure this is aliased to a name not 'distros'
# since the module attribute 'distros'
# is a list of distros that are supported, not a sub-module
from cloudinit import distros as ds
from cloudinit import util
distros = ['ubuntu', 'debian']
def handle(name, cfg, cloud, log, args):
    """Enable or disable byobu for the default user and/or system-wide.

    The desired mode comes from the first module argument or, failing that,
    the ``byobu_by_default`` config key.  Accepted values:
    enable[-user|-system], disable[-user|-system]; bare "user"/"system" are
    shorthand for "enable-user"/"enable-system".
    """
    if len(args) != 0:
        value = args[0]
    else:
        value = util.get_cfg_option_str(cfg, "byobu_by_default", "")
    if not value:
        log.debug("Skipping module named %s, no 'byobu' values found", name)
        return
    if value == "user" or value == "system":
        value = "enable-%s" % value
    valid = ("enable-user", "enable-system", "enable",
             "disable-user", "disable-system", "disable")
    if value not in valid:
        # Warn but fall through: an unrecognized value matches none of the
        # suffix/prefix tests below, so shcmd stays empty and nothing runs.
        log.warn("Unknown value %s for byobu_by_default", value)
    mod_user = value.endswith("-user")
    mod_sys = value.endswith("-system")
    if value.startswith("enable"):
        bl_inst = "install"
        dc_val = "byobu byobu/launch-by-default boolean true"
        # Enabling always reconfigures the system-wide debconf default.
        mod_sys = True
    else:
        if value == "disable":
            # Bare "disable" turns byobu off for both user and system.
            mod_user = True
            mod_sys = True
        bl_inst = "uninstall"
        dc_val = "byobu byobu/launch-by-default boolean false"
    # Accumulate a shell snippet; X counts failed sub-steps and becomes the
    # overall exit status so one failure doesn't hide the others.
    shcmd = ""
    if mod_user:
        (users, _groups) = ds.normalize_users_groups(cfg, cloud.distro)
        (user, _user_config) = ds.extract_default(users)
        if not user:
            log.warn(("No default byobu user provided, "
                      "can not launch %s for the default user"), bl_inst)
        else:
            shcmd += " sudo -Hu \"%s\" byobu-launcher-%s" % (user, bl_inst)
            shcmd += " || X=$(($X+1)); "
    if mod_sys:
        shcmd += "echo \"%s\" | debconf-set-selections" % dc_val
        shcmd += " && dpkg-reconfigure byobu --frontend=noninteractive"
        shcmd += " || X=$(($X+1)); "
    if shcmd:
        cmd = ["/bin/sh", "-c", "%s %s %s" % ("X=0;", shcmd, "exit $X")]
        log.debug("Setting byobu to %s", value)
        util.subp(cmd, capture=False)
/*-------------------------------------------------------------------------
*
* cmdtag.c
* Data and routines for commandtag names and enumeration.
*
* Portions Copyright (c) 1996-2026, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* IDENTIFICATION
* src/backend/tcop/cmdtag.c
*
*-------------------------------------------------------------------------
*/
#include "postgres.h"
#include "tcop/cmdtag.h"
#include "utils/builtins.h"
typedef struct CommandTagBehavior
{
	const char *name;			/* tag name, e.g. "SELECT" */
	const uint8 namelen;		/* set to strlen(name) */
	const bool	event_trigger_ok;
	const bool	table_rewrite_ok;
	const bool	display_rowcount;	/* should the number of rows affected be
								 * shown in the command completion string */
} CommandTagBehavior;
/* Expand each cmdtaglist.h entry into one CommandTagBehavior initializer. */
#define PG_CMDTAG(tag, name, evtrgok, rwrok, rowcnt) \
	{ name, (uint8) (sizeof(name) - 1), evtrgok, rwrok, rowcnt },
/*
 * Table indexed directly by CommandTag; it is generated from the same
 * cmdtaglist.h as the enum, so the orders match by construction.
 */
static const CommandTagBehavior tag_behavior[] = {
#include "tcop/cmdtaglist.h"
};
#undef PG_CMDTAG
/*
 * Reset a QueryCompletion to its initial state: tag undetermined, zero
 * rows processed.
 */
void
InitializeQueryCompletion(QueryCompletion *qc)
{
	qc->commandTag = CMDTAG_UNKNOWN;
	qc->nprocessed = 0;
}
/*
 * Return the constant display name for a command tag.
 */
const char *
GetCommandTagName(CommandTag commandTag)
{
	return tag_behavior[commandTag].name;
}
/*
 * As above, but also return the name's precomputed length via *len,
 * saving callers a strlen() on hot paths.
 */
const char *
GetCommandTagNameAndLen(CommandTag commandTag, Size *len)
{
	*len = (Size) tag_behavior[commandTag].namelen;
	return tag_behavior[commandTag].name;
}
/* Should the affected-row count be appended to this tag's completion string? */
bool
command_tag_display_rowcount(CommandTag commandTag)
{
	return tag_behavior[commandTag].display_rowcount;
}
/* May this command fire event triggers? */
bool
command_tag_event_trigger_ok(CommandTag commandTag)
{
	return tag_behavior[commandTag].event_trigger_ok;
}
/* May this command cause a table rewrite? */
bool
command_tag_table_rewrite_ok(CommandTag commandTag)
{
	return tag_behavior[commandTag].table_rewrite_ok;
}
/*
 * Search CommandTag by name
 *
 * Returns CommandTag, or CMDTAG_UNKNOWN if not recognized
 */
CommandTag
GetCommandTagEnum(const char *commandname)
{
	const CommandTagBehavior *base,
			   *last,
			   *position;
	int			result;
	if (commandname == NULL || *commandname == '\0')
		return CMDTAG_UNKNOWN;
	/*
	 * Case-insensitive binary search over tag_behavior; correctness relies
	 * on the table being sorted by tag name.
	 */
	base = tag_behavior;
	last = tag_behavior + lengthof(tag_behavior) - 1;
	while (last >= base)
	{
		position = base + ((last - base) >> 1);
		result = pg_strcasecmp(commandname, position->name);
		if (result == 0)
			/* The matching entry's array index is its CommandTag value. */
			return (CommandTag) (position - tag_behavior);
		else if (result < 0)
			last = position - 1;
		else
			base = position + 1;
	}
	return CMDTAG_UNKNOWN;
}
/*
 * BuildQueryCompletionString
 *		Build a string containing the command tag name with the
 *		QueryCompletion's nprocessed for command tags with display_rowcount
 *		set.  Returns the strlen of the constructed string.
 *
 * The caller must ensure that buff is at least COMPLETION_TAG_BUFSIZE bytes.
 *
 * If nameonly is true, then the constructed string will contain only the tag
 * name.
 */
Size
BuildQueryCompletionString(char *buff, const QueryCompletion *qc,
						   bool nameonly)
{
	CommandTag	tag = qc->commandTag;
	Size		taglen;
	const char *tagname = GetCommandTagNameAndLen(tag, &taglen);
	char	   *bufp;
	/*
	 * We assume the tagname is plain ASCII and therefore requires no encoding
	 * conversion.
	 */
	memcpy(buff, tagname, taglen);
	bufp = buff + taglen;
	/* ensure that the tagname isn't long enough to overrun the buffer */
	Assert(taglen <= COMPLETION_TAG_BUFSIZE - MAXINT8LEN - 4);
	/*
	 * In PostgreSQL versions 11 and earlier, it was possible to create a
	 * table WITH OIDS.  When inserting into such a table, INSERT used to
	 * include the Oid of the inserted record in the completion tag.  To
	 * maintain compatibility in the wire protocol, we now write a "0" (for
	 * InvalidOid) in the location where we once wrote the new record's Oid.
	 */
	if (command_tag_display_rowcount(tag) && !nameonly)
	{
		if (tag == CMDTAG_INSERT)
		{
			*bufp++ = ' ';
			*bufp++ = '0';
		}
		*bufp++ = ' ';
		/* pg_ulltoa_n writes digits without a NUL and returns their count */
		bufp += pg_ulltoa_n(qc->nprocessed, bufp);
	}
	/* and finally, NUL terminate the string */
	*bufp = '\0';
	Assert((bufp - buff) == strlen(buff));
	return bufp - buff;
}
import type { AppLoadContext } from "../server-runtime/data";
import type { RequestHandler } from "../server-runtime/server";
import type { MiddlewareEnabled } from "../types/future";
import { createPath, invariant } from "./history";
import type { Router } from "./router";
import type {
ActionFunctionArgs,
AgnosticDataRouteObject,
FormEncType,
HTMLFormMethod,
LazyRouteObject,
LoaderFunction,
LoaderFunctionArgs,
MaybePromise,
MiddlewareFunction,
RouterContext,
RouterContextProvider,
} from "./utils";
// Public APIs
export type unstable_ServerInstrumentation = {
handler?: unstable_InstrumentRequestHandlerFunction;
route?: unstable_InstrumentRouteFunction;
};
export type unstable_ClientInstrumentation = {
router?: unstable_InstrumentRouterFunction;
route?: unstable_InstrumentRouteFunction;
};
export type unstable_InstrumentRequestHandlerFunction = (
handler: InstrumentableRequestHandler,
) => void;
export type unstable_InstrumentRouterFunction = (
router: InstrumentableRouter,
) => void;
export type unstable_InstrumentRouteFunction = (
route: InstrumentableRoute,
) => void;
export type unstable_InstrumentationHandlerResult =
| { status: "success"; error: undefined }
| { status: "error"; error: Error };
// Shared
type InstrumentFunction<T> = (
handler: () => Promise<unstable_InstrumentationHandlerResult>,
info: T,
) => Promise<void>;
type InstrumentationInfo =
| RouteLazyInstrumentationInfo
| RouteHandlerInstrumentationInfo
| RouterNavigationInstrumentationInfo
| RouterFetchInstrumentationInfo
| RequestHandlerInstrumentationInfo;
type ReadonlyRequest = {
method: string;
url: string;
headers: Pick<Headers, "get">;
};
type ReadonlyContext = MiddlewareEnabled extends true
? Pick<RouterContextProvider, "get">
: Readonly<AppLoadContext>;
// Route Instrumentation
type InstrumentableRoute = {
id: string;
index: boolean | undefined;
path: string | undefined;
instrument(instrumentations: RouteInstrumentations): void;
};
type RouteInstrumentations = {
lazy?: InstrumentFunction<RouteLazyInstrumentationInfo>;
"lazy.loader"?: InstrumentFunction<RouteLazyInstrumentationInfo>;
"lazy.action"?: InstrumentFunction<RouteLazyInstrumentationInfo>;
"lazy.middleware"?: InstrumentFunction<RouteLazyInstrumentationInfo>;
middleware?: InstrumentFunction<RouteHandlerInstrumentationInfo>;
loader?: InstrumentFunction<RouteHandlerInstrumentationInfo>;
action?: InstrumentFunction<RouteHandlerInstrumentationInfo>;
};
type RouteLazyInstrumentationInfo = undefined;
type RouteHandlerInstrumentationInfo = Readonly<{
request: ReadonlyRequest;
params: LoaderFunctionArgs["params"];
unstable_pattern: string;
context: ReadonlyContext;
}>;
// Router Instrumentation
type InstrumentableRouter = {
instrument(instrumentations: RouterInstrumentations): void;
};
type RouterInstrumentations = {
navigate?: InstrumentFunction<RouterNavigationInstrumentationInfo>;
fetch?: InstrumentFunction<RouterFetchInstrumentationInfo>;
};
type RouterNavigationInstrumentationInfo = Readonly<{
to: string | number;
currentUrl: string;
formMethod?: HTMLFormMethod;
formEncType?: FormEncType;
formData?: FormData;
body?: any;
}>;
type RouterFetchInstrumentationInfo = Readonly<{
href: string;
currentUrl: string;
fetcherKey: string;
formMethod?: HTMLFormMethod;
formEncType?: FormEncType;
formData?: FormData;
body?: any;
}>;
// Request Handler Instrumentation
type InstrumentableRequestHandler = {
instrument(instrumentations: RequestHandlerInstrumentations): void;
};
type RequestHandlerInstrumentations = {
request?: InstrumentFunction<RequestHandlerInstrumentationInfo>;
};
type RequestHandlerInstrumentationInfo = Readonly<{
request: ReadonlyRequest;
context: ReadonlyContext | undefined;
}>;
// Module-private marker used to stash the original (uninstrumented) function
// on its instrumented wrapper, so re-instrumenting re-wraps the original
// instead of stacking wrappers.
const UninstrumentedSymbol = Symbol("Uninstrumented");
/**
 * Build route-property overrides (lazy/loader/action/middleware) that wrap
 * the route's existing implementations with the registered instrumentations.
 * Only properties that have both an implementation and at least one matching
 * instrumentation appear in the returned `updates` object.
 */
export function getRouteInstrumentationUpdates(
  fns: unstable_InstrumentRouteFunction[],
  route: Readonly<AgnosticDataRouteObject>,
) {
  // Collect instrumentation functions per instrumentable key across all
  // registered route-instrumentation callbacks.
  let aggregated: {
    lazy: InstrumentFunction<RouteLazyInstrumentationInfo>[];
    "lazy.loader": InstrumentFunction<RouteLazyInstrumentationInfo>[];
    "lazy.action": InstrumentFunction<RouteLazyInstrumentationInfo>[];
    "lazy.middleware": InstrumentFunction<RouteLazyInstrumentationInfo>[];
    middleware: InstrumentFunction<RouteHandlerInstrumentationInfo>[];
    loader: InstrumentFunction<RouteHandlerInstrumentationInfo>[];
    action: InstrumentFunction<RouteHandlerInstrumentationInfo>[];
  } = {
    lazy: [],
    "lazy.loader": [],
    "lazy.action": [],
    "lazy.middleware": [],
    middleware: [],
    loader: [],
    action: [],
  };
  fns.forEach((fn) =>
    fn({
      id: route.id,
      index: route.index,
      path: route.path,
      instrument(i) {
        let keys = Object.keys(aggregated) as Array<keyof typeof aggregated>;
        for (let key of keys) {
          if (i[key]) {
            aggregated[key].push(i[key] as any);
          }
        }
      },
    }),
  );
  let updates: {
    middleware?: AgnosticDataRouteObject["middleware"];
    loader?: AgnosticDataRouteObject["loader"];
    action?: AgnosticDataRouteObject["action"];
    lazy?: AgnosticDataRouteObject["lazy"];
  } = {};
  // Instrument lazy functions
  if (typeof route.lazy === "function" && aggregated.lazy.length > 0) {
    let instrumented = wrapImpl(aggregated.lazy, route.lazy, () => undefined);
    if (instrumented) {
      updates.lazy = instrumented as AgnosticDataRouteObject["lazy"];
    }
  }
  // Instrument the lazy object format
  if (typeof route.lazy === "object") {
    let lazyObject: LazyRouteObject<AgnosticDataRouteObject> = route.lazy;
    (["middleware", "loader", "action"] as const).forEach((key) => {
      let lazyFn = lazyObject[key];
      let instrumentations = aggregated[`lazy.${key}`];
      if (typeof lazyFn === "function" && instrumentations.length > 0) {
        let instrumented = wrapImpl(instrumentations, lazyFn, () => undefined);
        if (instrumented) {
          updates.lazy = Object.assign(updates.lazy || {}, {
            [key]: instrumented,
          });
        }
      }
    });
  }
  // Instrument loader/action functions
  (["loader", "action"] as const).forEach((key) => {
    let handler = route[key];
    if (typeof handler === "function" && aggregated[key].length > 0) {
      // Unwrap any previous instrumentation so wrappers never stack.
      // @ts-expect-error
      let original = handler[UninstrumentedSymbol] ?? handler;
      let instrumented = wrapImpl(aggregated[key], original, (...args) =>
        getHandlerInfo(args[0] as LoaderFunctionArgs | ActionFunctionArgs),
      );
      if (instrumented) {
        // Preserve the `hydrate` flag the router reads off loaders.
        if (key === "loader" && original.hydrate === true) {
          (instrumented as LoaderFunction).hydrate = true;
        }
        // @ts-expect-error
        instrumented[UninstrumentedSymbol] = original;
        updates[key] = instrumented;
      }
    }
  });
  // Instrument middleware functions
  if (
    route.middleware &&
    route.middleware.length > 0 &&
    aggregated.middleware.length > 0
  ) {
    updates.middleware = route.middleware.map((middleware) => {
      // @ts-expect-error
      let original = middleware[UninstrumentedSymbol] ?? middleware;
      let instrumented = wrapImpl(aggregated.middleware, original, (...args) =>
        getHandlerInfo(args[0] as Parameters<MiddlewareFunction>[0]),
      );
      if (instrumented) {
        // @ts-expect-error
        instrumented[UninstrumentedSymbol] = original;
        return instrumented;
      }
      return middleware;
    });
  }
  return updates;
}
/**
 * Wrap `router.navigate` and `router.fetch` in place with the registered
 * router instrumentations. Re-instrumentation is safe: the original method
 * is recovered via UninstrumentedSymbol before wrapping, so wrappers never
 * stack. Returns the same (mutated) router.
 */
export function instrumentClientSideRouter(
  router: Router,
  fns: unstable_InstrumentRouterFunction[],
): Router {
  let aggregated: {
    navigate: InstrumentFunction<RouterNavigationInstrumentationInfo>[];
    fetch: InstrumentFunction<RouterFetchInstrumentationInfo>[];
  } = {
    navigate: [],
    fetch: [],
  };
  fns.forEach((fn) =>
    fn({
      instrument(i) {
        let keys = Object.keys(i) as Array<keyof RouterInstrumentations>;
        for (let key of keys) {
          if (i[key]) {
            aggregated[key].push(i[key] as any);
          }
        }
      },
    }),
  );
  if (aggregated.navigate.length > 0) {
    // @ts-expect-error
    let navigate = router.navigate[UninstrumentedSymbol] ?? router.navigate;
    let instrumentedNavigate = wrapImpl(
      aggregated.navigate,
      navigate,
      (...args) => {
        let [to, opts] = args as Parameters<Router["navigate"]>;
        return {
          // Normalize the destination to a string (or delta number);
          // location objects are serialized via createPath.
          to:
            typeof to === "number" || typeof to === "string"
              ? to
              : to
                ? createPath(to)
                : ".",
          ...getRouterInfo(router, opts ?? {}),
        } satisfies RouterNavigationInstrumentationInfo;
      },
    ) as Router["navigate"];
    if (instrumentedNavigate) {
      // @ts-expect-error
      instrumentedNavigate[UninstrumentedSymbol] = navigate;
      router.navigate = instrumentedNavigate;
    }
  }
  if (aggregated.fetch.length > 0) {
    // @ts-expect-error
    let fetch = router.fetch[UninstrumentedSymbol] ?? router.fetch;
    let instrumentedFetch = wrapImpl(aggregated.fetch, fetch, (...args) => {
      let [key, , href, opts] = args as Parameters<Router["fetch"]>;
      return {
        href: href ?? ".",
        fetcherKey: key,
        ...getRouterInfo(router, opts ?? {}),
      } satisfies RouterFetchInstrumentationInfo;
    }) as Router["fetch"];
    if (instrumentedFetch) {
      // @ts-expect-error
      instrumentedFetch[UninstrumentedSymbol] = fetch;
      router.fetch = instrumentedFetch;
    }
  }
  return router;
}
/**
 * Wrap a server-side request handler with the registered `request`
 * instrumentations. Returns the original handler unchanged when no
 * instrumentation registered an interest.
 */
export function instrumentHandler(
  handler: RequestHandler,
  fns: unstable_InstrumentRequestHandlerFunction[],
): RequestHandler {
  let aggregated: {
    request: InstrumentFunction<RequestHandlerInstrumentationInfo>[];
  } = {
    request: [],
  };
  fns.forEach((fn) =>
    fn({
      instrument(i) {
        let keys = Object.keys(i) as Array<keyof typeof i>;
        for (let key of keys) {
          if (i[key]) {
            aggregated[key].push(i[key] as any);
          }
        }
      },
    }),
  );
  let instrumentedHandler = handler;
  if (aggregated.request.length > 0) {
    instrumentedHandler = wrapImpl(aggregated.request, handler, (...args) => {
      let [request, context] = args as Parameters<RequestHandler>;
      return {
        request: getReadonlyRequest(request),
        context: context != null ? getReadonlyContext(context) : context,
      } satisfies RequestHandlerInstrumentationInfo;
    }) as RequestHandler;
  }
  return instrumentedHandler;
}
/**
 * Compose the collected instrumentation functions around `handler`.
 * Returns null when there is nothing to wrap. The wrapper awaits the
 * instrumentation chain and rethrows any captured error so callers still
 * observe the original failure.
 */
function wrapImpl<T extends InstrumentationInfo>(
  impls: InstrumentFunction<T>[],
  handler: (...args: any[]) => MaybePromise<any>,
  getInfo: (...args: unknown[]) => T,
) {
  if (impls.length === 0) {
    return null;
  }
  return async (...args: unknown[]) => {
    // Start at the last-registered instrumentation; recurseRight walks
    // toward index 0 and finally invokes the real handler.
    let result = await recurseRight(
      impls,
      getInfo(...args),
      () => handler(...args),
      impls.length - 1,
    );
    if (result.type === "error") {
      throw result.value;
    }
    return result.value;
  };
}
// Envelope distinguishing a handler's return value from a thrown error.
type RecurseResult = { type: "success" | "error"; value: unknown };
/**
 * Run instrumentation `impls[index]`, giving it a `callHandler` that invokes
 * the next instrumentation down (and ultimately the real handler).
 * Guarantees: the handler runs at most once per level, it still runs if the
 * instrumentation forgets to call it or throws first, and instrumentation
 * errors are logged rather than propagated.
 */
async function recurseRight<T extends InstrumentationInfo>(
  impls: InstrumentFunction<T>[],
  info: T,
  handler: () => MaybePromise<void>,
  index: number,
): Promise<RecurseResult> {
  let impl = impls[index];
  let result: RecurseResult | undefined;
  if (!impl) {
    try {
      let value = await handler();
      result = { type: "success", value };
    } catch (e) {
      result = { type: "error", value: e };
    }
  } else {
    // If they forget to call the handler, or if they throw before calling the
    // handler, we need to ensure the handlers still gets called
    let handlerPromise: ReturnType<typeof recurseRight> | undefined = undefined;
    let callHandler =
      async (): Promise<unstable_InstrumentationHandlerResult> => {
        if (handlerPromise) {
          console.error("You cannot call instrumented handlers more than once");
        } else {
          handlerPromise = recurseRight(impls, info, handler, index - 1);
        }
        result = await handlerPromise;
        invariant(result, "Expected a result");
        if (result.type === "error" && result.value instanceof Error) {
          return { status: "error", error: result.value };
        }
        return { status: "success", error: undefined };
      };
    try {
      await impl(callHandler, info);
    } catch (e) {
      console.error("An instrumentation function threw an error:", e);
    }
    if (!handlerPromise) {
      await callHandler();
    }
    // If the user forgot to await the handler, we can wait for it to resolve here
    await handlerPromise;
  }
  if (result) {
    return result;
  }
  return {
    type: "error",
    value: new Error("No result assigned in instrumentation chain."),
  };
}
// Snapshot the pieces of a loader/action/middleware invocation that
// instrumentation may inspect, in read-only form.
function getHandlerInfo(
  args:
    | LoaderFunctionArgs
    | ActionFunctionArgs
    | Parameters<MiddlewareFunction>[0],
): RouteHandlerInstrumentationInfo {
  const { request, context, params, unstable_pattern } = args;
  const info: RouteHandlerInstrumentationInfo = {
    request: getReadonlyRequest(request),
    params: { ...params },
    unstable_pattern,
    context: getReadonlyContext(context),
  };
  return info;
}
// Build the read-only info shared by navigate/fetch instrumentation: the
// current URL plus whichever submission-related options were provided
// (absent options stay absent rather than becoming `undefined` keys).
function getRouterInfo(
  router: Router,
  opts: NonNullable<
    Parameters<Router["navigate"]>[1] | Parameters<Router["fetch"]>[3]
  >,
) {
  return {
    currentUrl: createPath(router.state.location),
    ...("formMethod" in opts ? { formMethod: opts.formMethod } : {}),
    ...("formEncType" in opts ? { formEncType: opts.formEncType } : {}),
    ...("formData" in opts ? { formData: opts.formData } : {}),
    ...("body" in opts ? { body: opts.body } : {}),
  };
}
// Return a shallow readonly "clone" of the Request exposing only the fields
// instrumentation may read — it cannot consume the body or mutate headers.
function getReadonlyRequest(request: Request): {
  method: string;
  url: string;
  headers: Pick<Headers, "get">;
} {
  const { method, url } = request;
  return {
    method,
    url,
    headers: {
      get: (...args) => request.headers.get(...args),
    },
  };
}
// Produce a read-only view of the context handed to handlers. Plain objects
// (AppLoadContext) are shallow-copied and frozen; anything else is treated
// as a RouterContextProvider and reduced to a `get`-only facade.
function getReadonlyContext(
  context: MiddlewareEnabled extends true
    ? RouterContextProvider
    : AppLoadContext,
): MiddlewareEnabled extends true
  ? Pick<RouterContextProvider, "get">
  : Readonly<AppLoadContext> {
  if (isPlainObject(context)) {
    // Shallow freeze only — nested objects remain mutable.
    let frozen = { ...context };
    Object.freeze(frozen);
    return frozen;
  } else {
    return {
      get: <T>(ctx: RouterContext<T>) =>
        (context as unknown as RouterContextProvider).get(ctx),
    };
  }
}
// From turbo-stream-v2/flatten.ts
const objectProtoNames = Object.getOwnPropertyNames(Object.prototype)
.sort()
.join("\0");
function isPlainObject(
thing: unknown,
): thing is Record<string | number | symbol, unknown> {
if (thing === null || typeof thing !== "object") {
return false;
}
const proto = Object.getPrototypeOf(thing);
return (
proto === Object.prototype ||
proto === null ||
Object.getOwnPropertyNames(proto).sort().join("\0") === objectProtoNames
);
} | typescript | github | https://github.com/remix-run/react-router | packages/react-router/lib/router/instrumentation.ts |
# frozen_string_literal: true
require "active_support/tagged_logging"
require "active_support/logger"
module ActiveJob
  # Provides the configurable job logger and log tagging for Active Job.
  #
  # Enqueues and +perform_now+ executions are wrapped in an "ActiveJob"
  # log tag (plus job class name and job id for +perform_now+) whenever the
  # configured logger supports tagged logging.
  module Logging
    extend ActiveSupport::Concern

    included do
      ##
      # Accepts a logger conforming to the interface of Log4r or the default
      # Ruby +Logger+ class. You can retrieve this logger by calling +logger+ on
      # either an Active Job job class or an Active Job job instance.
      cattr_accessor :logger, default: ActiveSupport::TaggedLogging.new(ActiveSupport::Logger.new(STDOUT))

      ##
      # Configures whether a job's arguments should be logged. This can be
      # useful when a job's arguments may be sensitive and so should not be
      # logged.
      #
      # The value defaults to +true+, but this can be configured with
      # +config.active_job.log_arguments+. Additionally, individual jobs can
      # also configure a value, which will apply to themselves and any
      # subclasses.
      class_attribute :log_arguments, instance_accessor: false, default: true

      # Prepended so the tag wraps any other around_enqueue callbacks.
      around_enqueue(prepend: true) { |_, block| tag_logger(&block) }
    end

    def perform_now # :nodoc:
      tag_logger(self.class.name, self.job_id) { super }
    end

    private
      # Runs +block+ with the given tags via the logger's +tagged+ support,
      # always prepending "ActiveJob" unless an enclosing call already did.
      # Falls back to a plain yield for loggers without tagging.
      def tag_logger(*tags, &block)
        if logger.respond_to?(:tagged)
          tags.unshift "ActiveJob" unless logger_tagged_by_active_job?
          logger.tagged(*tags, &block)
        else
          yield
        end
      end

      # True if an enclosing +tagged+ block has already pushed "ActiveJob".
      def logger_tagged_by_active_job?
        logger.formatter.current_tags.include?("ActiveJob")
      end
  end
end
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifndef OPENCV_UTILS_FILESYSTEM_HPP
#define OPENCV_UTILS_FILESYSTEM_HPP
namespace cv { namespace utils { namespace fs {

//! Returns true if a file or directory exists at @p path.
CV_EXPORTS bool exists(const cv::String& path);
//! Returns true if @p path refers to a directory.
CV_EXPORTS bool isDirectory(const cv::String& path);

//! Recursively removes the file or directory tree rooted at @p path.
CV_EXPORTS void remove_all(const cv::String& path);

//! Returns the current working directory.
CV_EXPORTS cv::String getcwd();

/** @brief Converts path p to a canonical absolute path
 * Symlinks are processed if there is support for them on running platform.
 *
 * @param path input path. Target file/directory should exist.
 */
CV_EXPORTS cv::String canonical(const cv::String& path);

/** Join path components */
CV_EXPORTS cv::String join(const cv::String& base, const cv::String& path);

/** Get parent directory */
CV_EXPORTS cv::String getParent(const cv::String &path);
//! Wide-string overload of getParent().
CV_EXPORTS std::wstring getParent(const std::wstring& path);

/**
 * Generate a list of all files that match the globbing pattern.
 *
 * Result entries are prefixed by base directory path.
 *
 * @param directory base directory
 * @param pattern filter pattern (based on '*'/'?' symbols). Use empty string to disable filtering and return all results
 * @param[out] result result of globing.
 * @param recursive scan nested directories too
 * @param includeDirectories include directories into results list
 */
CV_EXPORTS void glob(const cv::String& directory, const cv::String& pattern,
                     CV_OUT std::vector<cv::String>& result,
                     bool recursive = false, bool includeDirectories = false);

/**
 * Generate a list of all files that match the globbing pattern.
 *
 * @param directory base directory
 * @param pattern filter pattern (based on '*'/'?' symbols). Use empty string to disable filtering and return all results
 * @param[out] result globbing result with relative paths from base directory
 * @param recursive scan nested directories too
 * @param includeDirectories include directories into results list
 */
CV_EXPORTS void glob_relative(const cv::String& directory, const cv::String& pattern,
                              CV_OUT std::vector<cv::String>& result,
                              bool recursive = false, bool includeDirectories = false);

// NOTE(review): the boolean return semantics (e.g. behavior when the target
// already exists) are defined in the implementation — confirm before relying.
//! Creates a single directory at @p path.
CV_EXPORTS bool createDirectory(const cv::String& path);
//! Creates a directory at @p path, including missing intermediate directories.
CV_EXPORTS bool createDirectories(const cv::String& path);

#if defined(__OPENCV_BUILD) || defined(BUILD_PLUGIN)
// TODO
//CV_EXPORTS cv::String getTempDirectory();

/**
 * @brief Returns directory to store OpenCV cache files
 * Create sub-directory in common OpenCV cache directory if it doesn't exist.
 * @param sub_directory_name name of sub-directory. NULL or "" value asks to return root cache directory.
 * @param configuration_name optional name of configuration parameter name which overrides default behavior.
 * @return Path to cache directory. Returns empty string if cache directories support is not available. Returns "disabled" if cache disabled by user.
 */
CV_EXPORTS cv::String getCacheDirectory(const char* sub_directory_name, const char* configuration_name = NULL);

#endif

}}} // namespace
#endif // OPENCV_UTILS_FILESYSTEM_HPP | unknown | github | https://github.com/opencv/opencv | modules/core/include/opencv2/core/utils/filesystem.hpp |
#ifndef ITERATOR_H
#define ITERATOR_H
/*
 * Generic constants related to iterators.
 */

/*
 * The attempt to advance the iterator was successful; the iterator
 * reflects the new current entry.
 */
#define ITER_OK 0

/*
 * The iterator is exhausted.
 */
#define ITER_DONE -1

/*
 * The iterator experienced an error. The iteration has been aborted
 * and the iterator has been freed.
 */
#define ITER_ERROR -2

/*
 * Return values for selector functions for merge iterators. The
 * numerical values of these constants are important and must be
 * compatible with ITER_DONE and ITER_ERROR.
 *
 * (The mask-based selection values below are all non-negative, so they
 * can never collide with ITER_DONE/ITER_ERROR, which are negative.)
 */
enum iterator_selection {
	/* End the iteration without an error: */
	ITER_SELECT_DONE = ITER_DONE,

	/* Report an error and abort the iteration: */
	ITER_SELECT_ERROR = ITER_ERROR,

	/*
	 * The next group of constants are masks that are useful
	 * mainly internally.
	 */

	/* The LSB selects whether iter0/iter1 is the "current" iterator: */
	ITER_CURRENT_SELECTION_MASK = 0x01,

	/* iter0 is the "current" iterator this round: */
	ITER_CURRENT_SELECTION_0 = 0x00,

	/* iter1 is the "current" iterator this round: */
	ITER_CURRENT_SELECTION_1 = 0x01,

	/* Yield the value from the current iterator? */
	ITER_YIELD_CURRENT = 0x02,

	/* Discard the value from the secondary iterator? */
	ITER_SKIP_SECONDARY = 0x04,

	/*
	 * The constants that a selector function should usually
	 * return.
	 */

	/* Yield the value from iter0: */
	ITER_SELECT_0 = ITER_CURRENT_SELECTION_0 | ITER_YIELD_CURRENT,

	/* Yield the value from iter0 and discard the one from iter1: */
	ITER_SELECT_0_SKIP_1 = ITER_SELECT_0 | ITER_SKIP_SECONDARY,

	/* Discard the value from iter0 without yielding anything this round: */
	ITER_SKIP_0 = ITER_CURRENT_SELECTION_1 | ITER_SKIP_SECONDARY,

	/* Yield the value from iter1: */
	ITER_SELECT_1 = ITER_CURRENT_SELECTION_1 | ITER_YIELD_CURRENT,

	/* Yield the value from iter1 and discard the one from iter0: */
	ITER_SELECT_1_SKIP_0 = ITER_SELECT_1 | ITER_SKIP_SECONDARY,

	/* Discard the value from iter1 without yielding anything this round: */
	ITER_SKIP_1 = ITER_CURRENT_SELECTION_0 | ITER_SKIP_SECONDARY
};
#endif /* ITERATOR_H */ | c | github | https://github.com/git/git | iterator.h |
"""
Make transformations/adjustments/reorganisations of the QOF data
"""
import os
import datetime
import json
import sys
import ffs
import re
from publish.lib.helpers import filename_for_resource, download_file
from publish.lib.upload import Uploader
DATA_DIR = None
def get_metadata_file(filename):
    """Return the absolute path of *filename* in the project metadata dir.

    The metadata directory lives two levels above this module:
    <module dir>/../../metadata/<filename>.
    """
    here = os.path.dirname(__file__)
    metadata_dir = os.path.join(here, os.path.pardir, os.path.pardir, "metadata")
    return os.path.abspath(os.path.join(metadata_dir, filename))
def add_metadata_to_qof_datasets():
    """Enrich the NHSOF/QOF metadata file in DATA_DIR, in place.

    For every dataset in nhsof_metadata_indicators.json: adds the standard
    QOF tags, downloads each source file, re-uploads it to S3 via Uploader,
    and rewrites the resource list with the new S3 URLs.  The updated
    metadata is written back to the same JSON file.
    """
    metadata_path = os.path.join(DATA_DIR, "nhsof_metadata_indicators.json")
    # Context managers close the file handles the old open() calls leaked.
    with open(metadata_path) as fh:
        datasets = json.load(fh)
    u = Uploader("nshof")
    try:
        for metadata in datasets:
            metadata['tags'] = ['QOF', 'Quality Outcomes Framework']
            resources = []
            for resource in metadata['sources']:
                resource['format'] = resource['filetype']
                # Derive a resource name from the last URL path segment.
                resource['name'] = resource['url'].split('/')[-1]
                resource['url_type'] = ''
                filename = filename_for_resource(resource)
                path = DATA_DIR / filename
                download_file(resource['url'], path)
                # print() form works on both Python 2 and 3 (the old
                # `print "..."` statement was Python-2-only).
                print("Uploading to S3")
                resource['url'] = u.upload(path)
                resources.append(resource)
            metadata['resources'] = resources
    finally:
        # Release the uploader even if a download or upload fails.
        u.close()
    with open(metadata_path, "w") as fh:
        json.dump(datasets, fh)
def main(workspace):
    """Entry point: point DATA_DIR at <workspace>/data and run the transform.

    Returns 0 on success so callers can use it as a shell exit status.
    """
    global DATA_DIR
    DATA_DIR = ffs.Path(workspace) / 'data'
    add_metadata_to_qof_datasets()
    return 0
import numpy as np
import time

from cvxpy import *
from utils import *

# Fix the RNG seed so data generation and initialization are reproducible.
np.random.seed(123)

# Number of alternating (flip-flop) iterations for the Kronecker solver.
num_iters = 10

# WARNING: DO NOT SET ABOVE 4 (basic cvxpy dies)
dim1 = 2
dim2 = 2
num_data = 1000

# Permutation matrix relating vec orderings of the two Kronecker factors
# (see utils.vecperm — semantics defined there).
vec_perm = vecperm(dim1, dim2)

# Ground-truth covariance: Kronecker product of two random PD factors.
tgt_kron1, tgt_kron2 = generate_pd_krons(dim1, dim2)
rand_mat = np.kron(tgt_kron1, tgt_kron2)

# Generate data
print("Generating data...")
data = np.random.multivariate_normal(np.zeros(dim1*dim2), rand_mat, num_data)

print("Building covariance matrix from data...")
S_n = np.cov(data.T)
# Permuted sample covariance, used when solving for the other factor.
S_n_bar = vec_perm.T.dot(S_n.dot(vec_perm))

print("Solving KGlasso...")
s_time = time.time()

# Random initialization
init_kron1, init_kron2 = generate_pd_krons(dim1, dim2)
# X and Y are the two precision (inverse-covariance) factors being estimated.
X = np.linalg.inv(init_kron1)
Y = np.linalg.inv(init_kron2)

# Alternate: fix X and solve a small graphical lasso for Y, then fix Y and
# solve for X against the permuted covariance.
for i in range(num_iters):
    print("\n*** in iteration %d" % i)
    B = block_sum(dim1, dim2, X, S_n)
    print("*** solving for Y")
    Y = basic_glasso(dim2, B)
    A = block_sum(dim2, dim1, Y, S_n_bar)
    print("*** solving for X")
    X = basic_glasso(dim1, A)

# Covariance estimate implied by the two precision factors.
kglasso_res = np.linalg.inv(np.kron(X, Y))
e_time = time.time()
kglasso_time = e_time - s_time

s_time = time.time()
print("Solving basic optimization...")
# Baseline: a single graphical lasso over the full (dim1*dim2)-dim problem.
basic_res = basic_glasso(dim1*dim2, S_n, return_inv=True, verbose=True)
e_time = time.time()
basic_time = e_time - s_time

print("Basic optimization error: %.4f" % error_percentage(basic_res, rand_mat))
print("Basic optimization time: %.4f" % basic_time)
print("KGlasso optimization error: %.4f" % error_percentage(kglasso_res, rand_mat))
print("KGlasso optimization time: %.4f" % kglasso_time)
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 coding=utf-8
import os, glob
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.utils.translation import ugettext_lazy
from settings import PROJECT_ROOT
import os
from django.core.serializers import serialize
from odk_logger.models import XForm, Instance, SurveyType
class Command(BaseCommand):
    """Dump all ODK XForms and Instances as JSON fixture files.

    Writes a-xforms.json and b-instances.json into
    PROJECT_ROOT/json_xform_fixtures (created if missing).  The "a-"/"b-"
    prefixes presumably keep forms loading before the instances that
    reference them — confirm against the fixture-loading order.
    """

    help = ugettext_lazy("Export ODK forms and instances to JSON.")

    def handle(self, *args, **kwargs):
        fixtures_dir = os.path.join(PROJECT_ROOT, "json_xform_fixtures")
        if not os.path.exists(fixtures_dir):
            os.mkdir(fixtures_dir)
        self._dump(XForm, os.path.join(fixtures_dir, "a-xforms.json"))
        self._dump(Instance, os.path.join(fixtures_dir, "b-instances.json"))

    def _dump(self, model, path):
        # Serialize every row of `model` to `path`; the context manager
        # guarantees the file is closed even if serialization raises
        # (the old open()/close() pairs leaked the handle on error).
        with open(path, "w") as out:
            out.write(serialize("json", model.objects.all()))
# Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for sonnet.python.ops.nest.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
# Dependency imports
import numpy as np
import six
from sonnet.python.ops import nest
import tensorflow as tf
# "class" on Python 3 vs "type" on Python 2 — presumably interpolated into
# version-dependent expected error messages (no use visible in this chunk).
typekw = "class" if six.PY3 else "type"
class NestTest(tf.test.TestCase):
  """Tests for `nest.map` and the flatten_iterable/pack_iterable_as helpers."""

  def testStringRepeat(self):
    """Leaves of both structures are passed pairwise to the mapped function."""
    ab_tuple = collections.namedtuple("ab_tuple", "a, b")
    inp_a = ab_tuple(a="foo", b=("bar", "baz"))
    inp_b = ab_tuple(a=2, b=(1, 3))
    out = nest.map(lambda string, repeats: string * repeats, inp_a, inp_b)
    self.assertEqual(out.a, "foofoo")
    self.assertEqual(out.b[0], "bar")
    self.assertEqual(out.b[1], "bazbazbaz")

  def testMapSingleCollection(self):
    """map() over a single structure preserves its shape."""
    ab_tuple = collections.namedtuple("ab_tuple", "a, b")
    nt = ab_tuple(a=("something", "something_else"),
                  b="yet another thing")
    rev_nt = nest.map(lambda x: x[::-1], nt)
    # Check the output is the correct structure, and all strings are reversed.
    nest.assert_same_structure(nt, rev_nt)
    self.assertEqual(nt.a[0][::-1], rev_nt.a[0])
    self.assertEqual(nt.a[1][::-1], rev_nt.a[1])
    self.assertEqual(nt.b[::-1], rev_nt.b)

  def testMapOverTwoTuples(self):
    """map() composes TF placeholder leaves and the graph evaluates them."""
    inp_a = (tf.placeholder(tf.float32, shape=[3, 4]),
             tf.placeholder(tf.float32, shape=[3, 7]))
    inp_b = (tf.placeholder(tf.float32, shape=[3, 4]),
             tf.placeholder(tf.float32, shape=[3, 7]))
    output = nest.map(lambda x1, x2: x1 + x2, inp_a, inp_b)
    nest.assert_same_structure(output, inp_a)
    self.assertShapeEqual(np.zeros((3, 4)), output[0])
    self.assertShapeEqual(np.zeros((3, 7)), output[1])
    feed_dict = {
        inp_a: (np.random.randn(3, 4), np.random.randn(3, 7)),
        inp_b: (np.random.randn(3, 4), np.random.randn(3, 7))
    }
    with self.test_session() as sess:
      output_np = sess.run(output, feed_dict=feed_dict)
    self.assertAllClose(output_np[0],
                        feed_dict[inp_a][0] + feed_dict[inp_b][0])
    self.assertAllClose(output_np[1],
                        feed_dict[inp_a][1] + feed_dict[inp_b][1])

  def testStructureMustBeSame(self):
    """Structures of different sizes raise ValueError."""
    inp_a = (3, 4)
    inp_b = (42, 42, 44)
    err = "The two structures don't have the same number of elements."
    with self.assertRaisesRegexp(ValueError, err):
      nest.map(lambda a, b: a + b, inp_a, inp_b)

  def testMultiNest(self):
    """Nested tuples are mapped elementwise at every depth."""
    inp_a = (3, (4, 5))
    inp_b = (42, (42, 44))
    output = nest.map(lambda a, b: a + b, inp_a, inp_b)
    self.assertEqual((45, (46, 49)), output)

  def testNoSequences(self):
    """Calling map() without any structure is an error."""
    with self.assertRaisesRegexp(ValueError,
                                 "Must provide at least one structure"):
      nest.map(lambda x: x)

  def testEmptySequences(self):
    """Empty containers map to empty containers of the same type."""
    f = lambda x: x + 1
    empty_nt = collections.namedtuple("empty_nt", "")
    self.assertEqual((), nest.map(f, ()))
    self.assertEqual([], nest.map(f, []))
    self.assertEqual(empty_nt(), nest.map(f, empty_nt()))
    # This is checking actual equality of types, empty list != empty tuple
    self.assertNotEqual((), nest.map(f, []))

  def testFlattenAndPackIterable(self):
    """flatten_iterable/pack_iterable_as round-trip a mixed structure."""
    # A nice messy mix of tuples, lists, dicts, and `OrderedDict`s.
    named_tuple = collections.namedtuple("A", ("b", "c"))
    mess = [
        "z",
        named_tuple(3, 4),
        {
            "c": [
                1,
                collections.OrderedDict([
                    ("b", 3),
                    ("a", 2),
                ]),
            ],
            "b": 5
        },
        17
    ]

    flattened = nest.flatten_iterable(mess)
    self.assertEqual(flattened, ["z", 3, 4, 5, 1, 2, 3, 17])

    structure_of_mess = [
        14,
        named_tuple("a", True),
        {
            "c": [
                0,
                collections.OrderedDict([
                    ("b", 9),
                    ("a", 8),
                ]),
            ],
            "b": 3
        },
        "hi everybody",
    ]

    unflattened = nest.pack_iterable_as(structure_of_mess, flattened)
    self.assertEqual(unflattened, mess)

  def testFlattenIterable_numpyIsNotFlattened(self):
    """A numpy array is treated as a single leaf, not as an iterable."""
    structure = np.array([1, 2, 3])
    flattened = nest.flatten_iterable(structure)
    self.assertEqual(len(flattened), 1)

  def testFlattenIterable_stringIsNotFlattened(self):
    """A string is treated as a single leaf, not as an iterable."""
    structure = "lots of letters"
    flattened = nest.flatten_iterable(structure)
    self.assertEqual(len(flattened), 1)

  def testFlatternIterable_scalarStructure(self):
    # Tests can call flatten_iterable with single "scalar" object.
    structure = "hello"
    flattened = nest.flatten_iterable(structure)
    unflattened = nest.pack_iterable_as("goodbye", flattened)
    self.assertEqual(structure, unflattened)

  def testPackIterableAs_notIterableError(self):
    """pack_iterable_as rejects a non-iterable flat argument."""
    with self.assertRaisesRegexp(TypeError,
                                 "flat_iterable must be an iterable"):
      nest.pack_iterable_as("hi", "bye")

  def testPackIterableAs_scalarStructureError(self):
    """A scalar structure cannot absorb more than one flat element."""
    with self.assertRaisesRegexp(
        ValueError, r"Structure is a scalar but len\(flat_iterable\) == 2 > 1"):
      nest.pack_iterable_as("hi", ["bye", "twice"])

  def testPackIterableAs_wrongLengthsError(self):
    """Structure and flat list must contain the same number of elements."""
    with self.assertRaisesRegexp(
        ValueError,
        "Structure had 2 elements, but flat_iterable had 3 elements."):
      nest.pack_iterable_as(["hello", "world"],
                            ["and", "goodbye", "again"])
if __name__ == "__main__":
  # Run all NestTest cases under the TensorFlow test runner.
  tf.test.main()
// Copyright IBM Corp. 2016, 2025
// SPDX-License-Identifier: BUSL-1.1
package token
import (
"fmt"
"io"
"os"
"strconv"
"strings"
"github.com/hashicorp/go-secure-stdlib/password"
"github.com/hashicorp/vault/api"
)
// CLIHandler implements the command-line login handler for the token
// auth method.
type CLIHandler struct {
	// for tests: when non-nil these replace os.Stdin/os.Stderr so prompts
	// can be driven and captured without a real terminal.
	testStdin  io.Reader
	testStdout io.Writer
}
// Auth logs in using the token auth method. The token comes from m["token"]
// or, if absent, is prompted for on stdin (hidden). Unless m["lookup"]
// parses as false, the token is verified with a lookup-self call and the
// returned Secret mirrors that token's policies and metadata.
func (h *CLIHandler) Auth(c *api.Client, m map[string]string) (*api.Secret, error) {
	// Parse "lookup" first - we want to return an early error if the user
	// supplied an invalid value here before we prompt them for a token. It would
	// be annoying to type your token and then be told you supplied an invalid
	// value that we could have known in advance.
	lookup := true
	if x, ok := m["lookup"]; ok {
		parsed, err := strconv.ParseBool(x)
		if err != nil {
			return nil, fmt.Errorf("Failed to parse \"lookup\" as boolean: %w", err)
		}
		lookup = parsed
	}

	// Parse the token.
	token, ok := m["token"]
	if !ok {
		// Override the output (stderr by default; tests may inject a writer)
		stdout := h.testStdout
		if stdout == nil {
			stdout = os.Stderr
		}

		// No arguments given, read the token from user input
		fmt.Fprintf(stdout, "Token (will be hidden): ")
		var err error
		token, err = password.Read(os.Stdin)
		fmt.Fprintf(stdout, "\n")
		if err != nil {
			if err == password.ErrInterrupted {
				return nil, fmt.Errorf("user interrupted")
			}

			return nil, fmt.Errorf("An error occurred attempting to "+
				"ask for a token. The raw error message is shown below, but usually "+
				"this is because you attempted to pipe a value into the command or "+
				"you are executing outside of a terminal (tty). If you want to pipe "+
				"the value, pass \"-\" as the argument to read from stdin. The raw "+
				"error was: %w", err)
		}
	}

	// Remove any whitespace, etc.
	token = strings.TrimSpace(token)
	if token == "" {
		return nil, fmt.Errorf(
			"a token must be passed to auth, please view the help for more " +
				"information")
	}

	// If the user declined verification, return now. Note that we will not have
	// a lot of information about the token.
	if !lookup {
		return &api.Secret{
			Auth: &api.SecretAuth{
				ClientToken: token,
			},
		}, nil
	}

	// If we got this far, we want to look up the token and pull its
	// list of policies and metadata.
	c.SetToken(token)
	// Disable response wrapping so the lookup-self response comes back plain.
	c.SetWrappingLookupFunc(func(string, string) string { return "" })

	secret, err := c.Auth().Token().LookupSelf()
	if err != nil {
		return nil, fmt.Errorf("error looking up token: %w", err)
	}
	if secret == nil {
		return nil, fmt.Errorf("empty response from lookup-self")
	}

	// Return an auth struct that "looks" like the response from an auth method.
	// lookup and lookup-self return their data in data, not auth. We try to
	// mirror that data here.
	id, err := secret.TokenID()
	if err != nil {
		return nil, fmt.Errorf("error accessing token ID: %w", err)
	}
	accessor, err := secret.TokenAccessor()
	if err != nil {
		return nil, fmt.Errorf("error accessing token accessor: %w", err)
	}
	// This populates secret.Auth
	_, err = secret.TokenPolicies()
	if err != nil {
		return nil, fmt.Errorf("error accessing token policies: %w", err)
	}
	metadata, err := secret.TokenMetadata()
	if err != nil {
		return nil, fmt.Errorf("error accessing token metadata: %w", err)
	}
	dur, err := secret.TokenTTL()
	if err != nil {
		return nil, fmt.Errorf("error converting token TTL: %w", err)
	}
	renewable, err := secret.TokenIsRenewable()
	if err != nil {
		return nil, fmt.Errorf("error checking if token is renewable: %w", err)
	}

	return &api.Secret{
		Auth: &api.SecretAuth{
			ClientToken:      id,
			Accessor:         accessor,
			Policies:         secret.Auth.Policies,
			TokenPolicies:    secret.Auth.TokenPolicies,
			IdentityPolicies: secret.Auth.IdentityPolicies,
			Metadata:         metadata,
			LeaseDuration:    int(dur.Seconds()),
			Renewable:        renewable,
		},
	}, nil
}
// Help returns the usage text shown for the token auth method.
// (Indentation inside the literal is part of the rendered output.)
func (h *CLIHandler) Help() string {
	help := `
Usage: vault login TOKEN [CONFIG K=V...]

  The token auth method allows logging in directly with a token. This
  can be a token from the "token-create" command or API. There are no
  configuration options for this auth method.

  Authenticate using a token:

      $ vault login 96ddf4bc-d217-f3ba-f9bd-017055595017

  Authenticate but do not lookup information about the token:

      $ vault login token=96ddf4bc-d217-f3ba-f9bd-017055595017 lookup=false

  This token usually comes from a different source such as the API or via the
  built-in "vault token create" command.

Configuration:

  token=<string>
    The token to use for authentication. This is usually provided directly
    via the "vault login" command.

  lookup=<bool>
    Perform a lookup of the token's metadata and policies.
`

	return strings.TrimSpace(help)
}
#!/usr/bin/env python3
# Copyright (C) 2017 Inria
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
# Regex patterns (one per row) that the shell `ps` output must match: the
# header, the ISR stack, seven threads, and the SUM footer.
# NOTE(review): sequences like \d are regex escapes written in non-raw
# strings; they work but would be cleaner as raw string literals.
PS_EXPECTED = (
    ('\tpid | name | state Q | pri | stack ( used) | '
     'base addr | current | runtime | switches'),
    ('\t - | isr_stack | - - | - | \d+ ( -?\d+) | '
     '0x\d+ | 0x\d+'),
    ('\t 1 | idle | pending Q | 15 | \d+ ( -?\d+) | '
     '0x\d+ | 0x\d+ | \d+\.\d+% | \d+'),
    ('\t 2 | main | running Q | 7 | \d+ ( -?\d+) | '
     '0x\d+ | 0x\d+ | \d+\.\d+% | \d+'),
    ('\t 3 | thread | bl rx _ | 6 | \d+ ( -?\d+) | '
     '0x\d+ | 0x\d+ | \d+\.\d+% | \d+'),
    ('\t 4 | thread | bl rx _ | 6 | \d+ ( -?\d+) | '
     '0x\d+ | 0x\d+ | \d+\.\d+% | \d+'),
    ('\t 5 | thread | bl rx _ | 6 | \d+ ( -?\d+) | '
     '0x\d+ | 0x\d+ | \d+\.\d+% | \d+'),
    ('\t 6 | thread | bl mutex _ | 6 | \d+ ( -?\d+) | '
     '0x\d+ | 0x\d+ | \d+\.\d+% | \d+'),
    ('\t 7 | thread | bl rx _ | 6 | \d+ ( -?\d+) | '
     '0x\d+ | 0x\d+ | \d+\.\d+% | \d+'),
    ('\t | SUM | | | \d+ (\d+)')
)
def _check_startup(child):
for i in range(5):
child.expect_exact('Creating thread #{}, next={}'
.format(i, (i + 1) % 5))
def _check_help(child):
child.sendline('')
child.expect('>')
child.sendline('help')
child.expect_exact('Command Description')
child.expect_exact('---------------------------------------')
child.expect_exact('reboot Reboot the node')
child.expect_exact('ps Prints information about '
'running threads.')
def _check_ps(child):
    """Issue the `ps` shell command and match every expected table row."""
    child.sendline('ps')
    for pattern in PS_EXPECTED:
        child.expect(pattern)
def testfunc(child):
    """Top-level test: check boot banners, the help screen, and ps output."""
    _check_startup(child)
    _check_help(child)
    _check_ps(child)
if __name__ == "__main__":
    # run() wires the board's terminal to testfunc and returns an exit code.
    sys.exit(run(testfunc))
{
"name": "angular.dev",
"version": "0.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "angular.dev",
"version": "0.0.0",
"dependencies": {
"@angular/common": "^19.0.0",
"@angular/compiler": "^19.0.0",
"@angular/core": "^19.0.0",
"@angular/platform-browser": "^19.0.0",
"rxjs": "~7.8.0",
"tslib": "^2.3.0",
"zone.js": "~0.15.0"
},
"devDependencies": {
"@angular/build": "^19.0.0",
"@angular/cli": "^19.0.0",
"@angular/compiler-cli": "^19.0.0",
"typescript": "~5.7.3"
}
},
"node_modules/@ampproject/remapping": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz",
"integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.5",
"@jridgewell/trace-mapping": "^0.3.24"
},
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@angular-devkit/architect": {
"version": "0.1901.8",
"resolved": "https://registry.npmjs.org/@angular-devkit/architect/-/architect-0.1901.8.tgz",
"integrity": "sha512-DzvlL1Zg+zOnVmMN3CjE5KzjZAltRZwOwwcso72iWenBPvl/trKzPDlA6ySmpRonm+AR9i9JrdLEUlwczW6/bQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@angular-devkit/core": "19.1.8",
"rxjs": "7.8.1"
},
"engines": {
"node": "^18.19.1 || ^20.11.1 || >=22.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
}
},
"node_modules/@angular-devkit/core": {
"version": "19.1.8",
"resolved": "https://registry.npmjs.org/@angular-devkit/core/-/core-19.1.8.tgz",
"integrity": "sha512-j1zHKvOsGwu5YwAZGuzi835R9vcW7PkfxmSRIJeVl+vawgk31K3zFb4UPH8AY/NPWYqXIAnwpka3HC1+JrWLWA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ajv": "8.17.1",
"ajv-formats": "3.0.1",
"jsonc-parser": "3.3.1",
"picomatch": "4.0.2",
"rxjs": "7.8.1",
"source-map": "0.7.4"
},
"engines": {
"node": "^18.19.1 || ^20.11.1 || >=22.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
},
"peerDependencies": {
"chokidar": "^4.0.0"
},
"peerDependenciesMeta": {
"chokidar": {
"optional": true
}
}
},
"node_modules/@angular-devkit/schematics": {
"version": "19.1.8",
"resolved": "https://registry.npmjs.org/@angular-devkit/schematics/-/schematics-19.1.8.tgz",
"integrity": "sha512-2JGUMD3zjfY8G4RYpypm2/1YEO+O4DtFycUvptIpsBYyULgnEbJ3tlp2oRiXI2vp9tC8IyWqa/swlA8DTI6ZYQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@angular-devkit/core": "19.1.8",
"jsonc-parser": "3.3.1",
"magic-string": "0.30.17",
"ora": "5.4.1",
"rxjs": "7.8.1"
},
"engines": {
"node": "^18.19.1 || ^20.11.1 || >=22.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
}
},
"node_modules/@angular/build": {
"version": "19.1.8",
"resolved": "https://registry.npmjs.org/@angular/build/-/build-19.1.8.tgz",
"integrity": "sha512-DAnnmbqPmtlY5JOitqWUgXi/yKj8eAcrP0T7hYZwLmcRsb+HsHYWsAQoFaTDw0p9WC5BKPqDBCMIivcuIV/izQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@ampproject/remapping": "2.3.0",
"@angular-devkit/architect": "0.1901.8",
"@angular-devkit/core": "19.1.8",
"@babel/core": "7.26.0",
"@babel/helper-annotate-as-pure": "7.25.9",
"@babel/helper-split-export-declaration": "7.24.7",
"@babel/plugin-syntax-import-attributes": "7.26.0",
"@inquirer/confirm": "5.1.1",
"@vitejs/plugin-basic-ssl": "1.2.0",
"beasties": "0.2.0",
"browserslist": "^4.23.0",
"esbuild": "0.24.2",
"fast-glob": "3.3.3",
"https-proxy-agent": "7.0.6",
"istanbul-lib-instrument": "6.0.3",
"listr2": "8.2.5",
"magic-string": "0.30.17",
"mrmime": "2.0.0",
"parse5-html-rewriting-stream": "7.0.0",
"picomatch": "4.0.2",
"piscina": "4.8.0",
"rollup": "4.30.1",
"sass": "1.83.1",
"semver": "7.6.3",
"vite": "6.0.11",
"watchpack": "2.4.2"
},
"engines": {
"node": "^18.19.1 || ^20.11.1 || >=22.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
},
"optionalDependencies": {
"lmdb": "3.2.2"
},
"peerDependencies": {
"@angular/compiler": "^19.0.0",
"@angular/compiler-cli": "^19.0.0",
"@angular/localize": "^19.0.0",
"@angular/platform-server": "^19.0.0",
"@angular/service-worker": "^19.0.0",
"@angular/ssr": "^19.1.8",
"less": "^4.2.0",
"ng-packagr": "^19.0.0",
"postcss": "^8.4.0",
"tailwindcss": "^2.0.0 || ^3.0.0 || ^4.0.0",
"typescript": ">=5.5 <5.8"
},
"peerDependenciesMeta": {
"@angular/localize": {
"optional": true
},
"@angular/platform-server": {
"optional": true
},
"@angular/service-worker": {
"optional": true
},
"@angular/ssr": {
"optional": true
},
"less": {
"optional": true
},
"ng-packagr": {
"optional": true
},
"postcss": {
"optional": true
},
"tailwindcss": {
"optional": true
}
}
},
"node_modules/@angular/cli": {
"version": "19.1.8",
"resolved": "https://registry.npmjs.org/@angular/cli/-/cli-19.1.8.tgz",
"integrity": "sha512-JmdLj8110DNWaxL03K7I06+nLyBfXgiIqYyrQx5QO9AodGkKHK5rE+7VD8MjZhUymua57HToD0oHaQgThJwTJQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@angular-devkit/architect": "0.1901.8",
"@angular-devkit/core": "19.1.8",
"@angular-devkit/schematics": "19.1.8",
"@inquirer/prompts": "7.2.1",
"@listr2/prompt-adapter-inquirer": "2.0.18",
"@schematics/angular": "19.1.8",
"@yarnpkg/lockfile": "1.1.0",
"ini": "5.0.0",
"jsonc-parser": "3.3.1",
"listr2": "8.2.5",
"npm-package-arg": "12.0.1",
"npm-pick-manifest": "10.0.0",
"pacote": "20.0.0",
"resolve": "1.22.10",
"semver": "7.6.3",
"symbol-observable": "4.0.0",
"yargs": "17.7.2"
},
"bin": {
"ng": "bin/ng.js"
},
"engines": {
"node": "^18.19.1 || ^20.11.1 || >=22.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
}
},
"node_modules/@angular/common": {
"version": "19.1.7",
"resolved": "https://registry.npmjs.org/@angular/common/-/common-19.1.7.tgz",
"integrity": "sha512-MXfUGfWeesTQ12HXgeoVIXsS+r1jZxT2FkLQtqS+NRsRD4T1vlyvD7kTI+Ku1NAjdt3mB8TJ0cZHubvmml8I+Q==",
"license": "MIT",
"dependencies": {
"tslib": "^2.3.0"
},
"engines": {
"node": "^18.19.1 || ^20.11.1 || >=22.0.0"
},
"peerDependencies": {
"@angular/core": "19.1.7",
"rxjs": "^6.5.3 || ^7.4.0"
}
},
"node_modules/@angular/compiler": {
"version": "19.1.7",
"resolved": "https://registry.npmjs.org/@angular/compiler/-/compiler-19.1.7.tgz",
"integrity": "sha512-Q3eqqIhMEzrnmFJtUO0K+WPpCfP/JTl9lJXZKe0zgNPdRFUufjSUcPHGzd7OjN2gPpiAvS1yBvENvqs+g/MejQ==",
"license": "MIT",
"dependencies": {
"tslib": "^2.3.0"
},
"engines": {
"node": "^18.19.1 || ^20.11.1 || >=22.0.0"
},
"peerDependencies": {
"@angular/core": "19.1.7"
},
"peerDependenciesMeta": {
"@angular/core": {
"optional": true
}
}
},
"node_modules/@angular/compiler-cli": {
"version": "19.1.7",
"resolved": "https://registry.npmjs.org/@angular/compiler-cli/-/compiler-cli-19.1.7.tgz",
"integrity": "sha512-Uu/TxfIcE1lStlCLmOPbghG1Y5o83odES89sr7bYNJz2mcG7TEonatf6GTOMzbJNil9FBJt6qnJkDkSjn4nUKw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/core": "7.26.0",
"@jridgewell/sourcemap-codec": "^1.4.14",
"chokidar": "^4.0.0",
"convert-source-map": "^1.5.1",
"reflect-metadata": "^0.2.0",
"semver": "^7.0.0",
"tslib": "^2.3.0",
"yargs": "^17.2.1"
},
"bin": {
"ng-xi18n": "bundles/src/bin/ng_xi18n.js",
"ngc": "bundles/src/bin/ngc.js",
"ngcc": "bundles/ngcc/index.js"
},
"engines": {
"node": "^18.19.1 || ^20.11.1 || >=22.0.0"
},
"peerDependencies": {
"@angular/compiler": "19.1.7",
"typescript": ">=5.5 <5.8"
}
},
"node_modules/@angular/core": {
"version": "19.1.7",
"resolved": "https://registry.npmjs.org/@angular/core/-/core-19.1.7.tgz",
"integrity": "sha512-P+e4ekJYWMFhWSzJav0R51bFAfUhIOmnqmG9mlI/ZONu2qcTTmyIG9AW5x1qhrMHEH42RaeK60RkKyqgcHaGDg==",
"license": "MIT",
"dependencies": {
"tslib": "^2.3.0"
},
"engines": {
"node": "^18.19.1 || ^20.11.1 || >=22.0.0"
},
"peerDependencies": {
"rxjs": "^6.5.3 || ^7.4.0",
"zone.js": "~0.15.0"
}
},
"node_modules/@angular/platform-browser": {
"version": "19.1.7",
"resolved": "https://registry.npmjs.org/@angular/platform-browser/-/platform-browser-19.1.7.tgz",
"integrity": "sha512-QKakWl+CeVVwn22yjRHBXm6BvDsHoo+9u1pJGGk2smKSYjHW6qAly28+P7FUfVXUQI7rg++M66JwzNOFfYMDQA==",
"license": "MIT",
"dependencies": {
"tslib": "^2.3.0"
},
"engines": {
"node": "^18.19.1 || ^20.11.1 || >=22.0.0"
},
"peerDependencies": {
"@angular/animations": "19.1.7",
"@angular/common": "19.1.7",
"@angular/core": "19.1.7"
},
"peerDependenciesMeta": {
"@angular/animations": {
"optional": true
}
}
},
"node_modules/@babel/code-frame": {
"version": "7.26.2",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz",
"integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-validator-identifier": "^7.25.9",
"js-tokens": "^4.0.0",
"picocolors": "^1.0.0"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/compat-data": {
"version": "7.26.8",
"resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.8.tgz",
"integrity": "sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/core": {
"version": "7.26.0",
"resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.0.tgz",
"integrity": "sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@ampproject/remapping": "^2.2.0",
"@babel/code-frame": "^7.26.0",
"@babel/generator": "^7.26.0",
"@babel/helper-compilation-targets": "^7.25.9",
"@babel/helper-module-transforms": "^7.26.0",
"@babel/helpers": "^7.26.0",
"@babel/parser": "^7.26.0",
"@babel/template": "^7.25.9",
"@babel/traverse": "^7.25.9",
"@babel/types": "^7.26.0",
"convert-source-map": "^2.0.0",
"debug": "^4.1.0",
"gensync": "^1.0.0-beta.2",
"json5": "^2.2.3",
"semver": "^6.3.1"
},
"engines": {
"node": ">=6.9.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/babel"
}
},
"node_modules/@babel/core/node_modules/convert-source-map": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
"integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
"dev": true,
"license": "MIT"
},
"node_modules/@babel/core/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/@babel/generator": {
"version": "7.26.9",
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.9.tgz",
"integrity": "sha512-kEWdzjOAUMW4hAyrzJ0ZaTOu9OmpyDIQicIh0zg0EEcEkYXZb2TjtBhnHi2ViX7PKwZqF4xwqfAm299/QMP3lg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/parser": "^7.26.9",
"@babel/types": "^7.26.9",
"@jridgewell/gen-mapping": "^0.3.5",
"@jridgewell/trace-mapping": "^0.3.25",
"jsesc": "^3.0.2"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-annotate-as-pure": {
"version": "7.25.9",
"resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.25.9.tgz",
"integrity": "sha512-gv7320KBUFJz1RnylIg5WWYPRXKZ884AGkYpgpWW02TH66Dl+HaC1t1CKd0z3R4b6hdYEcmrNZHUmfCP+1u3/g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.25.9"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-compilation-targets": {
"version": "7.26.5",
"resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.26.5.tgz",
"integrity": "sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/compat-data": "^7.26.5",
"@babel/helper-validator-option": "^7.25.9",
"browserslist": "^4.24.0",
"lru-cache": "^5.1.1",
"semver": "^6.3.1"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-compilation-targets/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/@babel/helper-module-imports": {
"version": "7.25.9",
"resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz",
"integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/traverse": "^7.25.9",
"@babel/types": "^7.25.9"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-module-transforms": {
"version": "7.26.0",
"resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz",
"integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-module-imports": "^7.25.9",
"@babel/helper-validator-identifier": "^7.25.9",
"@babel/traverse": "^7.25.9"
},
"engines": {
"node": ">=6.9.0"
},
"peerDependencies": {
"@babel/core": "^7.0.0"
}
},
"node_modules/@babel/helper-plugin-utils": {
"version": "7.26.5",
"resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.26.5.tgz",
"integrity": "sha512-RS+jZcRdZdRFzMyr+wcsaqOmld1/EqTghfaBGQQd/WnRdzdlvSZ//kF7U8VQTxf1ynZ4cjUcYgjVGx13ewNPMg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-split-export-declaration": {
"version": "7.24.7",
"resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz",
"integrity": "sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.24.7"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-string-parser": {
"version": "7.25.9",
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz",
"integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-validator-identifier": {
"version": "7.25.9",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz",
"integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-validator-option": {
"version": "7.25.9",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz",
"integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helpers": {
"version": "7.26.9",
"resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.9.tgz",
"integrity": "sha512-Mz/4+y8udxBKdmzt/UjPACs4G3j5SshJJEFFKxlCGPydG4JAHXxjWjAwjd09tf6oINvl1VfMJo+nB7H2YKQ0dA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/template": "^7.26.9",
"@babel/types": "^7.26.9"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/parser": {
"version": "7.26.9",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.9.tgz",
"integrity": "sha512-81NWa1njQblgZbQHxWHpxxCzNsa3ZwvFqpUg7P+NNUU6f3UU2jBEg4OlF/J6rl8+PQGh1q6/zWScd001YwcA5A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.26.9"
},
"bin": {
"parser": "bin/babel-parser.js"
},
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@babel/plugin-syntax-import-attributes": {
"version": "7.26.0",
"resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.26.0.tgz",
"integrity": "sha512-e2dttdsJ1ZTpi3B9UYGLw41hifAubg19AtCu/2I/F1QNVclOBr1dYpTdmdyZ84Xiz43BS/tCUkMAZNLv12Pi+A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-plugin-utils": "^7.25.9"
},
"engines": {
"node": ">=6.9.0"
},
"peerDependencies": {
"@babel/core": "^7.0.0-0"
}
},
"node_modules/@babel/template": {
"version": "7.26.9",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.26.9.tgz",
"integrity": "sha512-qyRplbeIpNZhmzOysF/wFMuP9sctmh2cFzRAZOn1YapxBsE1i9bJIY586R/WBLfLcmcBlM8ROBiQURnnNy+zfA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.26.2",
"@babel/parser": "^7.26.9",
"@babel/types": "^7.26.9"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/traverse": {
"version": "7.26.9",
"resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.9.tgz",
"integrity": "sha512-ZYW7L+pL8ahU5fXmNbPF+iZFHCv5scFak7MZ9bwaRPLUhHh7QQEMjZUg0HevihoqCM5iSYHN61EyCoZvqC+bxg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.26.2",
"@babel/generator": "^7.26.9",
"@babel/parser": "^7.26.9",
"@babel/template": "^7.26.9",
"@babel/types": "^7.26.9",
"debug": "^4.3.1",
"globals": "^11.1.0"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/types": {
"version": "7.26.9",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.9.tgz",
"integrity": "sha512-Y3IR1cRnOxOCDvMmNiym7XpXQ93iGDDPHx+Zj+NM+rg0fBaShfQLkg+hKPaZCEvg5N/LeCo4+Rj/i3FuJsIQaw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-string-parser": "^7.25.9",
"@babel/helper-validator-identifier": "^7.25.9"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@esbuild/aix-ppc64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.2.tgz",
"integrity": "sha512-thpVCb/rhxE/BnMLQ7GReQLLN8q9qbHmI55F4489/ByVg2aQaQ6kbcLb6FHkocZzQhxc4gx0sCk0tJkKBFzDhA==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"aix"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.2.tgz",
"integrity": "sha512-tmwl4hJkCfNHwFB3nBa8z1Uy3ypZpxqxfTQOcHX+xRByyYgunVbZ9MzUUfb0RxaHIMnbHagwAxuTL+tnNM+1/Q==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.2.tgz",
"integrity": "sha512-cNLgeqCqV8WxfcTIOeL4OAtSmL8JjcN6m09XIgro1Wi7cF4t/THaWEa7eL5CMoMBdjoHOTh/vwTO/o2TRXIyzg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.2.tgz",
"integrity": "sha512-B6Q0YQDqMx9D7rvIcsXfmJfvUYLoP722bgfBlO5cGvNVb5V/+Y7nhBE3mHV9OpxBf4eAS2S68KZztiPaWq4XYw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.2.tgz",
"integrity": "sha512-kj3AnYWc+CekmZnS5IPu9D+HWtUI49hbnyqk0FLEJDbzCIQt7hg7ucF1SQAilhtYpIujfaHr6O0UHlzzSPdOeA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.2.tgz",
"integrity": "sha512-WeSrmwwHaPkNR5H3yYfowhZcbriGqooyu3zI/3GGpF8AyUdsrrP0X6KumITGA9WOyiJavnGZUwPGvxvwfWPHIA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.2.tgz",
"integrity": "sha512-UN8HXjtJ0k/Mj6a9+5u6+2eZ2ERD7Edt1Q9IZiB5UZAIdPnVKDoG7mdTVGhHJIeEml60JteamR3qhsr1r8gXvg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.2.tgz",
"integrity": "sha512-TvW7wE/89PYW+IevEJXZ5sF6gJRDY/14hyIGFXdIucxCsbRmLUcjseQu1SyTko+2idmCw94TgyaEZi9HUSOe3Q==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.2.tgz",
"integrity": "sha512-n0WRM/gWIdU29J57hJyUdIsk0WarGd6To0s+Y+LwvlC55wt+GT/OgkwoXCXvIue1i1sSNWblHEig00GBWiJgfA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.2.tgz",
"integrity": "sha512-7HnAD6074BW43YvvUmE/35Id9/NB7BeX5EoNkK9obndmZBUk8xmJJeU7DwmUeN7tkysslb2eSl6CTrYz6oEMQg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.2.tgz",
"integrity": "sha512-sfv0tGPQhcZOgTKO3oBE9xpHuUqguHvSo4jl+wjnKwFpapx+vUDcawbwPNuBIAYdRAvIDBfZVvXprIj3HA+Ugw==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.2.tgz",
"integrity": "sha512-CN9AZr8kEndGooS35ntToZLTQLHEjtVB5n7dl8ZcTZMonJ7CCfStrYhrzF97eAecqVbVJ7APOEe18RPI4KLhwQ==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.2.tgz",
"integrity": "sha512-iMkk7qr/wl3exJATwkISxI7kTcmHKE+BlymIAbHO8xanq/TjHaaVThFF6ipWzPHryoFsesNQJPE/3wFJw4+huw==",
"cpu": [
"mips64el"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.2.tgz",
"integrity": "sha512-shsVrgCZ57Vr2L8mm39kO5PPIb+843FStGt7sGGoqiiWYconSxwTiuswC1VJZLCjNiMLAMh34jg4VSEQb+iEbw==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.2.tgz",
"integrity": "sha512-4eSFWnU9Hhd68fW16GD0TINewo1L6dRrB+oLNNbYyMUAeOD2yCK5KXGK1GH4qD/kT+bTEXjsyTCiJGHPZ3eM9Q==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.2.tgz",
"integrity": "sha512-S0Bh0A53b0YHL2XEXC20bHLuGMOhFDO6GN4b3YjRLK//Ep3ql3erpNcPlEFed93hsQAjAQDNsvcK+hV90FubSw==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.2.tgz",
"integrity": "sha512-8Qi4nQcCTbLnK9WoMjdC9NiTG6/E38RNICU6sUNqK0QFxCYgoARqVqxdFmWkdonVsvGqWhmm7MO0jyTqLqwj0Q==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.24.2.tgz",
"integrity": "sha512-wuLK/VztRRpMt9zyHSazyCVdCXlpHkKm34WUyinD2lzK07FAHTq0KQvZZlXikNWkDGoT6x3TD51jKQ7gMVpopw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.2.tgz",
"integrity": "sha512-VefFaQUc4FMmJuAxmIHgUmfNiLXY438XrL4GDNV1Y1H/RW3qow68xTwjZKfj/+Plp9NANmzbH5R40Meudu8mmw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.2.tgz",
"integrity": "sha512-YQbi46SBct6iKnszhSvdluqDmxCJA+Pu280Av9WICNwQmMxV7nLRHZfjQzwbPs3jeWnuAhE9Jy0NrnJ12Oz+0A==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.2.tgz",
"integrity": "sha512-+iDS6zpNM6EnJyWv0bMGLWSWeXGN/HTaF/LXHXHwejGsVi+ooqDfMCCTerNFxEkM3wYVcExkeGXNqshc9iMaOA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.2.tgz",
"integrity": "sha512-hTdsW27jcktEvpwNHJU4ZwWFGkz2zRJUz8pvddmXPtXDzVKTTINmlmga3ZzwcuMpUvLw7JkLy9QLKyGpD2Yxig==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"sunos"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.2.tgz",
"integrity": "sha512-LihEQ2BBKVFLOC9ZItT9iFprsE9tqjDjnbulhHoFxYQtQfai7qfluVODIYxt1PgdoyQkz23+01rzwNwYfutxUQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.2.tgz",
"integrity": "sha512-q+iGUwfs8tncmFC9pcnD5IvRHAzmbwQ3GPS5/ceCyHdjXubwQWI12MKWSNSMYLJMq23/IUCvJMS76PDqXe1fxA==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.2.tgz",
"integrity": "sha512-7VTgWzgMGvup6aSqDPLiW5zHaxYJGTO4OokMjIlrCtf+VpEL+cXKtCvg723iguPYI5oaUNdS+/V7OU2gvXVWEg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@inquirer/checkbox": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-4.1.2.tgz",
"integrity": "sha512-PL9ixC5YsPXzXhAZFUPmkXGxfgjkdfZdPEPPmt4kFwQ4LBMDG9n/nHXYRGGZSKZJs+d1sGKWgS2GiPzVRKUdtQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.1.7",
"@inquirer/figures": "^1.0.10",
"@inquirer/type": "^3.0.4",
"ansi-escapes": "^4.3.2",
"yoctocolors-cjs": "^2.1.2"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/confirm": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.1.tgz",
"integrity": "sha512-vVLSbGci+IKQvDOtzpPTCOiEJCNidHcAq9JYVoWTW0svb5FiwSLotkM+JXNXejfjnzVYV9n0DTBythl9+XgTxg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.1.2",
"@inquirer/type": "^3.0.2"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
}
},
"node_modules/@inquirer/core": {
"version": "10.1.7",
"resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.1.7.tgz",
"integrity": "sha512-AA9CQhlrt6ZgiSy6qoAigiA1izOa751ugX6ioSjqgJ+/Gd+tEN/TORk5sUYNjXuHWfW0r1n/a6ak4u/NqHHrtA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/figures": "^1.0.10",
"@inquirer/type": "^3.0.4",
"ansi-escapes": "^4.3.2",
"cli-width": "^4.1.0",
"mute-stream": "^2.0.0",
"signal-exit": "^4.1.0",
"wrap-ansi": "^6.2.0",
"yoctocolors-cjs": "^2.1.2"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/editor": {
"version": "4.2.7",
"resolved": "https://registry.npmjs.org/@inquirer/editor/-/editor-4.2.7.tgz",
"integrity": "sha512-gktCSQtnSZHaBytkJKMKEuswSk2cDBuXX5rxGFv306mwHfBPjg5UAldw9zWGoEyvA9KpRDkeM4jfrx0rXn0GyA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.1.7",
"@inquirer/type": "^3.0.4",
"external-editor": "^3.1.0"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/expand": {
"version": "4.0.9",
"resolved": "https://registry.npmjs.org/@inquirer/expand/-/expand-4.0.9.tgz",
"integrity": "sha512-Xxt6nhomWTAmuSX61kVgglLjMEFGa+7+F6UUtdEUeg7fg4r9vaFttUUKrtkViYYrQBA5Ia1tkOJj2koP9BuLig==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.1.7",
"@inquirer/type": "^3.0.4",
"yoctocolors-cjs": "^2.1.2"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/figures": {
"version": "1.0.10",
"resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.10.tgz",
"integrity": "sha512-Ey6176gZmeqZuY/W/nZiUyvmb1/qInjcpiZjXWi6nON+nxJpD1bxtSoBxNliGISae32n6OwbY+TSXPZ1CfS4bw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
}
},
"node_modules/@inquirer/input": {
"version": "4.1.6",
"resolved": "https://registry.npmjs.org/@inquirer/input/-/input-4.1.6.tgz",
"integrity": "sha512-1f5AIsZuVjPT4ecA8AwaxDFNHny/tSershP/cTvTDxLdiIGTeILNcKozB0LaYt6mojJLUbOYhpIxicaYf7UKIQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.1.7",
"@inquirer/type": "^3.0.4"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/number": {
"version": "3.0.9",
"resolved": "https://registry.npmjs.org/@inquirer/number/-/number-3.0.9.tgz",
"integrity": "sha512-iN2xZvH3tyIYXLXBvlVh0npk1q/aVuKXZo5hj+K3W3D4ngAEq/DkLpofRzx6oebTUhBvOgryZ+rMV0yImKnG3w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.1.7",
"@inquirer/type": "^3.0.4"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/password": {
"version": "4.0.9",
"resolved": "https://registry.npmjs.org/@inquirer/password/-/password-4.0.9.tgz",
"integrity": "sha512-xBEoOw1XKb0rIN208YU7wM7oJEHhIYkfG7LpTJAEW913GZeaoQerzf5U/LSHI45EVvjAdgNXmXgH51cUXKZcJQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.1.7",
"@inquirer/type": "^3.0.4",
"ansi-escapes": "^4.3.2"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/prompts": {
"version": "7.2.1",
"resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-7.2.1.tgz",
"integrity": "sha512-v2JSGri6/HXSfoGIwuKEn8sNCQK6nsB2BNpy2lSX6QH9bsECrMv93QHnj5+f+1ZWpF/VNioIV2B/PDox8EvGuQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/checkbox": "^4.0.4",
"@inquirer/confirm": "^5.1.1",
"@inquirer/editor": "^4.2.1",
"@inquirer/expand": "^4.0.4",
"@inquirer/input": "^4.1.1",
"@inquirer/number": "^3.0.4",
"@inquirer/password": "^4.0.4",
"@inquirer/rawlist": "^4.0.4",
"@inquirer/search": "^3.0.4",
"@inquirer/select": "^4.0.4"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
}
},
"node_modules/@inquirer/rawlist": {
"version": "4.0.9",
"resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-4.0.9.tgz",
"integrity": "sha512-+5t6ebehKqgoxV8fXwE49HkSF2Rc9ijNiVGEQZwvbMI61/Q5RcD+jWD6Gs1tKdz5lkI8GRBL31iO0HjGK1bv+A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.1.7",
"@inquirer/type": "^3.0.4",
"yoctocolors-cjs": "^2.1.2"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/search": {
"version": "3.0.9",
"resolved": "https://registry.npmjs.org/@inquirer/search/-/search-3.0.9.tgz",
"integrity": "sha512-DWmKztkYo9CvldGBaRMr0ETUHgR86zE6sPDVOHsqz4ISe9o1LuiWfgJk+2r75acFclA93J/lqzhT0dTjCzHuoA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.1.7",
"@inquirer/figures": "^1.0.10",
"@inquirer/type": "^3.0.4",
"yoctocolors-cjs": "^2.1.2"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/select": {
"version": "4.0.9",
"resolved": "https://registry.npmjs.org/@inquirer/select/-/select-4.0.9.tgz",
"integrity": "sha512-BpJyJe7Dkhv2kz7yG7bPSbJLQuu/rqyNlF1CfiiFeFwouegfH+zh13KDyt6+d9DwucKo7hqM3wKLLyJxZMO+Xg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/core": "^10.1.7",
"@inquirer/figures": "^1.0.10",
"@inquirer/type": "^3.0.4",
"ansi-escapes": "^4.3.2",
"yoctocolors-cjs": "^2.1.2"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@inquirer/type": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.4.tgz",
"integrity": "sha512-2MNFrDY8jkFYc9Il9DgLsHhMzuHnOYM1+CUYVWbzu9oT0hC7V7EcYvdCKeoll/Fcci04A+ERZ9wcc7cQ8lTkIA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@types/node": ">=18"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/@isaacs/cliui": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
"integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
"dev": true,
"license": "ISC",
"dependencies": {
"string-width": "^5.1.2",
"string-width-cjs": "npm:string-width@^4.2.0",
"strip-ansi": "^7.0.1",
"strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
"wrap-ansi": "^8.1.0",
"wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
},
"engines": {
"node": ">=12"
}
},
"node_modules/@isaacs/cliui/node_modules/ansi-styles": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
"integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/@isaacs/cliui/node_modules/emoji-regex": {
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
"integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
"dev": true,
"license": "MIT"
},
"node_modules/@isaacs/cliui/node_modules/string-width": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
"integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
"dev": true,
"license": "MIT",
"dependencies": {
"eastasianwidth": "^0.2.0",
"emoji-regex": "^9.2.2",
"strip-ansi": "^7.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@isaacs/cliui/node_modules/wrap-ansi": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
"integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.1.0",
"string-width": "^5.0.1",
"strip-ansi": "^7.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/@isaacs/fs-minipass": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz",
"integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^7.0.4"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@istanbuljs/schema": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz",
"integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/@jridgewell/gen-mapping": {
"version": "0.3.8",
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz",
"integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/set-array": "^1.2.1",
"@jridgewell/sourcemap-codec": "^1.4.10",
"@jridgewell/trace-mapping": "^0.3.24"
},
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/resolve-uri": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/set-array": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz",
"integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/sourcemap-codec": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz",
"integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@jridgewell/trace-mapping": {
"version": "0.3.25",
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz",
"integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/resolve-uri": "^3.1.0",
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@listr2/prompt-adapter-inquirer": {
"version": "2.0.18",
"resolved": "https://registry.npmjs.org/@listr2/prompt-adapter-inquirer/-/prompt-adapter-inquirer-2.0.18.tgz",
"integrity": "sha512-0hz44rAcrphyXcA8IS7EJ2SCoaBZD2u5goE8S/e+q/DL+dOGpqpcLidVOFeLG3VgML62SXmfRLAhWt0zL1oW4Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@inquirer/type": "^1.5.5"
},
"engines": {
"node": ">=18.0.0"
},
"peerDependencies": {
"@inquirer/prompts": ">= 3 < 8"
}
},
"node_modules/@listr2/prompt-adapter-inquirer/node_modules/@inquirer/type": {
"version": "1.5.5",
"resolved": "https://registry.npmjs.org/@inquirer/type/-/type-1.5.5.tgz",
"integrity": "sha512-MzICLu4yS7V8AA61sANROZ9vT1H3ooca5dSmI1FjZkzq7o/koMsRfQSzRtFo+F3Ao4Sf1C0bpLKejpKB/+j6MA==",
"dev": true,
"license": "MIT",
"dependencies": {
"mute-stream": "^1.0.0"
},
"engines": {
"node": ">=18"
}
},
"node_modules/@listr2/prompt-adapter-inquirer/node_modules/mute-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-1.0.0.tgz",
"integrity": "sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
}
},
"node_modules/@lmdb/lmdb-darwin-arm64": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-darwin-arm64/-/lmdb-darwin-arm64-3.2.2.tgz",
"integrity": "sha512-WBSJT9Z7DTol5viq+DZD2TapeWOw7mlwXxiSBHgAzqVwsaVb0h/ekMD9iu/jDD8MUA20tO9N0WEdnT06fsUp+g==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@lmdb/lmdb-darwin-x64": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-darwin-x64/-/lmdb-darwin-x64-3.2.2.tgz",
"integrity": "sha512-4S13kUtR7c/j/MzkTIBJCXv52hQ41LG2ukeaqw4Eng9K0pNKLFjo1sDSz96/yKhwykxrWDb13ddJ/ZqD3rAhUA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@lmdb/lmdb-linux-arm": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-arm/-/lmdb-linux-arm-3.2.2.tgz",
"integrity": "sha512-uW31JmfuPAaLUYW7NsEU8gzwgDAzpGPwjvkxnKlcWd8iDutoPKDJi8Wk9lFmPEZRxVSB0j1/wDQ7N2qliR9UFA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@lmdb/lmdb-linux-arm64": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-arm64/-/lmdb-linux-arm64-3.2.2.tgz",
"integrity": "sha512-4hdgZtWI1idQlWRp+eleWXD9KLvObgboRaVoBj2POdPEYvsKANllvMW0El8tEQwtw74yB9NT6P8ENBB5UJf5+g==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@lmdb/lmdb-linux-x64": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-x64/-/lmdb-linux-x64-3.2.2.tgz",
"integrity": "sha512-A0zjf4a2vM4B4GAx78ncuOTZ8Ka1DbTaG1Axf1e00Sa7f5coqlWiLg1PX7Gxvyibc2YqtqB+8tg1KKrE8guZVw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@lmdb/lmdb-win32-x64": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/@lmdb/lmdb-win32-x64/-/lmdb-win32-x64-3.2.2.tgz",
"integrity": "sha512-Y0qoSCAja+xZE7QQ0LCHoYAuyI1n9ZqukQJa8lv9X3yCvWahFF7OYHAgVH1ejp43XWstj3U89/PAAzcowgF/uQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz",
"integrity": "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@msgpackr-extract/msgpackr-extract-darwin-x64": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz",
"integrity": "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@msgpackr-extract/msgpackr-extract-linux-arm": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz",
"integrity": "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@msgpackr-extract/msgpackr-extract-linux-arm64": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz",
"integrity": "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz",
"integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@msgpackr-extract/msgpackr-extract-win32-x64": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz",
"integrity": "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@napi-rs/nice": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice/-/nice-1.0.1.tgz",
"integrity": "sha512-zM0mVWSXE0a0h9aKACLwKmD6nHcRiKrPpCfvaKqG1CqDEyjEawId0ocXxVzPMCAm6kkWr2P025msfxXEnt8UGQ==",
"dev": true,
"license": "MIT",
"optional": true,
"engines": {
"node": ">= 10"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Brooooooklyn"
},
"optionalDependencies": {
"@napi-rs/nice-android-arm-eabi": "1.0.1",
"@napi-rs/nice-android-arm64": "1.0.1",
"@napi-rs/nice-darwin-arm64": "1.0.1",
"@napi-rs/nice-darwin-x64": "1.0.1",
"@napi-rs/nice-freebsd-x64": "1.0.1",
"@napi-rs/nice-linux-arm-gnueabihf": "1.0.1",
"@napi-rs/nice-linux-arm64-gnu": "1.0.1",
"@napi-rs/nice-linux-arm64-musl": "1.0.1",
"@napi-rs/nice-linux-ppc64-gnu": "1.0.1",
"@napi-rs/nice-linux-riscv64-gnu": "1.0.1",
"@napi-rs/nice-linux-s390x-gnu": "1.0.1",
"@napi-rs/nice-linux-x64-gnu": "1.0.1",
"@napi-rs/nice-linux-x64-musl": "1.0.1",
"@napi-rs/nice-win32-arm64-msvc": "1.0.1",
"@napi-rs/nice-win32-ia32-msvc": "1.0.1",
"@napi-rs/nice-win32-x64-msvc": "1.0.1"
}
},
"node_modules/@napi-rs/nice-android-arm-eabi": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-android-arm-eabi/-/nice-android-arm-eabi-1.0.1.tgz",
"integrity": "sha512-5qpvOu5IGwDo7MEKVqqyAxF90I6aLj4n07OzpARdgDRfz8UbBztTByBp0RC59r3J1Ij8uzYi6jI7r5Lws7nn6w==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-android-arm64": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-android-arm64/-/nice-android-arm64-1.0.1.tgz",
"integrity": "sha512-GqvXL0P8fZ+mQqG1g0o4AO9hJjQaeYG84FRfZaYjyJtZZZcMjXW5TwkL8Y8UApheJgyE13TQ4YNUssQaTgTyvA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-darwin-arm64": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-darwin-arm64/-/nice-darwin-arm64-1.0.1.tgz",
"integrity": "sha512-91k3HEqUl2fsrz/sKkuEkscj6EAj3/eZNCLqzD2AA0TtVbkQi8nqxZCZDMkfklULmxLkMxuUdKe7RvG/T6s2AA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-darwin-x64": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-darwin-x64/-/nice-darwin-x64-1.0.1.tgz",
"integrity": "sha512-jXnMleYSIR/+TAN/p5u+NkCA7yidgswx5ftqzXdD5wgy/hNR92oerTXHc0jrlBisbd7DpzoaGY4cFD7Sm5GlgQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-freebsd-x64": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-freebsd-x64/-/nice-freebsd-x64-1.0.1.tgz",
"integrity": "sha512-j+iJ/ezONXRQsVIB/FJfwjeQXX7A2tf3gEXs4WUGFrJjpe/z2KB7sOv6zpkm08PofF36C9S7wTNuzHZ/Iiccfw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-arm-gnueabihf": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-arm-gnueabihf/-/nice-linux-arm-gnueabihf-1.0.1.tgz",
"integrity": "sha512-G8RgJ8FYXYkkSGQwywAUh84m946UTn6l03/vmEXBYNJxQJcD+I3B3k5jmjFG/OPiU8DfvxutOP8bi+F89MCV7Q==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-arm64-gnu": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-arm64-gnu/-/nice-linux-arm64-gnu-1.0.1.tgz",
"integrity": "sha512-IMDak59/W5JSab1oZvmNbrms3mHqcreaCeClUjwlwDr0m3BoR09ZiN8cKFBzuSlXgRdZ4PNqCYNeGQv7YMTjuA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-arm64-musl": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-arm64-musl/-/nice-linux-arm64-musl-1.0.1.tgz",
"integrity": "sha512-wG8fa2VKuWM4CfjOjjRX9YLIbysSVV1S3Kgm2Fnc67ap/soHBeYZa6AGMeR5BJAylYRjnoVOzV19Cmkco3QEPw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-ppc64-gnu": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-ppc64-gnu/-/nice-linux-ppc64-gnu-1.0.1.tgz",
"integrity": "sha512-lxQ9WrBf0IlNTCA9oS2jg/iAjQyTI6JHzABV664LLrLA/SIdD+I1i3Mjf7TsnoUbgopBcCuDztVLfJ0q9ubf6Q==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-riscv64-gnu": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-riscv64-gnu/-/nice-linux-riscv64-gnu-1.0.1.tgz",
"integrity": "sha512-3xs69dO8WSWBb13KBVex+yvxmUeEsdWexxibqskzoKaWx9AIqkMbWmE2npkazJoopPKX2ULKd8Fm9veEn0g4Ig==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-s390x-gnu": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-s390x-gnu/-/nice-linux-s390x-gnu-1.0.1.tgz",
"integrity": "sha512-lMFI3i9rlW7hgToyAzTaEybQYGbQHDrpRkg+1gJWEpH0PLAQoZ8jiY0IzakLfNWnVda1eTYYlxxFYzW8Rqczkg==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-x64-gnu": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-x64-gnu/-/nice-linux-x64-gnu-1.0.1.tgz",
"integrity": "sha512-XQAJs7DRN2GpLN6Fb+ZdGFeYZDdGl2Fn3TmFlqEL5JorgWKrQGRUrpGKbgZ25UeZPILuTKJ+OowG2avN8mThBA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-linux-x64-musl": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-x64-musl/-/nice-linux-x64-musl-1.0.1.tgz",
"integrity": "sha512-/rodHpRSgiI9o1faq9SZOp/o2QkKQg7T+DK0R5AkbnI/YxvAIEHf2cngjYzLMQSQgUhxym+LFr+UGZx4vK4QdQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-win32-arm64-msvc": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-win32-arm64-msvc/-/nice-win32-arm64-msvc-1.0.1.tgz",
"integrity": "sha512-rEcz9vZymaCB3OqEXoHnp9YViLct8ugF+6uO5McifTedjq4QMQs3DHz35xBEGhH3gJWEsXMUbzazkz5KNM5YUg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-win32-ia32-msvc": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-win32-ia32-msvc/-/nice-win32-ia32-msvc-1.0.1.tgz",
"integrity": "sha512-t7eBAyPUrWL8su3gDxw9xxxqNwZzAqKo0Szv3IjVQd1GpXXVkb6vBBQUuxfIYaXMzZLwlxRQ7uzM2vdUE9ULGw==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@napi-rs/nice-win32-x64-msvc": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@napi-rs/nice-win32-x64-msvc/-/nice-win32-x64-msvc-1.0.1.tgz",
"integrity": "sha512-JlF+uDcatt3St2ntBG8H02F1mM45i5SF9W+bIKiReVE6wiy3o16oBP/yxt+RZ+N6LbCImJXJ6bXNO2kn9AXicg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10"
}
},
"node_modules/@nodelib/fs.scandir": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
"integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@nodelib/fs.stat": "2.0.5",
"run-parallel": "^1.1.9"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/@nodelib/fs.stat": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
"integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 8"
}
},
"node_modules/@nodelib/fs.walk": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
"integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@nodelib/fs.scandir": "2.1.5",
"fastq": "^1.6.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/@npmcli/agent": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-3.0.0.tgz",
"integrity": "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q==",
"dev": true,
"license": "ISC",
"dependencies": {
"agent-base": "^7.1.0",
"http-proxy-agent": "^7.0.0",
"https-proxy-agent": "^7.0.1",
"lru-cache": "^10.0.1",
"socks-proxy-agent": "^8.0.3"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@npmcli/agent/node_modules/lru-cache": {
"version": "10.4.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
"integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
"dev": true,
"license": "ISC"
},
"node_modules/@npmcli/fs": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-4.0.0.tgz",
"integrity": "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q==",
"dev": true,
"license": "ISC",
"dependencies": {
"semver": "^7.3.5"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@npmcli/git": {
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.3.tgz",
"integrity": "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/promise-spawn": "^8.0.0",
"ini": "^5.0.0",
"lru-cache": "^10.0.1",
"npm-pick-manifest": "^10.0.0",
"proc-log": "^5.0.0",
"promise-retry": "^2.0.1",
"semver": "^7.3.5",
"which": "^5.0.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@npmcli/git/node_modules/lru-cache": {
"version": "10.4.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
"integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
"dev": true,
"license": "ISC"
},
"node_modules/@npmcli/installed-package-contents": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-3.0.0.tgz",
"integrity": "sha512-fkxoPuFGvxyrH+OQzyTkX2LUEamrF4jZSmxjAtPPHHGO0dqsQ8tTKjnIS8SAnPHdk2I03BDtSMR5K/4loKg79Q==",
"dev": true,
"license": "ISC",
"dependencies": {
"npm-bundled": "^4.0.0",
"npm-normalize-package-bin": "^4.0.0"
},
"bin": {
"installed-package-contents": "bin/index.js"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@npmcli/node-gyp": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-4.0.0.tgz",
"integrity": "sha512-+t5DZ6mO/QFh78PByMq1fGSAub/agLJZDRfJRMeOSNCt8s9YVlTjmGpIPwPhvXTGUIJk+WszlT0rQa1W33yzNA==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@npmcli/package-json": {
"version": "6.1.1",
"resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.1.1.tgz",
"integrity": "sha512-d5qimadRAUCO4A/Txw71VM7UrRZzV+NPclxz/dc+M6B2oYwjWTjqh8HA/sGQgs9VZuJ6I/P7XIAlJvgrl27ZOw==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/git": "^6.0.0",
"glob": "^10.2.2",
"hosted-git-info": "^8.0.0",
"json-parse-even-better-errors": "^4.0.0",
"proc-log": "^5.0.0",
"semver": "^7.5.3",
"validate-npm-package-license": "^3.0.4"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@npmcli/promise-spawn": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.2.tgz",
"integrity": "sha512-/bNJhjc+o6qL+Dwz/bqfTQClkEO5nTQ1ZEcdCkAQjhkZMHIh22LPG7fNh1enJP1NKWDqYiiABnjFCY7E0zHYtQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"which": "^5.0.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@npmcli/redact": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-3.1.1.tgz",
"integrity": "sha512-3Hc2KGIkrvJWJqTbvueXzBeZlmvoOxc2jyX00yzr3+sNFquJg0N8hH4SAPLPVrkWIRQICVpVgjrss971awXVnA==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@npmcli/run-script": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-9.0.2.tgz",
"integrity": "sha512-cJXiUlycdizQwvqE1iaAb4VRUM3RX09/8q46zjvy+ct9GhfZRWd7jXYVc1tn/CfRlGPVkX/u4sstRlepsm7hfw==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/node-gyp": "^4.0.0",
"@npmcli/package-json": "^6.0.0",
"@npmcli/promise-spawn": "^8.0.0",
"node-gyp": "^11.0.0",
"proc-log": "^5.0.0",
"which": "^5.0.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@parcel/watcher": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.1.tgz",
"integrity": "sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"dependencies": {
"detect-libc": "^1.0.3",
"is-glob": "^4.0.3",
"micromatch": "^4.0.5",
"node-addon-api": "^7.0.0"
},
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"optionalDependencies": {
"@parcel/watcher-android-arm64": "2.5.1",
"@parcel/watcher-darwin-arm64": "2.5.1",
"@parcel/watcher-darwin-x64": "2.5.1",
"@parcel/watcher-freebsd-x64": "2.5.1",
"@parcel/watcher-linux-arm-glibc": "2.5.1",
"@parcel/watcher-linux-arm-musl": "2.5.1",
"@parcel/watcher-linux-arm64-glibc": "2.5.1",
"@parcel/watcher-linux-arm64-musl": "2.5.1",
"@parcel/watcher-linux-x64-glibc": "2.5.1",
"@parcel/watcher-linux-x64-musl": "2.5.1",
"@parcel/watcher-win32-arm64": "2.5.1",
"@parcel/watcher-win32-ia32": "2.5.1",
"@parcel/watcher-win32-x64": "2.5.1"
}
},
"node_modules/@parcel/watcher-android-arm64": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.1.tgz",
"integrity": "sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-darwin-arm64": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.1.tgz",
"integrity": "sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-darwin-x64": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.1.tgz",
"integrity": "sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-freebsd-x64": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.1.tgz",
"integrity": "sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-arm-glibc": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.1.tgz",
"integrity": "sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-arm-musl": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.1.tgz",
"integrity": "sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-arm64-glibc": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.1.tgz",
"integrity": "sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-arm64-musl": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.1.tgz",
"integrity": "sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-x64-glibc": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.1.tgz",
"integrity": "sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-linux-x64-musl": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.1.tgz",
"integrity": "sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-win32-arm64": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.1.tgz",
"integrity": "sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-win32-ia32": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.1.tgz",
"integrity": "sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher-win32-x64": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.1.tgz",
"integrity": "sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">= 10.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/watcher/node_modules/detect-libc": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
"integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==",
"dev": true,
"license": "Apache-2.0",
"optional": true,
"bin": {
"detect-libc": "bin/detect-libc.js"
},
"engines": {
"node": ">=0.10"
}
},
"node_modules/@parcel/watcher/node_modules/node-addon-api": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz",
"integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==",
"dev": true,
"license": "MIT",
"optional": true
},
"node_modules/@pkgjs/parseargs": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
"integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
"dev": true,
"license": "MIT",
"optional": true,
"engines": {
"node": ">=14"
}
},
"node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.30.1.tgz",
"integrity": "sha512-pSWY+EVt3rJ9fQ3IqlrEUtXh3cGqGtPDH1FQlNZehO2yYxCHEX1SPsz1M//NXwYfbTlcKr9WObLnJX9FsS9K1Q==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
]
},
"node_modules/@rollup/rollup-android-arm64": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.30.1.tgz",
"integrity": "sha512-/NA2qXxE3D/BRjOJM8wQblmArQq1YoBVJjrjoTSBS09jgUisq7bqxNHJ8kjCHeV21W/9WDGwJEWSN0KQ2mtD/w==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.30.1.tgz",
"integrity": "sha512-r7FQIXD7gB0WJ5mokTUgUWPl0eYIH0wnxqeSAhuIwvnnpjdVB8cRRClyKLQr7lgzjctkbp5KmswWszlwYln03Q==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@rollup/rollup-darwin-x64": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.30.1.tgz",
"integrity": "sha512-x78BavIwSH6sqfP2xeI1hd1GpHL8J4W2BXcVM/5KYKoAD3nNsfitQhvWSw+TFtQTLZ9OmlF+FEInEHyubut2OA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@rollup/rollup-freebsd-arm64": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.30.1.tgz",
"integrity": "sha512-HYTlUAjbO1z8ywxsDFWADfTRfTIIy/oUlfIDmlHYmjUP2QRDTzBuWXc9O4CXM+bo9qfiCclmHk1x4ogBjOUpUQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
]
},
"node_modules/@rollup/rollup-freebsd-x64": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.30.1.tgz",
"integrity": "sha512-1MEdGqogQLccphhX5myCJqeGNYTNcmTyaic9S7CG3JhwuIByJ7J05vGbZxsizQthP1xpVx7kd3o31eOogfEirw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
]
},
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.30.1.tgz",
"integrity": "sha512-PaMRNBSqCx7K3Wc9QZkFx5+CX27WFpAMxJNiYGAXfmMIKC7jstlr32UhTgK6T07OtqR+wYlWm9IxzennjnvdJg==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.30.1.tgz",
"integrity": "sha512-B8Rcyj9AV7ZlEFqvB5BubG5iO6ANDsRKlhIxySXcF1axXYUyqwBok+XZPgIYGBgs7LDXfWfifxhw0Ik57T0Yug==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.30.1.tgz",
"integrity": "sha512-hqVyueGxAj3cBKrAI4aFHLV+h0Lv5VgWZs9CUGqr1z0fZtlADVV1YPOij6AhcK5An33EXaxnDLmJdQikcn5NEw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.30.1.tgz",
"integrity": "sha512-i4Ab2vnvS1AE1PyOIGp2kXni69gU2DAUVt6FSXeIqUCPIR3ZlheMW3oP2JkukDfu3PsexYRbOiJrY+yVNSk9oA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-loongarch64-gnu": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.30.1.tgz",
"integrity": "sha512-fARcF5g296snX0oLGkVxPmysetwUk2zmHcca+e9ObOovBR++9ZPOhqFUM61UUZ2EYpXVPN1redgqVoBB34nTpQ==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.30.1.tgz",
"integrity": "sha512-GLrZraoO3wVT4uFXh67ElpwQY0DIygxdv0BNW9Hkm3X34wu+BkqrDrkcsIapAY+N2ATEbvak0XQ9gxZtCIA5Rw==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.30.1.tgz",
"integrity": "sha512-0WKLaAUUHKBtll0wvOmh6yh3S0wSU9+yas923JIChfxOaaBarmb/lBKPF0w/+jTVozFnOXJeRGZ8NvOxvk/jcw==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.30.1.tgz",
"integrity": "sha512-GWFs97Ruxo5Bt+cvVTQkOJ6TIx0xJDD/bMAOXWJg8TCSTEK8RnFeOeiFTxKniTc4vMIaWvCplMAFBt9miGxgkA==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.30.1.tgz",
"integrity": "sha512-UtgGb7QGgXDIO+tqqJ5oZRGHsDLO8SlpE4MhqpY9Llpzi5rJMvrK6ZGhsRCST2abZdBqIBeXW6WPD5fGK5SDwg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.30.1.tgz",
"integrity": "sha512-V9U8Ey2UqmQsBT+xTOeMzPzwDzyXmnAoO4edZhL7INkwQcaW1Ckv3WJX3qrrp/VHaDkEWIBWhRwP47r8cdrOow==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.30.1.tgz",
"integrity": "sha512-WabtHWiPaFF47W3PkHnjbmWawnX/aE57K47ZDT1BXTS5GgrBUEpvOzq0FI0V/UYzQJgdb8XlhVNH8/fwV8xDjw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.30.1.tgz",
"integrity": "sha512-pxHAU+Zv39hLUTdQQHUVHf4P+0C47y/ZloorHpzs2SXMRqeAWmGghzAhfOlzFHHwjvgokdFAhC4V+6kC1lRRfw==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.30.1.tgz",
"integrity": "sha512-D6qjsXGcvhTjv0kI4fU8tUuBDF/Ueee4SVX79VfNDXZa64TfCW1Slkb6Z7O1p7vflqZjcmOVdZlqf8gvJxc6og==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@schematics/angular": {
"version": "19.1.8",
"resolved": "https://registry.npmjs.org/@schematics/angular/-/angular-19.1.8.tgz",
"integrity": "sha512-ytgClbMPn+i+w1S3QukR/Vdge+sfU9aX49ao+XRwoWdOssHUjmVjQcCEdzu0ucSrNPZnhm34bdDPzADLhln60w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@angular-devkit/core": "19.1.8",
"@angular-devkit/schematics": "19.1.8",
"jsonc-parser": "3.3.1"
},
"engines": {
"node": "^18.19.1 || ^20.11.1 || >=22.0.0",
"npm": "^6.11.0 || ^7.5.6 || >=8.0.0",
"yarn": ">= 1.13.0"
}
},
"node_modules/@sigstore/bundle": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.1.0.tgz",
"integrity": "sha512-Mm1E3/CmDDCz3nDhFKTuYdB47EdRFRQMOE/EAbiG1MJW77/w1b3P7Qx7JSrVJs8PfwOLOVcKQCHErIwCTyPbag==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@sigstore/protobuf-specs": "^0.4.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@sigstore/core": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz",
"integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==",
"dev": true,
"license": "Apache-2.0",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@sigstore/protobuf-specs": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.4.0.tgz",
"integrity": "sha512-o09cLSIq9EKyRXwryWDOJagkml9XgQCoCSRjHOnHLnvsivaW7Qznzz6yjfV7PHJHhIvyp8OH7OX8w0Dc5bQK7A==",
"dev": true,
"license": "Apache-2.0",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@sigstore/sign": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.1.0.tgz",
"integrity": "sha512-knzjmaOHOov1Ur7N/z4B1oPqZ0QX5geUfhrVaqVlu+hl0EAoL4o+l0MSULINcD5GCWe3Z0+YJO8ues6vFlW0Yw==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@sigstore/bundle": "^3.1.0",
"@sigstore/core": "^2.0.0",
"@sigstore/protobuf-specs": "^0.4.0",
"make-fetch-happen": "^14.0.2",
"proc-log": "^5.0.0",
"promise-retry": "^2.0.1"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@sigstore/tuf": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.1.0.tgz",
"integrity": "sha512-suVMQEA+sKdOz5hwP9qNcEjX6B45R+hFFr4LAWzbRc5O+U2IInwvay/bpG5a4s+qR35P/JK/PiKiRGjfuLy1IA==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@sigstore/protobuf-specs": "^0.4.0",
"tuf-js": "^3.0.1"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@sigstore/verify": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.1.0.tgz",
"integrity": "sha512-kAAM06ca4CzhvjIZdONAL9+MLppW3K48wOFy1TbuaWFW/OMfl8JuTgW0Bm02JB1WJGT/ET2eqav0KTEKmxqkIA==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@sigstore/bundle": "^3.1.0",
"@sigstore/core": "^2.0.0",
"@sigstore/protobuf-specs": "^0.4.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@tufjs/canonical-json": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-2.0.0.tgz",
"integrity": "sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==",
"dev": true,
"license": "MIT",
"engines": {
"node": "^16.14.0 || >=18.0.0"
}
},
"node_modules/@tufjs/models": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz",
"integrity": "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@tufjs/canonical-json": "2.0.0",
"minimatch": "^9.0.5"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/@types/estree": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
"integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/node": {
"version": "22.13.4",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.4.tgz",
"integrity": "sha512-ywP2X0DYtX3y08eFVx5fNIw7/uIv8hYUKgXoK8oayJlLnKcRfEYCxWMVE1XagUdVtCJlZT1AU4LXEABW+L1Peg==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"undici-types": "~6.20.0"
}
},
"node_modules/@vitejs/plugin-basic-ssl": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@vitejs/plugin-basic-ssl/-/plugin-basic-ssl-1.2.0.tgz",
"integrity": "sha512-mkQnxTkcldAzIsomk1UuLfAu9n+kpQ3JbHcpCp7d2Oo6ITtji8pHS3QToOWjhPFvNQSnhlkAjmGbhv2QvwO/7Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=14.21.3"
},
"peerDependencies": {
"vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0"
}
},
"node_modules/@yarnpkg/lockfile": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz",
"integrity": "sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==",
"dev": true,
"license": "BSD-2-Clause"
},
"node_modules/abbrev": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-3.0.0.tgz",
"integrity": "sha512-+/kfrslGQ7TNV2ecmQwMJj/B65g5KVq1/L3SGVZ3tCYGqlzFuFCGBZJtMP99wH3NpEUyAjn0zPdPUg0D+DwrOA==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/agent-base": {
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
"integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 14"
}
},
"node_modules/ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"dev": true,
"license": "MIT",
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
"json-schema-traverse": "^1.0.0",
"require-from-string": "^2.0.2"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/ajv-formats": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz",
"integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"ajv": "^8.0.0"
},
"peerDependencies": {
"ajv": "^8.0.0"
},
"peerDependenciesMeta": {
"ajv": {
"optional": true
}
}
},
"node_modules/ansi-escapes": {
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz",
"integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"type-fest": "^0.21.3"
},
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/ansi-regex": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz",
"integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
"node_modules/ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dev": true,
"license": "MIT",
"dependencies": {
"color-convert": "^2.0.1"
},
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
"dev": true,
"license": "MIT"
},
"node_modules/base64-js": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT"
},
"node_modules/beasties": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/beasties/-/beasties-0.2.0.tgz",
"integrity": "sha512-Ljqskqx/tbZagIglYoJIMzH5zgssyp+in9+9sAyh15N22AornBeIDnb8EZ6Rk+6ShfMxd92uO3gfpT0NtZbpow==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"css-select": "^5.1.0",
"css-what": "^6.1.0",
"dom-serializer": "^2.0.0",
"domhandler": "^5.0.3",
"htmlparser2": "^9.1.0",
"picocolors": "^1.1.1",
"postcss": "^8.4.49",
"postcss-media-query-parser": "^0.2.3"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/bl": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
"integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
"dev": true,
"license": "MIT",
"dependencies": {
"buffer": "^5.5.0",
"inherits": "^2.0.4",
"readable-stream": "^3.4.0"
}
},
"node_modules/boolbase": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
"integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==",
"dev": true,
"license": "ISC"
},
"node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dev": true,
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/braces": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"license": "MIT",
"dependencies": {
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/browserslist": {
"version": "4.24.4",
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz",
"integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/browserslist"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"caniuse-lite": "^1.0.30001688",
"electron-to-chromium": "^1.5.73",
"node-releases": "^2.0.19",
"update-browserslist-db": "^1.1.1"
},
"bin": {
"browserslist": "cli.js"
},
"engines": {
"node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7"
}
},
"node_modules/buffer": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
"integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT",
"dependencies": {
"base64-js": "^1.3.1",
"ieee754": "^1.1.13"
}
},
"node_modules/cacache": {
"version": "19.0.1",
"resolved": "https://registry.npmjs.org/cacache/-/cacache-19.0.1.tgz",
"integrity": "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/fs": "^4.0.0",
"fs-minipass": "^3.0.0",
"glob": "^10.2.2",
"lru-cache": "^10.0.1",
"minipass": "^7.0.3",
"minipass-collect": "^2.0.1",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
"p-map": "^7.0.2",
"ssri": "^12.0.0",
"tar": "^7.4.3",
"unique-filename": "^4.0.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/cacache/node_modules/chownr": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
"integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
"dev": true,
"license": "BlueOak-1.0.0",
"engines": {
"node": ">=18"
}
},
"node_modules/cacache/node_modules/lru-cache": {
"version": "10.4.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
"integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
"dev": true,
"license": "ISC"
},
"node_modules/cacache/node_modules/mkdirp": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
"integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
"dev": true,
"license": "MIT",
"bin": {
"mkdirp": "dist/cjs/src/bin.js"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/cacache/node_modules/tar": {
"version": "7.4.3",
"resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
"integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
"dev": true,
"license": "ISC",
"dependencies": {
"@isaacs/fs-minipass": "^4.0.0",
"chownr": "^3.0.0",
"minipass": "^7.1.2",
"minizlib": "^3.0.1",
"mkdirp": "^3.0.1",
"yallist": "^5.0.0"
},
"engines": {
"node": ">=18"
}
},
"node_modules/cacache/node_modules/yallist": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
"integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
"dev": true,
"license": "BlueOak-1.0.0",
"engines": {
"node": ">=18"
}
},
"node_modules/caniuse-lite": {
"version": "1.0.30001700",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001700.tgz",
"integrity": "sha512-2S6XIXwaE7K7erT8dY+kLQcpa5ms63XlRkMkReXjle+kf6c5g38vyMl+Z5y8dSxOFDhcFe+nxnn261PLxBSQsQ==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/caniuse-lite"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "CC-BY-4.0"
},
"node_modules/chalk": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.1.0",
"supports-color": "^7.1.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
"node_modules/chardet": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz",
"integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==",
"dev": true,
"license": "MIT"
},
"node_modules/chokidar": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz",
"integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==",
"dev": true,
"license": "MIT",
"dependencies": {
"readdirp": "^4.0.1"
},
"engines": {
"node": ">= 14.16.0"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/chownr": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
"integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=10"
}
},
"node_modules/cli-cursor": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz",
"integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==",
"dev": true,
"license": "MIT",
"dependencies": {
"restore-cursor": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-spinners": {
"version": "2.9.2",
"resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz",
"integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-truncate": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-4.0.0.tgz",
"integrity": "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA==",
"dev": true,
"license": "MIT",
"dependencies": {
"slice-ansi": "^5.0.0",
"string-width": "^7.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-width": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz",
"integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">= 12"
}
},
"node_modules/cliui": {
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
"integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"string-width": "^4.2.0",
"strip-ansi": "^6.0.1",
"wrap-ansi": "^7.0.0"
},
"engines": {
"node": ">=12"
}
},
"node_modules/cliui/node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/cliui/node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
"dev": true,
"license": "MIT"
},
"node_modules/cliui/node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/cliui/node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/cliui/node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/cliui/node_modules/wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/clone": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz",
"integrity": "sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.8"
}
},
"node_modules/color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"color-name": "~1.1.4"
},
"engines": {
"node": ">=7.0.0"
}
},
"node_modules/color-name": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"dev": true,
"license": "MIT"
},
"node_modules/colorette": {
"version": "2.0.20",
"resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
"integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==",
"dev": true,
"license": "MIT"
},
"node_modules/convert-source-map": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz",
"integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==",
"dev": true,
"license": "MIT"
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"dev": true,
"license": "MIT",
"dependencies": {
"path-key": "^3.1.0",
"shebang-command": "^2.0.0",
"which": "^2.0.1"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/cross-spawn/node_modules/isexe": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
"dev": true,
"license": "ISC"
},
"node_modules/cross-spawn/node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
"dev": true,
"license": "ISC",
"dependencies": {
"isexe": "^2.0.0"
},
"bin": {
"node-which": "bin/node-which"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/css-select": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz",
"integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"boolbase": "^1.0.0",
"css-what": "^6.1.0",
"domhandler": "^5.0.2",
"domutils": "^3.0.1",
"nth-check": "^2.0.1"
},
"funding": {
"url": "https://github.com/sponsors/fb55"
}
},
"node_modules/css-what": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz",
"integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==",
"dev": true,
"license": "BSD-2-Clause",
"engines": {
"node": ">= 6"
},
"funding": {
"url": "https://github.com/sponsors/fb55"
}
},
"node_modules/debug": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/defaults": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz",
"integrity": "sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==",
"dev": true,
"license": "MIT",
"dependencies": {
"clone": "^1.0.2"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/detect-libc": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz",
"integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==",
"dev": true,
"license": "Apache-2.0",
"optional": true,
"engines": {
"node": ">=8"
}
},
"node_modules/dom-serializer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
"integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
"dev": true,
"license": "MIT",
"dependencies": {
"domelementtype": "^2.3.0",
"domhandler": "^5.0.2",
"entities": "^4.2.0"
},
"funding": {
"url": "https://github.com/cheeriojs/dom-serializer?sponsor=1"
}
},
"node_modules/domelementtype": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz",
"integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/fb55"
}
],
"license": "BSD-2-Clause"
},
"node_modules/domhandler": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
"integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"domelementtype": "^2.3.0"
},
"engines": {
"node": ">= 4"
},
"funding": {
"url": "https://github.com/fb55/domhandler?sponsor=1"
}
},
"node_modules/domutils": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz",
"integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"dom-serializer": "^2.0.0",
"domelementtype": "^2.3.0",
"domhandler": "^5.0.3"
},
"funding": {
"url": "https://github.com/fb55/domutils?sponsor=1"
}
},
"node_modules/eastasianwidth": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
"integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
"dev": true,
"license": "MIT"
},
"node_modules/electron-to-chromium": {
"version": "1.5.102",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.102.tgz",
"integrity": "sha512-eHhqaja8tE/FNpIiBrvBjFV/SSKpyWHLvxuR9dPTdo+3V9ppdLmFB7ZZQ98qNovcngPLYIz0oOBF9P0FfZef5Q==",
"dev": true,
"license": "ISC"
},
"node_modules/emoji-regex": {
"version": "10.4.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz",
"integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==",
"dev": true,
"license": "MIT"
},
"node_modules/encoding": {
"version": "0.1.13",
"resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz",
"integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"iconv-lite": "^0.6.2"
}
},
"node_modules/encoding/node_modules/iconv-lite": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/entities": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
"dev": true,
"license": "BSD-2-Clause",
"engines": {
"node": ">=0.12"
},
"funding": {
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/env-paths": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz",
"integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/environment": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz",
"integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/err-code": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz",
"integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==",
"dev": true,
"license": "MIT"
},
"node_modules/esbuild": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.2.tgz",
"integrity": "sha512-+9egpBW8I3CD5XPe0n6BfT5fxLzxrlDzqydF3aviG+9ni1lDC/OvMHcxqEFV0+LANZG5R1bFMWfUrjVsdwxJvA==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
"node": ">=18"
},
"optionalDependencies": {
"@esbuild/aix-ppc64": "0.24.2",
"@esbuild/android-arm": "0.24.2",
"@esbuild/android-arm64": "0.24.2",
"@esbuild/android-x64": "0.24.2",
"@esbuild/darwin-arm64": "0.24.2",
"@esbuild/darwin-x64": "0.24.2",
"@esbuild/freebsd-arm64": "0.24.2",
"@esbuild/freebsd-x64": "0.24.2",
"@esbuild/linux-arm": "0.24.2",
"@esbuild/linux-arm64": "0.24.2",
"@esbuild/linux-ia32": "0.24.2",
"@esbuild/linux-loong64": "0.24.2",
"@esbuild/linux-mips64el": "0.24.2",
"@esbuild/linux-ppc64": "0.24.2",
"@esbuild/linux-riscv64": "0.24.2",
"@esbuild/linux-s390x": "0.24.2",
"@esbuild/linux-x64": "0.24.2",
"@esbuild/netbsd-arm64": "0.24.2",
"@esbuild/netbsd-x64": "0.24.2",
"@esbuild/openbsd-arm64": "0.24.2",
"@esbuild/openbsd-x64": "0.24.2",
"@esbuild/sunos-x64": "0.24.2",
"@esbuild/win32-arm64": "0.24.2",
"@esbuild/win32-ia32": "0.24.2",
"@esbuild/win32-x64": "0.24.2"
}
},
"node_modules/escalade": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
"integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/eventemitter3": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz",
"integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==",
"dev": true,
"license": "MIT"
},
"node_modules/exponential-backoff": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.2.tgz",
"integrity": "sha512-8QxYTVXUkuy7fIIoitQkPwGonB8F3Zj8eEO8Sqg9Zv/bkI7RJAzowee4gr81Hak/dUTpA2Z7VfQgoijjPNlUZA==",
"dev": true,
"license": "Apache-2.0"
},
"node_modules/external-editor": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz",
"integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==",
"dev": true,
"license": "MIT",
"dependencies": {
"chardet": "^0.7.0",
"iconv-lite": "^0.4.24",
"tmp": "^0.0.33"
},
"engines": {
"node": ">=4"
}
},
"node_modules/fast-deep-equal": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
"dev": true,
"license": "MIT"
},
"node_modules/fast-glob": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz",
"integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@nodelib/fs.stat": "^2.0.2",
"@nodelib/fs.walk": "^1.2.3",
"glob-parent": "^5.1.2",
"merge2": "^1.3.0",
"micromatch": "^4.0.8"
},
"engines": {
"node": ">=8.6.0"
}
},
"node_modules/fast-uri": {
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz",
"integrity": "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/fastify"
},
{
"type": "opencollective",
"url": "https://opencollective.com/fastify"
}
],
"license": "BSD-3-Clause"
},
"node_modules/fastq": {
"version": "1.19.0",
"resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.0.tgz",
"integrity": "sha512-7SFSRCNjBQIZH/xZR3iy5iQYR8aGBE0h3VG6/cwlbrpdciNYBMotQav8c1XI3HjHH+NikUpP53nPdlZSdWmFzA==",
"dev": true,
"license": "ISC",
"dependencies": {
"reusify": "^1.0.4"
}
},
"node_modules/fill-range": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"license": "MIT",
"dependencies": {
"to-regex-range": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/foreground-child": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz",
"integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==",
"dev": true,
"license": "ISC",
"dependencies": {
"cross-spawn": "^7.0.0",
"signal-exit": "^4.0.1"
},
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/fs-minipass": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.3.tgz",
"integrity": "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^7.0.3"
},
"engines": {
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
}
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/function-bind": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/gensync": {
"version": "1.0.0-beta.2",
"resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
"integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/get-caller-file": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
"dev": true,
"license": "ISC",
"engines": {
"node": "6.* || 8.* || >= 10.*"
}
},
"node_modules/get-east-asian-width": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz",
"integrity": "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/glob": {
"version": "10.4.5",
"resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
"integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
"dev": true,
"license": "ISC",
"dependencies": {
"foreground-child": "^3.1.0",
"jackspeak": "^3.1.2",
"minimatch": "^9.0.4",
"minipass": "^7.1.2",
"package-json-from-dist": "^1.0.0",
"path-scurry": "^1.11.1"
},
"bin": {
"glob": "dist/esm/bin.mjs"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/glob-parent": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dev": true,
"license": "ISC",
"dependencies": {
"is-glob": "^4.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/glob-to-regexp": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz",
"integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==",
"dev": true,
"license": "BSD-2-Clause"
},
"node_modules/globals": {
"version": "11.12.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz",
"integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
"dev": true,
"license": "ISC"
},
"node_modules/has-flag": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/hosted-git-info": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.0.2.tgz",
"integrity": "sha512-sYKnA7eGln5ov8T8gnYlkSOxFJvywzEx9BueN6xo/GKO8PGiI6uK6xx+DIGe45T3bdVjLAQDQW1aicT8z8JwQg==",
"dev": true,
"license": "ISC",
"dependencies": {
"lru-cache": "^10.0.1"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/hosted-git-info/node_modules/lru-cache": {
"version": "10.4.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
"integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
"dev": true,
"license": "ISC"
},
"node_modules/htmlparser2": {
"version": "9.1.0",
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-9.1.0.tgz",
"integrity": "sha512-5zfg6mHUoaer/97TxnGpxmbR7zJtPwIYFMZ/H5ucTlPZhKvtum05yiPK3Mgai3a0DyVxv7qYqoweaEd2nrYQzQ==",
"dev": true,
"funding": [
"https://github.com/fb55/htmlparser2?sponsor=1",
{
"type": "github",
"url": "https://github.com/sponsors/fb55"
}
],
"license": "MIT",
"dependencies": {
"domelementtype": "^2.3.0",
"domhandler": "^5.0.3",
"domutils": "^3.1.0",
"entities": "^4.5.0"
}
},
"node_modules/http-cache-semantics": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz",
"integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==",
"dev": true,
"license": "BSD-2-Clause"
},
"node_modules/http-proxy-agent": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz",
"integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==",
"dev": true,
"license": "MIT",
"dependencies": {
"agent-base": "^7.1.0",
"debug": "^4.3.4"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/https-proxy-agent": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
"integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
"dev": true,
"license": "MIT",
"dependencies": {
"agent-base": "^7.1.2",
"debug": "4"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/iconv-lite": {
"version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
"dev": true,
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/ieee754": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
"integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "BSD-3-Clause"
},
"node_modules/ignore-walk": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-7.0.0.tgz",
"integrity": "sha512-T4gbf83A4NH95zvhVYZc+qWocBBGlpzUXLPGurJggw/WIOwicfXJChLDP/iBZnN5WqROSu5Bm3hhle4z8a8YGQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"minimatch": "^9.0.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/immutable": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/immutable/-/immutable-5.0.3.tgz",
"integrity": "sha512-P8IdPQHq3lA1xVeBRi5VPqUm5HDgKnx0Ru51wZz5mjxHr5n3RWhjIpOFU7ybkUxfB+5IToy+OLaHYDBIWsv+uw==",
"dev": true,
"license": "MIT"
},
"node_modules/imurmurhash": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
"integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.8.19"
}
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
"dev": true,
"license": "ISC"
},
"node_modules/ini": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/ini/-/ini-5.0.0.tgz",
"integrity": "sha512-+N0ngpO3e7cRUWOJAS7qw0IZIVc6XPrW4MlFBdD066F2L4k1L6ker3hLqSq7iXxU5tgS4WGkIUElWn5vogAEnw==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/ip-address": {
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz",
"integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==",
"dev": true,
"license": "MIT",
"dependencies": {
"jsbn": "1.1.0",
"sprintf-js": "^1.1.3"
},
"engines": {
"node": ">= 12"
}
},
"node_modules/is-core-module": {
"version": "2.16.1",
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
"integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==",
"dev": true,
"license": "MIT",
"dependencies": {
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/is-extglob": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-fullwidth-code-point": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz",
"integrity": "sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/is-glob": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-extglob": "^2.1.1"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-interactive": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz",
"integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/is-number": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.12.0"
}
},
"node_modules/is-unicode-supported": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz",
"integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/isexe": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
"integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=16"
}
},
"node_modules/istanbul-lib-coverage": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz",
"integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=8"
}
},
"node_modules/istanbul-lib-instrument": {
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz",
"integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"@babel/core": "^7.23.9",
"@babel/parser": "^7.23.9",
"@istanbuljs/schema": "^0.1.3",
"istanbul-lib-coverage": "^3.2.0",
"semver": "^7.5.4"
},
"engines": {
"node": ">=10"
}
},
"node_modules/jackspeak": {
"version": "3.4.3",
"resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
"integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
"dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
"@isaacs/cliui": "^8.0.2"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
},
"optionalDependencies": {
"@pkgjs/parseargs": "^0.11.0"
}
},
"node_modules/js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
"dev": true,
"license": "MIT"
},
"node_modules/jsbn": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz",
"integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==",
"dev": true,
"license": "MIT"
},
"node_modules/jsesc": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
"integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
"dev": true,
"license": "MIT",
"bin": {
"jsesc": "bin/jsesc"
},
"engines": {
"node": ">=6"
}
},
"node_modules/json-parse-even-better-errors": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-4.0.0.tgz",
"integrity": "sha512-lR4MXjGNgkJc7tkQ97kb2nuEMnNCyU//XYVH0MKTGcXEiSudQ5MKGKen3C5QubYy0vmq+JGitUg92uuywGEwIA==",
"dev": true,
"license": "MIT",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"dev": true,
"license": "MIT"
},
"node_modules/json5": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
"integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
"dev": true,
"license": "MIT",
"bin": {
"json5": "lib/cli.js"
},
"engines": {
"node": ">=6"
}
},
"node_modules/jsonc-parser": {
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.3.1.tgz",
"integrity": "sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ==",
"dev": true,
"license": "MIT"
},
"node_modules/jsonparse": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz",
"integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==",
"dev": true,
"engines": [
"node >= 0.2.0"
],
"license": "MIT"
},
"node_modules/listr2": {
"version": "8.2.5",
"resolved": "https://registry.npmjs.org/listr2/-/listr2-8.2.5.tgz",
"integrity": "sha512-iyAZCeyD+c1gPyE9qpFu8af0Y+MRtmKOncdGoA2S5EY8iFq99dmmvkNnHiWo+pj0s7yH7l3KPIgee77tKpXPWQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"cli-truncate": "^4.0.0",
"colorette": "^2.0.20",
"eventemitter3": "^5.0.1",
"log-update": "^6.1.0",
"rfdc": "^1.4.1",
"wrap-ansi": "^9.0.0"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/listr2/node_modules/ansi-styles": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
"integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/listr2/node_modules/wrap-ansi": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz",
"integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/lmdb": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/lmdb/-/lmdb-3.2.2.tgz",
"integrity": "sha512-LriG93la4PbmPMwI7Hbv8W+0ncLK7549w4sbZSi4QGDjnnxnmNMgxUkaQTEMzH8TpwsfFvgEjpLX7V8B/I9e3g==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"dependencies": {
"msgpackr": "^1.11.2",
"node-addon-api": "^6.1.0",
"node-gyp-build-optional-packages": "5.2.2",
"ordered-binary": "^1.5.3",
"weak-lru-cache": "^1.2.2"
},
"bin": {
"download-lmdb-prebuilds": "bin/download-prebuilds.js"
},
"optionalDependencies": {
"@lmdb/lmdb-darwin-arm64": "3.2.2",
"@lmdb/lmdb-darwin-x64": "3.2.2",
"@lmdb/lmdb-linux-arm": "3.2.2",
"@lmdb/lmdb-linux-arm64": "3.2.2",
"@lmdb/lmdb-linux-x64": "3.2.2",
"@lmdb/lmdb-win32-x64": "3.2.2"
}
},
"node_modules/log-symbols": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz",
"integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==",
"dev": true,
"license": "MIT",
"dependencies": {
"chalk": "^4.1.0",
"is-unicode-supported": "^0.1.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log-update": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz",
"integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-escapes": "^7.0.0",
"cli-cursor": "^5.0.0",
"slice-ansi": "^7.1.0",
"strip-ansi": "^7.1.0",
"wrap-ansi": "^9.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log-update/node_modules/ansi-escapes": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.0.0.tgz",
"integrity": "sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==",
"dev": true,
"license": "MIT",
"dependencies": {
"environment": "^1.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log-update/node_modules/ansi-styles": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
"integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/log-update/node_modules/is-fullwidth-code-point": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.0.0.tgz",
"integrity": "sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA==",
"dev": true,
"license": "MIT",
"dependencies": {
"get-east-asian-width": "^1.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log-update/node_modules/slice-ansi": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.0.tgz",
"integrity": "sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"is-fullwidth-code-point": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/slice-ansi?sponsor=1"
}
},
"node_modules/log-update/node_modules/wrap-ansi": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz",
"integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/lru-cache": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
"integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
"dev": true,
"license": "ISC",
"dependencies": {
"yallist": "^3.0.2"
}
},
"node_modules/magic-string": {
"version": "0.30.17",
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz",
"integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/sourcemap-codec": "^1.5.0"
}
},
"node_modules/make-fetch-happen": {
"version": "14.0.3",
"resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-14.0.3.tgz",
"integrity": "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/agent": "^3.0.0",
"cacache": "^19.0.1",
"http-cache-semantics": "^4.1.1",
"minipass": "^7.0.2",
"minipass-fetch": "^4.0.0",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
"negotiator": "^1.0.0",
"proc-log": "^5.0.0",
"promise-retry": "^2.0.1",
"ssri": "^12.0.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/merge2": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 8"
}
},
"node_modules/micromatch": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"dev": true,
"license": "MIT",
"dependencies": {
"braces": "^3.0.3",
"picomatch": "^2.3.1"
},
"engines": {
"node": ">=8.6"
}
},
"node_modules/micromatch/node_modules/picomatch": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8.6"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/mimic-fn": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
"integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/mimic-function": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz",
"integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/minimatch": {
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
"integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
"dev": true,
"license": "ISC",
"dependencies": {
"brace-expansion": "^2.0.1"
},
"engines": {
"node": ">=16 || 14 >=14.17"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/minipass": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
"integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=16 || 14 >=14.17"
}
},
"node_modules/minipass-collect": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-2.0.1.tgz",
"integrity": "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^7.0.3"
},
"engines": {
"node": ">=16 || 14 >=14.17"
}
},
"node_modules/minipass-fetch": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-4.0.0.tgz",
"integrity": "sha512-2v6aXUXwLP1Epd/gc32HAMIWoczx+fZwEPRHm/VwtrJzRGwR1qGZXEYV3Zp8ZjjbwaZhMrM6uHV4KVkk+XCc2w==",
"dev": true,
"license": "MIT",
"dependencies": {
"minipass": "^7.0.3",
"minipass-sized": "^1.0.3",
"minizlib": "^3.0.1"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
},
"optionalDependencies": {
"encoding": "^0.1.13"
}
},
"node_modules/minipass-flush": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz",
"integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^3.0.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/minipass-flush/node_modules/minipass": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"dev": true,
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/minipass-flush/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"dev": true,
"license": "ISC"
},
"node_modules/minipass-pipeline": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz",
"integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^3.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/minipass-pipeline/node_modules/minipass": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"dev": true,
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/minipass-pipeline/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"dev": true,
"license": "ISC"
},
"node_modules/minipass-sized": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz",
"integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^3.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/minipass-sized/node_modules/minipass": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"dev": true,
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/minipass-sized/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"dev": true,
"license": "ISC"
},
"node_modules/minizlib": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.1.tgz",
"integrity": "sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==",
"dev": true,
"license": "MIT",
"dependencies": {
"minipass": "^7.0.4",
"rimraf": "^5.0.5"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/mkdirp": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
"integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
"dev": true,
"license": "MIT",
"bin": {
"mkdirp": "bin/cmd.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/mrmime": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.0.tgz",
"integrity": "sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=10"
}
},
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"dev": true,
"license": "MIT"
},
"node_modules/msgpackr": {
"version": "1.11.2",
"resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.2.tgz",
"integrity": "sha512-F9UngXRlPyWCDEASDpTf6c9uNhGPTqnTeLVt7bN+bU1eajoR/8V9ys2BRaV5C/e5ihE6sJ9uPIKaYt6bFuO32g==",
"dev": true,
"license": "MIT",
"optional": true,
"optionalDependencies": {
"msgpackr-extract": "^3.0.2"
}
},
"node_modules/msgpackr-extract": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz",
"integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"dependencies": {
"node-gyp-build-optional-packages": "5.2.2"
},
"bin": {
"download-msgpackr-prebuilds": "bin/download-prebuilds.js"
},
"optionalDependencies": {
"@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3",
"@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3",
"@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3",
"@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3",
"@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3",
"@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3"
}
},
"node_modules/mute-stream": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-2.0.0.tgz",
"integrity": "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/nanoid": {
"version": "3.3.8",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz",
"integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"bin": {
"nanoid": "bin/nanoid.cjs"
},
"engines": {
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
}
},
"node_modules/negotiator": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
"integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/node-addon-api": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-6.1.0.tgz",
"integrity": "sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==",
"dev": true,
"license": "MIT",
"optional": true
},
"node_modules/node-gyp": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-11.1.0.tgz",
"integrity": "sha512-/+7TuHKnBpnMvUQnsYEb0JOozDZqarQbfNuSGLXIjhStMT0fbw7IdSqWgopOP5xhRZE+lsbIvAHcekddruPZgQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"env-paths": "^2.2.0",
"exponential-backoff": "^3.1.1",
"glob": "^10.3.10",
"graceful-fs": "^4.2.6",
"make-fetch-happen": "^14.0.3",
"nopt": "^8.0.0",
"proc-log": "^5.0.0",
"semver": "^7.3.5",
"tar": "^7.4.3",
"which": "^5.0.0"
},
"bin": {
"node-gyp": "bin/node-gyp.js"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/node-gyp-build-optional-packages": {
"version": "5.2.2",
"resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz",
"integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
"detect-libc": "^2.0.1"
},
"bin": {
"node-gyp-build-optional-packages": "bin.js",
"node-gyp-build-optional-packages-optional": "optional.js",
"node-gyp-build-optional-packages-test": "build-test.js"
}
},
"node_modules/node-gyp/node_modules/chownr": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
"integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
"dev": true,
"license": "BlueOak-1.0.0",
"engines": {
"node": ">=18"
}
},
"node_modules/node-gyp/node_modules/mkdirp": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
"integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
"dev": true,
"license": "MIT",
"bin": {
"mkdirp": "dist/cjs/src/bin.js"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/node-gyp/node_modules/tar": {
"version": "7.4.3",
"resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
"integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
"dev": true,
"license": "ISC",
"dependencies": {
"@isaacs/fs-minipass": "^4.0.0",
"chownr": "^3.0.0",
"minipass": "^7.1.2",
"minizlib": "^3.0.1",
"mkdirp": "^3.0.1",
"yallist": "^5.0.0"
},
"engines": {
"node": ">=18"
}
},
"node_modules/node-gyp/node_modules/yallist": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
"integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
"dev": true,
"license": "BlueOak-1.0.0",
"engines": {
"node": ">=18"
}
},
"node_modules/node-releases": {
"version": "2.0.19",
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz",
"integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==",
"dev": true,
"license": "MIT"
},
"node_modules/nopt": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-8.1.0.tgz",
"integrity": "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A==",
"dev": true,
"license": "ISC",
"dependencies": {
"abbrev": "^3.0.0"
},
"bin": {
"nopt": "bin/nopt.js"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/npm-bundled": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-4.0.0.tgz",
"integrity": "sha512-IxaQZDMsqfQ2Lz37VvyyEtKLe8FsRZuysmedy/N06TU1RyVppYKXrO4xIhR0F+7ubIBox6Q7nir6fQI3ej39iA==",
"dev": true,
"license": "ISC",
"dependencies": {
"npm-normalize-package-bin": "^4.0.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/npm-install-checks": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.1.tgz",
"integrity": "sha512-u6DCwbow5ynAX5BdiHQ9qvexme4U3qHW3MWe5NqH+NeBm0LbiH6zvGjNNew1fY+AZZUtVHbOPF3j7mJxbUzpXg==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"semver": "^7.1.1"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/npm-normalize-package-bin": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-4.0.0.tgz",
"integrity": "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/npm-package-arg": {
"version": "12.0.1",
"resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.1.tgz",
"integrity": "sha512-aDxjFfPV3Liw0WOBWlyZLMBqtbgbg03rmGvHDJa2Ttv7tIz+1oB5qWec4psCDFZcZi9b5XdGkPdQiJxOPzvQRQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"hosted-git-info": "^8.0.0",
"proc-log": "^5.0.0",
"semver": "^7.3.5",
"validate-npm-package-name": "^6.0.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/npm-packlist": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-9.0.0.tgz",
"integrity": "sha512-8qSayfmHJQTx3nJWYbbUmflpyarbLMBc6LCAjYsiGtXxDB68HaZpb8re6zeaLGxZzDuMdhsg70jryJe+RrItVQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"ignore-walk": "^7.0.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/npm-pick-manifest": {
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
"integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"npm-install-checks": "^7.1.0",
"npm-normalize-package-bin": "^4.0.0",
"npm-package-arg": "^12.0.0",
"semver": "^7.3.5"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/npm-registry-fetch": {
"version": "18.0.2",
"resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-18.0.2.tgz",
"integrity": "sha512-LeVMZBBVy+oQb5R6FDV9OlJCcWDU+al10oKpe+nsvcHnG24Z3uM3SvJYKfGJlfGjVU8v9liejCrUR/M5HO5NEQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/redact": "^3.0.0",
"jsonparse": "^1.3.1",
"make-fetch-happen": "^14.0.0",
"minipass": "^7.0.2",
"minipass-fetch": "^4.0.0",
"minizlib": "^3.0.1",
"npm-package-arg": "^12.0.0",
"proc-log": "^5.0.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/nth-check": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
"integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"boolbase": "^1.0.0"
},
"funding": {
"url": "https://github.com/fb55/nth-check?sponsor=1"
}
},
"node_modules/onetime": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz",
"integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"mimic-function": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/ora": {
"version": "5.4.1",
"resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz",
"integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"bl": "^4.1.0",
"chalk": "^4.1.0",
"cli-cursor": "^3.1.0",
"cli-spinners": "^2.5.0",
"is-interactive": "^1.0.0",
"is-unicode-supported": "^0.1.0",
"log-symbols": "^4.1.0",
"strip-ansi": "^6.0.0",
"wcwidth": "^1.0.1"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/ora/node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/ora/node_modules/cli-cursor": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz",
"integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==",
"dev": true,
"license": "MIT",
"dependencies": {
"restore-cursor": "^3.1.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/ora/node_modules/onetime": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
"integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==",
"dev": true,
"license": "MIT",
"dependencies": {
"mimic-fn": "^2.1.0"
},
"engines": {
"node": ">=6"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/ora/node_modules/restore-cursor": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz",
"integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==",
"dev": true,
"license": "MIT",
"dependencies": {
"onetime": "^5.1.0",
"signal-exit": "^3.0.2"
},
"engines": {
"node": ">=8"
}
},
"node_modules/ora/node_modules/signal-exit": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
"integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
"dev": true,
"license": "ISC"
},
"node_modules/ora/node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/ordered-binary": {
"version": "1.5.3",
"resolved": "https://registry.npmjs.org/ordered-binary/-/ordered-binary-1.5.3.tgz",
"integrity": "sha512-oGFr3T+pYdTGJ+YFEILMpS3es+GiIbs9h/XQrclBXUtd44ey7XwfsMzM31f64I1SQOawDoDr/D823kNCADI8TA==",
"dev": true,
"license": "MIT",
"optional": true
},
"node_modules/os-tmpdir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
"integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/p-map": {
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/p-map/-/p-map-7.0.3.tgz",
"integrity": "sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/package-json-from-dist": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz",
"integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
"dev": true,
"license": "BlueOak-1.0.0"
},
"node_modules/pacote": {
"version": "20.0.0",
"resolved": "https://registry.npmjs.org/pacote/-/pacote-20.0.0.tgz",
"integrity": "sha512-pRjC5UFwZCgx9kUFDVM9YEahv4guZ1nSLqwmWiLUnDbGsjs+U5w7z6Uc8HNR1a6x8qnu5y9xtGE6D1uAuYz+0A==",
"dev": true,
"license": "ISC",
"dependencies": {
"@npmcli/git": "^6.0.0",
"@npmcli/installed-package-contents": "^3.0.0",
"@npmcli/package-json": "^6.0.0",
"@npmcli/promise-spawn": "^8.0.0",
"@npmcli/run-script": "^9.0.0",
"cacache": "^19.0.0",
"fs-minipass": "^3.0.0",
"minipass": "^7.0.2",
"npm-package-arg": "^12.0.0",
"npm-packlist": "^9.0.0",
"npm-pick-manifest": "^10.0.0",
"npm-registry-fetch": "^18.0.0",
"proc-log": "^5.0.0",
"promise-retry": "^2.0.1",
"sigstore": "^3.0.0",
"ssri": "^12.0.0",
"tar": "^6.1.11"
},
"bin": {
"pacote": "bin/index.js"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/parse5": {
"version": "7.2.1",
"resolved": "https://registry.npmjs.org/parse5/-/parse5-7.2.1.tgz",
"integrity": "sha512-BuBYQYlv1ckiPdQi/ohiivi9Sagc9JG+Ozs0r7b/0iK3sKmrb0b9FdWdBbOdx6hBCM/F9Ir82ofnBhtZOjCRPQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"entities": "^4.5.0"
},
"funding": {
"url": "https://github.com/inikulin/parse5?sponsor=1"
}
},
"node_modules/parse5-html-rewriting-stream": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/parse5-html-rewriting-stream/-/parse5-html-rewriting-stream-7.0.0.tgz",
"integrity": "sha512-mazCyGWkmCRWDI15Zp+UiCqMp/0dgEmkZRvhlsqqKYr4SsVm/TvnSpD9fCvqCA2zoWJcfRym846ejWBBHRiYEg==",
"dev": true,
"license": "MIT",
"dependencies": {
"entities": "^4.3.0",
"parse5": "^7.0.0",
"parse5-sax-parser": "^7.0.0"
},
"funding": {
"url": "https://github.com/inikulin/parse5?sponsor=1"
}
},
"node_modules/parse5-sax-parser": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/parse5-sax-parser/-/parse5-sax-parser-7.0.0.tgz",
"integrity": "sha512-5A+v2SNsq8T6/mG3ahcz8ZtQ0OUFTatxPbeidoMB7tkJSGDY3tdfl4MHovtLQHkEn5CGxijNWRQHhRQ6IRpXKg==",
"dev": true,
"license": "MIT",
"dependencies": {
"parse5": "^7.0.0"
},
"funding": {
"url": "https://github.com/inikulin/parse5?sponsor=1"
}
},
"node_modules/path-key": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/path-parse": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
"dev": true,
"license": "MIT"
},
"node_modules/path-scurry": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
"integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
"dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
"lru-cache": "^10.2.0",
"minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
},
"engines": {
"node": ">=16 || 14 >=14.18"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/path-scurry/node_modules/lru-cache": {
"version": "10.4.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
"integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
"dev": true,
"license": "ISC"
},
"node_modules/picocolors": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
"dev": true,
"license": "ISC"
},
"node_modules/picomatch": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
"integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/piscina": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/piscina/-/piscina-4.8.0.tgz",
"integrity": "sha512-EZJb+ZxDrQf3dihsUL7p42pjNyrNIFJCrRHPMgxu/svsj+P3xS3fuEWp7k2+rfsavfl1N0G29b1HGs7J0m8rZA==",
"dev": true,
"license": "MIT",
"optionalDependencies": {
"@napi-rs/nice": "^1.0.1"
}
},
"node_modules/postcss": {
"version": "8.5.3",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz",
"integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/postcss"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"nanoid": "^3.3.8",
"picocolors": "^1.1.1",
"source-map-js": "^1.2.1"
},
"engines": {
"node": "^10 || ^12 || >=14"
}
},
"node_modules/postcss-media-query-parser": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/postcss-media-query-parser/-/postcss-media-query-parser-0.2.3.tgz",
"integrity": "sha512-3sOlxmbKcSHMjlUXQZKQ06jOswE7oVkXPxmZdoB1r5l0q6gTFTQSHxNxOrCccElbW7dxNytifNEo8qidX2Vsig==",
"dev": true,
"license": "MIT"
},
"node_modules/proc-log": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz",
"integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/promise-retry": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz",
"integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==",
"dev": true,
"license": "MIT",
"dependencies": {
"err-code": "^2.0.2",
"retry": "^0.12.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/queue-microtask": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
"integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT"
},
"node_modules/readable-stream": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
"dev": true,
"license": "MIT",
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/readdirp": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz",
"integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 14.18.0"
},
"funding": {
"type": "individual",
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/reflect-metadata": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.2.tgz",
"integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==",
"dev": true,
"license": "Apache-2.0"
},
"node_modules/require-directory": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/require-from-string": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
"integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/resolve": {
"version": "1.22.10",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz",
"integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-core-module": "^2.16.0",
"path-parse": "^1.0.7",
"supports-preserve-symlinks-flag": "^1.0.0"
},
"bin": {
"resolve": "bin/resolve"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/restore-cursor": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz",
"integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==",
"dev": true,
"license": "MIT",
"dependencies": {
"onetime": "^7.0.0",
"signal-exit": "^4.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/retry": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz",
"integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 4"
}
},
"node_modules/reusify": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz",
"integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==",
"dev": true,
"license": "MIT",
"engines": {
"iojs": ">=1.0.0",
"node": ">=0.10.0"
}
},
"node_modules/rfdc": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz",
"integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==",
"dev": true,
"license": "MIT"
},
"node_modules/rimraf": {
"version": "5.0.10",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz",
"integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"glob": "^10.3.7"
},
"bin": {
"rimraf": "dist/esm/bin.mjs"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/rollup": {
"version": "4.30.1",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.30.1.tgz",
"integrity": "sha512-mlJ4glW020fPuLi7DkM/lN97mYEZGWeqBnrljzN0gs7GLctqX3lNWxKQ7Gl712UAX+6fog/L3jh4gb7R6aVi3w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/estree": "1.0.6"
},
"bin": {
"rollup": "dist/bin/rollup"
},
"engines": {
"node": ">=18.0.0",
"npm": ">=8.0.0"
},
"optionalDependencies": {
"@rollup/rollup-android-arm-eabi": "4.30.1",
"@rollup/rollup-android-arm64": "4.30.1",
"@rollup/rollup-darwin-arm64": "4.30.1",
"@rollup/rollup-darwin-x64": "4.30.1",
"@rollup/rollup-freebsd-arm64": "4.30.1",
"@rollup/rollup-freebsd-x64": "4.30.1",
"@rollup/rollup-linux-arm-gnueabihf": "4.30.1",
"@rollup/rollup-linux-arm-musleabihf": "4.30.1",
"@rollup/rollup-linux-arm64-gnu": "4.30.1",
"@rollup/rollup-linux-arm64-musl": "4.30.1",
"@rollup/rollup-linux-loongarch64-gnu": "4.30.1",
"@rollup/rollup-linux-powerpc64le-gnu": "4.30.1",
"@rollup/rollup-linux-riscv64-gnu": "4.30.1",
"@rollup/rollup-linux-s390x-gnu": "4.30.1",
"@rollup/rollup-linux-x64-gnu": "4.30.1",
"@rollup/rollup-linux-x64-musl": "4.30.1",
"@rollup/rollup-win32-arm64-msvc": "4.30.1",
"@rollup/rollup-win32-ia32-msvc": "4.30.1",
"@rollup/rollup-win32-x64-msvc": "4.30.1",
"fsevents": "~2.3.2"
}
},
"node_modules/run-parallel": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
"integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT",
"dependencies": {
"queue-microtask": "^1.2.2"
}
},
"node_modules/rxjs": {
"version": "7.8.1",
"resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz",
"integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==",
"license": "Apache-2.0",
"dependencies": {
"tslib": "^2.1.0"
}
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT"
},
"node_modules/safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
"dev": true,
"license": "MIT"
},
"node_modules/sass": {
"version": "1.83.1",
"resolved": "https://registry.npmjs.org/sass/-/sass-1.83.1.tgz",
"integrity": "sha512-EVJbDaEs4Rr3F0glJzFSOvtg2/oy2V/YrGFPqPY24UqcLDWcI9ZY5sN+qyO3c/QCZwzgfirvhXvINiJCE/OLcA==",
"dev": true,
"license": "MIT",
"dependencies": {
"chokidar": "^4.0.0",
"immutable": "^5.0.2",
"source-map-js": ">=0.6.2 <2.0.0"
},
"bin": {
"sass": "sass.js"
},
"engines": {
"node": ">=14.0.0"
},
"optionalDependencies": {
"@parcel/watcher": "^2.4.1"
}
},
"node_modules/semver": {
"version": "7.6.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
"integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
"dev": true,
"license": "MIT",
"dependencies": {
"shebang-regex": "^3.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/shebang-regex": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/signal-exit": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
"integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/sigstore": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.1.0.tgz",
"integrity": "sha512-ZpzWAFHIFqyFE56dXqgX/DkDRZdz+rRcjoIk/RQU4IX0wiCv1l8S7ZrXDHcCc+uaf+6o7w3h2l3g6GYG5TKN9Q==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@sigstore/bundle": "^3.1.0",
"@sigstore/core": "^2.0.0",
"@sigstore/protobuf-specs": "^0.4.0",
"@sigstore/sign": "^3.1.0",
"@sigstore/tuf": "^3.1.0",
"@sigstore/verify": "^2.1.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/slice-ansi": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-5.0.0.tgz",
"integrity": "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.0.0",
"is-fullwidth-code-point": "^4.0.0"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/slice-ansi?sponsor=1"
}
},
"node_modules/slice-ansi/node_modules/ansi-styles": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
"integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/smart-buffer": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz",
"integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 6.0.0",
"npm": ">= 3.0.0"
}
},
"node_modules/socks": {
"version": "2.8.4",
"resolved": "https://registry.npmjs.org/socks/-/socks-2.8.4.tgz",
"integrity": "sha512-D3YaD0aRxR3mEcqnidIs7ReYJFVzWdd6fXJYUM8ixcQcJRGTka/b3saV0KflYhyVJXKhb947GndU35SxYNResQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"ip-address": "^9.0.5",
"smart-buffer": "^4.2.0"
},
"engines": {
"node": ">= 10.0.0",
"npm": ">= 3.0.0"
}
},
"node_modules/socks-proxy-agent": {
"version": "8.0.5",
"resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz",
"integrity": "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==",
"dev": true,
"license": "MIT",
"dependencies": {
"agent-base": "^7.1.2",
"debug": "^4.3.4",
"socks": "^2.8.3"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/source-map": {
"version": "0.7.4",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz",
"integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">= 8"
}
},
"node_modules/source-map-js": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
"integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/spdx-correct": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz",
"integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"spdx-expression-parse": "^3.0.0",
"spdx-license-ids": "^3.0.0"
}
},
"node_modules/spdx-exceptions": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz",
"integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==",
"dev": true,
"license": "CC-BY-3.0"
},
"node_modules/spdx-expression-parse": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz",
"integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"spdx-exceptions": "^2.1.0",
"spdx-license-ids": "^3.0.0"
}
},
"node_modules/spdx-license-ids": {
"version": "3.0.21",
"resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.21.tgz",
"integrity": "sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg==",
"dev": true,
"license": "CC0-1.0"
},
"node_modules/sprintf-js": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz",
"integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==",
"dev": true,
"license": "BSD-3-Clause"
},
"node_modules/ssri": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/ssri/-/ssri-12.0.0.tgz",
"integrity": "sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^7.0.3"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"dev": true,
"license": "MIT",
"dependencies": {
"safe-buffer": "~5.2.0"
}
},
"node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/string-width-cjs": {
"name": "string-width",
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/string-width-cjs/node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/string-width-cjs/node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
"dev": true,
"license": "MIT"
},
"node_modules/string-width-cjs/node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/string-width-cjs/node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/strip-ansi": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz",
"integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/strip-ansi-cjs": {
"name": "strip-ansi",
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/strip-ansi-cjs/node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/supports-color": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-flag": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/supports-preserve-symlinks-flag": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
"integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/symbol-observable": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-4.0.0.tgz",
"integrity": "sha512-b19dMThMV4HVFynSAM1++gBHAbk2Tc/osgLIBZMKsyqh34jb2e8Os7T6ZW/Bt3pJFdBTd2JwAnAAEQV7rSNvcQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10"
}
},
"node_modules/tar": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
"integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==",
"dev": true,
"license": "ISC",
"dependencies": {
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
"minipass": "^5.0.0",
"minizlib": "^2.1.1",
"mkdirp": "^1.0.3",
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/tar/node_modules/fs-minipass": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz",
"integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==",
"dev": true,
"license": "ISC",
"dependencies": {
"minipass": "^3.0.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/tar/node_modules/fs-minipass/node_modules/minipass": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"dev": true,
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/tar/node_modules/minipass": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
"integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=8"
}
},
"node_modules/tar/node_modules/minizlib": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
"integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
"dev": true,
"license": "MIT",
"dependencies": {
"minipass": "^3.0.0",
"yallist": "^4.0.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/tar/node_modules/minizlib/node_modules/minipass": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"dev": true,
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/tar/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"dev": true,
"license": "ISC"
},
"node_modules/tmp": {
"version": "0.0.33",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
"dev": true,
"license": "MIT",
"dependencies": {
"os-tmpdir": "~1.0.2"
},
"engines": {
"node": ">=0.6.0"
}
},
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-number": "^7.0.0"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
"license": "0BSD"
},
"node_modules/tuf-js": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-3.0.1.tgz",
"integrity": "sha512-+68OP1ZzSF84rTckf3FA95vJ1Zlx/uaXyiiKyPd1pA4rZNkpEvDAKmsu1xUSmbF/chCRYgZ6UZkDwC7PmzmAyA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@tufjs/models": "3.0.1",
"debug": "^4.3.6",
"make-fetch-happen": "^14.0.1"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/type-fest": {
"version": "0.21.3",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz",
"integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==",
"dev": true,
"license": "(MIT OR CC0-1.0)",
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/typescript": {
"version": "5.7.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz",
"integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
},
"node_modules/undici-types": {
"version": "6.20.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz",
"integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==",
"dev": true,
"license": "MIT",
"peer": true
},
"node_modules/unique-filename": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-4.0.0.tgz",
"integrity": "sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"unique-slug": "^5.0.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/unique-slug": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-5.0.0.tgz",
"integrity": "sha512-9OdaqO5kwqR+1kVgHAhsp5vPNU0hnxRa26rBFNfNgM7M6pNtgzeBn3s/xbyCQL3dcjzOatcef6UUHpB/6MaETg==",
"dev": true,
"license": "ISC",
"dependencies": {
"imurmurhash": "^0.1.4"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/update-browserslist-db": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.2.tgz",
"integrity": "sha512-PPypAm5qvlD7XMZC3BujecnaOxwhrtoFR+Dqkk5Aa/6DssiH0ibKoketaj9w8LP7Bont1rYeoV5plxD7RTEPRg==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/browserslist"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"escalade": "^3.2.0",
"picocolors": "^1.1.1"
},
"bin": {
"update-browserslist-db": "cli.js"
},
"peerDependencies": {
"browserslist": ">= 4.21.0"
}
},
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
"dev": true,
"license": "MIT"
},
"node_modules/validate-npm-package-license": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz",
"integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"spdx-correct": "^3.0.0",
"spdx-expression-parse": "^3.0.0"
}
},
"node_modules/validate-npm-package-name": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-6.0.0.tgz",
"integrity": "sha512-d7KLgL1LD3U3fgnvWEY1cQXoO/q6EQ1BSz48Sa149V/5zVTAbgmZIpyI8TRi6U9/JNyeYLlTKsEMPtLC27RFUg==",
"dev": true,
"license": "ISC",
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/vite": {
"version": "6.0.11",
"resolved": "https://registry.npmjs.org/vite/-/vite-6.0.11.tgz",
"integrity": "sha512-4VL9mQPKoHy4+FE0NnRE/kbY51TOfaknxAjt3fJbGJxhIpBZiqVzlZDEesWWsuREXHwNdAoOFZ9MkPEVXczHwg==",
"dev": true,
"license": "MIT",
"dependencies": {
"esbuild": "^0.24.2",
"postcss": "^8.4.49",
"rollup": "^4.23.0"
},
"bin": {
"vite": "bin/vite.js"
},
"engines": {
"node": "^18.0.0 || ^20.0.0 || >=22.0.0"
},
"funding": {
"url": "https://github.com/vitejs/vite?sponsor=1"
},
"optionalDependencies": {
"fsevents": "~2.3.3"
},
"peerDependencies": {
"@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0",
"jiti": ">=1.21.0",
"less": "*",
"lightningcss": "^1.21.0",
"sass": "*",
"sass-embedded": "*",
"stylus": "*",
"sugarss": "*",
"terser": "^5.16.0",
"tsx": "^4.8.1",
"yaml": "^2.4.2"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
},
"jiti": {
"optional": true
},
"less": {
"optional": true
},
"lightningcss": {
"optional": true
},
"sass": {
"optional": true
},
"sass-embedded": {
"optional": true
},
"stylus": {
"optional": true
},
"sugarss": {
"optional": true
},
"terser": {
"optional": true
},
"tsx": {
"optional": true
},
"yaml": {
"optional": true
}
}
},
"node_modules/watchpack": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz",
"integrity": "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw==",
"dev": true,
"license": "MIT",
"dependencies": {
"glob-to-regexp": "^0.4.1",
"graceful-fs": "^4.1.2"
},
"engines": {
"node": ">=10.13.0"
}
},
"node_modules/wcwidth": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz",
"integrity": "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==",
"dev": true,
"license": "MIT",
"dependencies": {
"defaults": "^1.0.3"
}
},
"node_modules/weak-lru-cache": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/weak-lru-cache/-/weak-lru-cache-1.2.2.tgz",
"integrity": "sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==",
"dev": true,
"license": "MIT",
"optional": true
},
"node_modules/which": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz",
"integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"isexe": "^3.1.1"
},
"bin": {
"node-which": "bin/which.js"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
}
},
"node_modules/wrap-ansi": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
"integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi-cjs": {
"name": "wrap-ansi",
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/wrap-ansi-cjs/node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi-cjs/node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
"dev": true,
"license": "MIT"
},
"node_modules/wrap-ansi-cjs/node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi-cjs/node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi-cjs/node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi/node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi/node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
"dev": true,
"license": "MIT"
},
"node_modules/wrap-ansi/node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi/node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/wrap-ansi/node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/y18n": {
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=10"
}
},
"node_modules/yallist": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
"integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
"dev": true,
"license": "ISC"
},
"node_modules/yargs": {
"version": "17.7.2",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
"dev": true,
"license": "MIT",
"dependencies": {
"cliui": "^8.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.3",
"y18n": "^5.0.5",
"yargs-parser": "^21.1.1"
},
"engines": {
"node": ">=12"
}
},
"node_modules/yargs-parser": {
"version": "21.1.1",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
"integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=12"
}
},
"node_modules/yargs/node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/yargs/node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
"dev": true,
"license": "MIT"
},
"node_modules/yargs/node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/yargs/node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/yargs/node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/yoctocolors-cjs": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.2.tgz",
"integrity": "sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/zone.js": {
"version": "0.15.0",
"resolved": "https://registry.npmjs.org/zone.js/-/zone.js-0.15.0.tgz",
"integrity": "sha512-9oxn0IIjbCZkJ67L+LkhYWRyAy7axphb3VgE2MBDlOqnmHMPWGYMxJxBYFueFq/JGY2GMwS0rU+UCLunEmy5UA==",
"license": "MIT"
}
}
} | json | github | https://github.com/angular/angular | adev/shared-docs/pipeline/tutorials/common/package-lock.json |
from django.utils.encoding import smart_str
from hashlib import sha1
from mediagenerator.generators.bundles.base import Filter
from mediagenerator.utils import find_file, read_text_file
from subprocess import Popen, PIPE
import os
import sys
class CoffeeScript(Filter):
    """Media-generator filter that compiles a CoffeeScript module to JavaScript.

    The compiled output is cached in memory and only regenerated when the
    source file's modification time changes on disk.
    """
    takes_input = False

    def __init__(self, **kwargs):
        self.config(kwargs, module=None)
        super(CoffeeScript, self).__init__(**kwargs)
        assert self.filetype == 'js', (
            'CoffeeScript only supports compilation to js. '
            'The parent filter expects "%s".' % self.filetype)
        # Cached compile results; invalidated when the source mtime changes.
        self._compiled = None
        self._compiled_hash = None
        self._mtime = None

    @classmethod
    def from_default(cls, name):
        """Map a bare module name to this filter's config dict."""
        return {'module': name}

    def get_output(self, variation):
        """Yield the compiled JavaScript for a production build."""
        self._regenerate(debug=False)
        yield self._compiled

    def get_dev_output(self, name, variation):
        """Return the compiled JavaScript for *name* during development."""
        assert name == self.module
        self._regenerate(debug=True)
        return self._compiled

    def get_dev_output_names(self, variation):
        """Yield a (name, content_hash) pair for the dev server."""
        self._regenerate(debug=True)
        yield self.module, self._compiled_hash

    def _regenerate(self, debug=False):
        """Recompile the module, but only if its source file changed on disk."""
        path = find_file(self.module)
        mtime = os.path.getmtime(path)
        if mtime == self._mtime:
            return
        source = read_text_file(path)
        self._compiled = self._compile(source, debug=debug)
        self._compiled_hash = sha1(smart_str(self._compiled)).hexdigest()
        self._mtime = mtime

    def _compile(self, input, debug=False):
        """Run the external "coffee" compiler over *input* and return the JS.

        Raises ValueError if the compiler is missing or exits non-zero.
        """
        try:
            # On Windows "coffee" is typically a .cmd shim that needs a shell.
            shell = sys.platform == 'win32'
            cmd = Popen(['coffee', '--compile', '--print', '--stdio', '--bare'],
                        stdin=PIPE, stdout=PIPE, stderr=PIPE,
                        shell=shell, universal_newlines=True)
            output, error = cmd.communicate(smart_str(input))
            # Use an explicit check, not `assert`: asserts are stripped under
            # `python -O`, which would silently hide compiler failures.
            if cmd.wait() != 0:
                raise ValueError('CoffeeScript command returned bad '
                                 'result:\n%s' % error)
            if isinstance(output, bytes):
                # Python 2: universal_newlines output is still a byte string.
                # On Python 3 it is already text and must not be decoded.
                output = output.decode('utf-8')
            return output
        except ValueError:
            raise
        except Exception as e:
            raise ValueError("Failed to run CoffeeScript compiler for this "
                "file. Please confirm that the \"coffee\" application is "
                "on your path and that you can run it from your own command "
                "line.\n"
                "Error was: %s" % e)
benchmark:
vm_string_literal: |
x = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
loop_count: 6000000 | unknown | github | https://github.com/ruby/ruby | benchmark/vm_string_literal.yml |
from __future__ import unicode_literals
import boto3
import sure # noqa
from moto import mock_opsworks
from moto import mock_ec2
@mock_opsworks
def test_create_instance():
    """Instances can be created in a stack/layer; bad IDs are rejected."""
    conn = boto3.client('opsworks', region_name='us-east-1')

    stack = conn.create_stack(
        Name="test_stack_1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn",
    )['StackId']
    layer = conn.create_layer(
        StackId=stack,
        Type="custom",
        Name="TestLayer",
        Shortname="TestLayerShortName",
    )['LayerId']

    # A valid stack + layer pair yields an instance id.
    created = conn.create_instance(
        StackId=stack, LayerIds=[layer], InstanceType="t2.micro")
    created.should.contain("InstanceId")

    # Unknown stack and layer IDs must both raise.
    conn.create_instance.when.called_with(
        StackId="nothere", LayerIds=[layer], InstanceType="t2.micro"
    ).should.throw(Exception, "Unable to find stack with ID nothere")
    conn.create_instance.when.called_with(
        StackId=stack, LayerIds=["nothere"], InstanceType="t2.micro"
    ).should.throw(Exception, "nothere")
@mock_opsworks
def test_describe_instances():
    """
    create two stacks, with 1 layer and 2 layers (S1L1, S2L1, S2L2)
    populate S1L1 with 2 instances (S1L1_i1, S1L1_i2)
    populate S2L1 with 1 instance (S2L1_i1)
    populate S2L2 with 2 instances (S2L2_i1, S2L2_i2)
    then verify that describe_instances filtered by StackId, InstanceIds
    and LayerId each return the expected instance sets.
    """
    client = boto3.client('opsworks', region_name='us-east-1')
    S1 = client.create_stack(
        Name="S1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn"
    )['StackId']
    S1L1 = client.create_layer(
        StackId=S1,
        Type="custom",
        Name="S1L1",
        Shortname="S1L1"
    )['LayerId']
    S2 = client.create_stack(
        Name="S2",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn"
    )['StackId']
    S2L1 = client.create_layer(
        StackId=S2,
        Type="custom",
        Name="S2L1",
        Shortname="S2L1"
    )['LayerId']
    S2L2 = client.create_layer(
        StackId=S2,
        Type="custom",
        Name="S2L2",
        Shortname="S2L2"
    )['LayerId']

    S1L1_i1 = client.create_instance(
        StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro"
    )['InstanceId']
    S1L1_i2 = client.create_instance(
        StackId=S1, LayerIds=[S1L1], InstanceType="t2.micro"
    )['InstanceId']
    S2L1_i1 = client.create_instance(
        StackId=S2, LayerIds=[S2L1], InstanceType="t2.micro"
    )['InstanceId']
    S2L2_i1 = client.create_instance(
        StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro"
    )['InstanceId']
    S2L2_i2 = client.create_instance(
        StackId=S2, LayerIds=[S2L2], InstanceType="t2.micro"
    )['InstanceId']

    # Instances in Stack 1: both S1L1 instances, findable by StackId,
    # by explicit InstanceIds, and by LayerId — all three must agree.
    response = client.describe_instances(StackId=S1)['Instances']
    response.should.have.length_of(2)
    S1L1_i1.should.be.within([i["InstanceId"] for i in response])
    S1L1_i2.should.be.within([i["InstanceId"] for i in response])

    response2 = client.describe_instances(InstanceIds=[S1L1_i1, S1L1_i2])['Instances']
    sorted(response2, key=lambda d: d['InstanceId']).should.equal(
        sorted(response, key=lambda d: d['InstanceId']))

    response3 = client.describe_instances(LayerId=S1L1)['Instances']
    sorted(response3, key=lambda d: d['InstanceId']).should.equal(
        sorted(response, key=lambda d: d['InstanceId']))

    # Instances in Stack 2: 1 in S2L1 plus 2 in S2L2.
    response = client.describe_instances(StackId=S2)['Instances']
    response.should.have.length_of(3)
    S2L1_i1.should.be.within([i["InstanceId"] for i in response])
    S2L2_i1.should.be.within([i["InstanceId"] for i in response])
    S2L2_i2.should.be.within([i["InstanceId"] for i in response])

    # Per-layer filtering must not leak instances across layers.
    response = client.describe_instances(LayerId=S2L1)['Instances']
    response.should.have.length_of(1)
    S2L1_i1.should.be.within([i["InstanceId"] for i in response])

    response = client.describe_instances(LayerId=S2L2)['Instances']
    response.should.have.length_of(2)
    S2L1_i1.should_not.be.within([i["InstanceId"] for i in response])
@mock_opsworks
@mock_ec2
def test_ec2_integration():
    """
    Instances created via OpsWorks should be discoverable via the EC2 API,
    but only after they have been started: before ``start_instance`` the
    EC2 side reports no reservations, afterwards the EC2 record and the
    OpsWorks record refer to the same underlying instance.
    """
    opsworks = boto3.client('opsworks', region_name='us-east-1')
    stack_id = opsworks.create_stack(
        Name="S1",
        Region="us-east-1",
        ServiceRoleArn="service_arn",
        DefaultInstanceProfileArn="profile_arn"
    )['StackId']
    layer_id = opsworks.create_layer(
        StackId=stack_id,
        Type="custom",
        Name="S1L1",
        Shortname="S1L1"
    )['LayerId']
    instance_id = opsworks.create_instance(
        StackId=stack_id, LayerIds=[layer_id], InstanceType="t2.micro"
    )['InstanceId']

    ec2 = boto3.client('ec2', region_name='us-east-1')

    # Before starting the instance, it shouldn't be discoverable via ec2.
    # NOTE: use the sure-style assertion directly — wrapping it in a bare
    # `assert` (as the original did) is redundant, since `.should.be.empty`
    # already raises AssertionError on failure, and it is inconsistent with
    # every other assertion in this file.
    reservations = ec2.describe_instances()['Reservations']
    reservations.should.be.empty

    # After starting the instance, it should be discoverable via ec2.
    opsworks.start_instance(InstanceId=instance_id)
    reservations = ec2.describe_instances()['Reservations']
    reservations[0]['Instances'].should.have.length_of(1)
    instance = reservations[0]['Instances'][0]
    opsworks_instance = opsworks.describe_instances(StackId=stack_id)['Instances'][0]

    # The EC2-side record and the OpsWorks-side record must describe the
    # same backing instance (ID and private IP agree across both APIs).
    instance['InstanceId'].should.equal(opsworks_instance['Ec2InstanceId'])
    instance['PrivateIpAddress'].should.equal(opsworks_instance['PrivateIp'])
#!/usr/bin/env bash
# Copyright 2023 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Convenience wrapper that downloads protoc and installs it under
# third_party. Invoke as: `hack/install-protoc.sh`.

# Fail fast: abort on command errors, unset variables, and pipeline failures.
set -o errexit -o nounset -o pipefail

# Resolve the repository root relative to this script's location so the
# script works regardless of the caller's working directory.
KUBE_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
source "${KUBE_ROOT}/hack/lib/protoc.sh"

# Delegate the actual download/install to the shared protoc library.
kube::protoc::install
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.