diff --git a/.gitattributes b/.gitattributes
index df75ca26a0b787df37b6c87251adef008a2f29e1..f0c613eb78839418ba0bdc9ba5eaaaeb4d9abeda 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1140,3 +1140,6 @@ mgm/lib/python3.10/__pycache__/_pydecimal.cpython-310.pyc filter=lfs diff=lfs me
mgm/lib/python3.10/lib-dynload/readline.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
wemm/lib/python3.10/site-packages/torch/lib/libcudnn_adv_infer.so.8 filter=lfs diff=lfs merge=lfs -text
vlmpy310/lib/python3.10/site-packages/decord/libdecord.so filter=lfs diff=lfs merge=lfs -text
+llava_next/lib/python3.10/site-packages/pillow.libs/libxcb-b8a56d01.so.1.1.0 filter=lfs diff=lfs merge=lfs -text
+llava_next/lib/python3.10/site-packages/pillow.libs/libfreetype-be14bf51.so.6.20.1 filter=lfs diff=lfs merge=lfs -text
+llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlicommon-3ecfe81c.so.1 filter=lfs diff=lfs merge=lfs -text
diff --git a/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/INSTALLER b/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/LICENSE b/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/METADATA b/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..98cb32fe27e6f88e0bbae29f75cae71c50b76439
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/METADATA
@@ -0,0 +1,359 @@
+Metadata-Version: 2.1
+Name: accelerate
+Version: 0.21.0
+Summary: Accelerate
+Home-page: https://github.com/huggingface/accelerate
+Author: The HuggingFace team
+Author-email: sylvain@huggingface.co
+License: Apache
+Keywords: deep learning
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+Requires-Python: >=3.8.0
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: numpy (>=1.17)
+Requires-Dist: packaging (>=20.0)
+Requires-Dist: psutil
+Requires-Dist: pyyaml
+Requires-Dist: torch (>=1.10.0)
+Provides-Extra: dev
+Requires-Dist: black (~=23.1) ; extra == 'dev'
+Requires-Dist: ruff (>=0.0.241) ; extra == 'dev'
+Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'dev'
+Requires-Dist: urllib3 (<2.0.0) ; extra == 'dev'
+Requires-Dist: pytest ; extra == 'dev'
+Requires-Dist: pytest-xdist ; extra == 'dev'
+Requires-Dist: pytest-subtests ; extra == 'dev'
+Requires-Dist: parameterized ; extra == 'dev'
+Requires-Dist: datasets ; extra == 'dev'
+Requires-Dist: evaluate ; extra == 'dev'
+Requires-Dist: transformers ; extra == 'dev'
+Requires-Dist: scipy ; extra == 'dev'
+Requires-Dist: scikit-learn ; extra == 'dev'
+Requires-Dist: deepspeed ; extra == 'dev'
+Requires-Dist: tqdm ; extra == 'dev'
+Requires-Dist: rich ; extra == 'dev'
+Provides-Extra: docs
+Provides-Extra: quality
+Requires-Dist: black (~=23.1) ; extra == 'quality'
+Requires-Dist: ruff (>=0.0.241) ; extra == 'quality'
+Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'quality'
+Requires-Dist: urllib3 (<2.0.0) ; extra == 'quality'
+Provides-Extra: rich
+Requires-Dist: rich ; extra == 'rich'
+Provides-Extra: sagemaker
+Requires-Dist: sagemaker ; extra == 'sagemaker'
+Provides-Extra: test_dev
+Requires-Dist: datasets ; extra == 'test_dev'
+Requires-Dist: evaluate ; extra == 'test_dev'
+Requires-Dist: transformers ; extra == 'test_dev'
+Requires-Dist: scipy ; extra == 'test_dev'
+Requires-Dist: scikit-learn ; extra == 'test_dev'
+Requires-Dist: deepspeed ; extra == 'test_dev'
+Requires-Dist: tqdm ; extra == 'test_dev'
+Provides-Extra: test_prod
+Requires-Dist: pytest ; extra == 'test_prod'
+Requires-Dist: pytest-xdist ; extra == 'test_prod'
+Requires-Dist: pytest-subtests ; extra == 'test_prod'
+Requires-Dist: parameterized ; extra == 'test_prod'
+Provides-Extra: test_trackers
+Requires-Dist: wandb ; extra == 'test_trackers'
+Requires-Dist: comet-ml ; extra == 'test_trackers'
+Requires-Dist: tensorboard ; extra == 'test_trackers'
+Provides-Extra: testing
+Requires-Dist: pytest ; extra == 'testing'
+Requires-Dist: pytest-xdist ; extra == 'testing'
+Requires-Dist: pytest-subtests ; extra == 'testing'
+Requires-Dist: parameterized ; extra == 'testing'
+Requires-Dist: datasets ; extra == 'testing'
+Requires-Dist: evaluate ; extra == 'testing'
+Requires-Dist: transformers ; extra == 'testing'
+Requires-Dist: scipy ; extra == 'testing'
+Requires-Dist: scikit-learn ; extra == 'testing'
+Requires-Dist: deepspeed ; extra == 'testing'
+Requires-Dist: tqdm ; extra == 'testing'
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Run your *raw* PyTorch training script on any kind of device
+
+
+
+
+
+
+## Easy to integrate
+
+🤗 Accelerate was created for PyTorch users who like to write the training loop of PyTorch models but are reluctant to write and maintain the boilerplate code needed to use multi-GPUs/TPU/fp16.
+
+🤗 Accelerate abstracts exactly and only the boilerplate code related to multi-GPUs/TPU/fp16 and leaves the rest of your code unchanged.
+
+Here is an example:
+
+```diff
+ import torch
+ import torch.nn.functional as F
+ from datasets import load_dataset
++ from accelerate import Accelerator
+
++ accelerator = Accelerator()
+- device = 'cpu'
++ device = accelerator.device
+
+ model = torch.nn.Transformer().to(device)
+ optimizer = torch.optim.Adam(model.parameters())
+
+ dataset = load_dataset('my_dataset')
+ data = torch.utils.data.DataLoader(dataset, shuffle=True)
+
++ model, optimizer, data = accelerator.prepare(model, optimizer, data)
+
+ model.train()
+ for epoch in range(10):
+ for source, targets in data:
+ source = source.to(device)
+ targets = targets.to(device)
+
+ optimizer.zero_grad()
+
+ output = model(source)
+ loss = F.cross_entropy(output, targets)
+
+- loss.backward()
++ accelerator.backward(loss)
+
+ optimizer.step()
+```
+
+As you can see in this example, by adding 5-lines to any standard PyTorch training script you can now run on any kind of single or distributed node setting (single CPU, single GPU, multi-GPUs and TPUs) as well as with or without mixed precision (fp8, fp16, bf16).
+
+In particular, the same code can then be run without modification on your local machine for debugging or your training environment.
+
+🤗 Accelerate even handles the device placement for you (which requires a few more changes to your code, but is safer in general), so you can even simplify your training loop further:
+
+```diff
+ import torch
+ import torch.nn.functional as F
+ from datasets import load_dataset
++ from accelerate import Accelerator
+
+- device = 'cpu'
++ accelerator = Accelerator()
+
+- model = torch.nn.Transformer().to(device)
++ model = torch.nn.Transformer()
+ optimizer = torch.optim.Adam(model.parameters())
+
+ dataset = load_dataset('my_dataset')
+ data = torch.utils.data.DataLoader(dataset, shuffle=True)
+
++ model, optimizer, data = accelerator.prepare(model, optimizer, data)
+
+ model.train()
+ for epoch in range(10):
+ for source, targets in data:
+- source = source.to(device)
+- targets = targets.to(device)
+
+ optimizer.zero_grad()
+
+ output = model(source)
+ loss = F.cross_entropy(output, targets)
+
+- loss.backward()
++ accelerator.backward(loss)
+
+ optimizer.step()
+```
+
+Want to learn more? Check out the [documentation](https://huggingface.co/docs/accelerate) or have a look at our [examples](https://github.com/huggingface/accelerate/tree/main/examples).
+
+## Launching script
+
+🤗 Accelerate also provides an optional CLI tool that allows you to quickly configure and test your training environment before launching the scripts. No need to remember how to use `torch.distributed.run` or to write a specific launcher for TPU training!
+On your machine(s) just run:
+
+```bash
+accelerate config
+```
+
+and answer the questions asked. This will generate a config file that will be used automatically to properly set the default options when doing
+
+```bash
+accelerate launch my_script.py --args_to_my_script
+```
+
+For instance, here is how you would run the GLUE example on the MRPC task (from the root of the repo):
+
+```bash
+accelerate launch examples/nlp_example.py
+```
+
+This CLI tool is **optional**, and you can still use `python my_script.py` or `python -m torchrun my_script.py` at your convenience.
+
+You can also directly pass in the arguments you would to `torchrun` as arguments to `accelerate launch` if you wish to not run` accelerate config`.
+
+For example, here is how to launch on two GPUs:
+
+```bash
+accelerate launch --multi_gpu --num_processes 2 examples/nlp_example.py
+```
+
+To learn more, check the CLI documentation available [here](https://huggingface.co/docs/accelerate/package_reference/cli).
+
+## Launching multi-CPU run using MPI
+
+🤗 Here is another way to launch multi-CPU run using MPI. You can learn how to install Open MPI on [this page](https://www.open-mpi.org/faq/?category=building#easy-build). You can use Intel MPI or MVAPICH as well.
+Once you have MPI setup on your cluster, just run:
+
+```bash
+mpirun -np 2 python examples/nlp_example.py
+```
+
+## Launching training using DeepSpeed
+
+🤗 Accelerate supports training on single/multiple GPUs using DeepSpeed. To use it, you don't need to change anything in your training code; you can set everything using just `accelerate config`. However, if you desire to tweak your DeepSpeed related args from your Python script, we provide you the `DeepSpeedPlugin`.
+
+```python
+from accelerate import Accelerator, DeepSpeedPlugin
+
+# deepspeed needs to know your gradient accumulation steps beforehand, so don't forget to pass it
+# Remember you still need to do gradient accumulation by yourself, just like you would have done without deepspeed
+deepspeed_plugin = DeepSpeedPlugin(zero_stage=2, gradient_accumulation_steps=2)
+accelerator = Accelerator(mixed_precision='fp16', deepspeed_plugin=deepspeed_plugin)
+
+# How to save your 🤗 Transformer?
+accelerator.wait_for_everyone()
+unwrapped_model = accelerator.unwrap_model(model)
+unwrapped_model.save_pretrained(save_dir, save_function=accelerator.save, state_dict=accelerator.get_state_dict(model))
+```
+
+Note: DeepSpeed support is experimental for now. In case you get into some problem, please open an issue.
+
+## Launching your training from a notebook
+
+🤗 Accelerate also provides a `notebook_launcher` function you can use in a notebook to launch a distributed training. This is especially useful for Colab or Kaggle notebooks with a TPU backend. Just define your training loop in a `training_function` then in your last cell, add:
+
+```python
+from accelerate import notebook_launcher
+
+notebook_launcher(training_function)
+```
+
+An example can be found in [this notebook](https://github.com/huggingface/notebooks/blob/main/examples/accelerate_examples/simple_nlp_example.ipynb). [](https://colab.research.google.com/github/huggingface/notebooks/blob/main/examples/accelerate_examples/simple_nlp_example.ipynb)
+
+## Why should I use 🤗 Accelerate?
+
+You should use 🤗 Accelerate when you want to easily run your training scripts in a distributed environment without having to renounce full control over your training loop. This is not a high-level framework above PyTorch, just a thin wrapper so you don't have to learn a new library. In fact, the whole API of 🤗 Accelerate is in one class, the `Accelerator` object.
+
+## Why shouldn't I use 🤗 Accelerate?
+
+You shouldn't use 🤗 Accelerate if you don't want to write a training loop yourself. There are plenty of high-level libraries above PyTorch that will offer you that, 🤗 Accelerate is not one of them.
+
+## Frameworks using 🤗 Accelerate
+
+If you like the simplicity of 🤗 Accelerate but would prefer a higher-level abstraction around its capabilities, some frameworks and libraries that are built on top of 🤗 Accelerate are listed below:
+
+* [Animus](https://github.com/Scitator/animus) is a minimalistic framework to run machine learning experiments. Animus highlights common "breakpoints" in ML experiments and provides a unified interface for them within [IExperiment](https://github.com/Scitator/animus/blob/main/animus/core.py#L76).
+* [Catalyst](https://github.com/catalyst-team/catalyst#getting-started) is a PyTorch framework for Deep Learning Research and Development. It focuses on reproducibility, rapid experimentation, and codebase reuse so you can create something new rather than write yet another train loop. Catalyst provides a [Runner](https://catalyst-team.github.io/catalyst/api/core.html#runner) to connect all parts of the experiment: hardware backend, data transformations, model training, and inference logic.
+* [fastai](https://github.com/fastai/fastai#installing) is a PyTorch framework for Deep Learning that simplifies training fast and accurate neural nets using modern best practices. fastai provides a [Learner](https://docs.fast.ai/learner.html#Learner) to handle the training, fine-tuning, and inference of deep learning algorithms.
+* [Finetuner](https://github.com/jina-ai/finetuner) is a service that enables models to create higher-quality embeddings for semantic search, visual similarity search, cross-modal text<->image search, recommendation systems, clustering, duplication detection, anomaly detection, or other uses.
+* [InvokeAI](https://github.com/invoke-ai/InvokeAI) is a creative engine for Stable Diffusion models, offering industry-leading WebUI, terminal usage support, and serves as the foundation for many commercial products.
+* [Kornia](https://kornia.readthedocs.io/en/latest/get-started/introduction.html) is a differentiable library that allows classical computer vision to be integrated into deep learning models. Kornia provides a [Trainer](https://kornia.readthedocs.io/en/latest/x.html#kornia.x.Trainer) with the specific purpose to train and fine-tune the supported deep learning algorithms within the library.
+* [Open Assistant](https://projects.laion.ai/Open-Assistant/) is a chat-based assistant that understands tasks, can interact with their party systems, and retrieve information dynamically to do so.
+* [pytorch-accelerated](https://github.com/Chris-hughes10/pytorch-accelerated) is a lightweight training library, with a streamlined feature set centered around a general-purpose [Trainer](https://pytorch-accelerated.readthedocs.io/en/latest/trainer.html), that places a huge emphasis on simplicity and transparency; enabling users to understand exactly what is going on under the hood, but without having to write and maintain the boilerplate themselves!
+* [Stable Diffusion web UI](https://github.com/AUTOMATIC1111/stable-diffusion-webui) is an open-source browser-based easy-to-use interface based on the Gradio library for Stable Diffusion.
+* [torchkeras](https://github.com/lyhue1991/torchkeras) is a simple tool for training pytorch model just in a keras style, a dynamic and beautiful plot is provided in notebook to monitor your loss or metric.
+* [transformers](https://github.com/huggingface/transformers) as a tool for helping train state-of-the-art machine learning models in PyTorch, Tensorflow, and JAX. (Accelerate is the backend for the PyTorch side).
+
+
+## Installation
+
+This repository is tested on Python 3.8+ and PyTorch 1.10.0+
+
+You should install 🤗 Accelerate in a [virtual environment](https://docs.python.org/3/library/venv.html). If you're unfamiliar with Python virtual environments, check out the [user guide](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/).
+
+First, create a virtual environment with the version of Python you're going to use and activate it.
+
+Then, you will need to install PyTorch: refer to the [official installation page](https://pytorch.org/get-started/locally/#start-locally) regarding the specific install command for your platform. Then 🤗 Accelerate can be installed using pip as follows:
+
+```bash
+pip install accelerate
+```
+
+## Supported integrations
+
+- CPU only
+- multi-CPU on one node (machine)
+- multi-CPU on several nodes (machines)
+- single GPU
+- multi-GPU on one node (machine)
+- multi-GPU on several nodes (machines)
+- TPU
+- FP16/BFloat16 mixed precision
+- FP8 mixed precision with [Transformer Engine](https://github.com/NVIDIA/TransformerEngine)
+- DeepSpeed support (Experimental)
+- PyTorch Fully Sharded Data Parallel (FSDP) support (Experimental)
+- Megatron-LM support (Experimental)
+
+## Citing 🤗 Accelerate
+
+If you use 🤗 Accelerate in your publication, please cite it by using the following BibTeX entry.
+
+```bibtex
+@Misc{accelerate,
+ title = {Accelerate: Training and inference at scale made simple, efficient and adaptable.},
+ author = {Sylvain Gugger, Lysandre Debut, Thomas Wolf, Philipp Schmid, Zachary Mueller, Sourab Mangrulkar},
+ howpublished = {\url{https://github.com/huggingface/accelerate}},
+ year = {2022}
+}
+```
diff --git a/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/RECORD b/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..870f7aee651d18d8ac60ae9b0ae7632f21169a59
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/RECORD
@@ -0,0 +1,151 @@
+../../../bin/accelerate,sha256=ykjGr7JOpA2q7Js29fX0TeeqlhovksFpWm1rYy0cS6I,249
+../../../bin/accelerate-config,sha256=ILoBYa125VMtXzN3GXc1Ge-8W5TBU-oxTW62-HNyoBU,241
+../../../bin/accelerate-launch,sha256=UGDafjw0crDny1FqhDGqkxPfR-fLzR_CpHwSaN6t4OM,241
+accelerate-0.21.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+accelerate-0.21.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+accelerate-0.21.0.dist-info/METADATA,sha256=DmKcH62uEAfKORx0NLLvMCTfs8DWyKWaU5Tw81MC2hk,17758
+accelerate-0.21.0.dist-info/RECORD,,
+accelerate-0.21.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+accelerate-0.21.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+accelerate-0.21.0.dist-info/entry_points.txt,sha256=rG3RclT0BGKdw2nRZCtUGAsb1jzVWEG_ZX9WQBEeSAA,175
+accelerate-0.21.0.dist-info/top_level.txt,sha256=esVfdxTidsjQ90zsN_rPpjLFJ4ijRlx4mnLrG09hlt4,11
+accelerate/__init__.py,sha256=n484ccMKp-qETKV0RH-qSXnIi-ihHg8a-_8rJrzcDZk,764
+accelerate/__pycache__/__init__.cpython-310.pyc,,
+accelerate/__pycache__/accelerator.cpython-310.pyc,,
+accelerate/__pycache__/big_modeling.cpython-310.pyc,,
+accelerate/__pycache__/checkpointing.cpython-310.pyc,,
+accelerate/__pycache__/data_loader.cpython-310.pyc,,
+accelerate/__pycache__/hooks.cpython-310.pyc,,
+accelerate/__pycache__/launchers.cpython-310.pyc,,
+accelerate/__pycache__/local_sgd.cpython-310.pyc,,
+accelerate/__pycache__/logging.cpython-310.pyc,,
+accelerate/__pycache__/memory_utils.cpython-310.pyc,,
+accelerate/__pycache__/optimizer.cpython-310.pyc,,
+accelerate/__pycache__/scheduler.cpython-310.pyc,,
+accelerate/__pycache__/state.cpython-310.pyc,,
+accelerate/__pycache__/tracking.cpython-310.pyc,,
+accelerate/accelerator.py,sha256=kzoZzXQRpE5G0jxU2_yMKrlLPykiXmas7tQp5R3UP4o,129438
+accelerate/big_modeling.py,sha256=_KpgBUeoPfr7rqlUbr-IOWEbK28ETPU8LWcysRI8I64,23534
+accelerate/checkpointing.py,sha256=zxBD37SrV0xJP79IyfaszvoAYLArECbLgHxlqV57KBE,8564
+accelerate/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+accelerate/commands/__pycache__/__init__.cpython-310.pyc,,
+accelerate/commands/__pycache__/accelerate_cli.cpython-310.pyc,,
+accelerate/commands/__pycache__/env.cpython-310.pyc,,
+accelerate/commands/__pycache__/launch.cpython-310.pyc,,
+accelerate/commands/__pycache__/test.cpython-310.pyc,,
+accelerate/commands/__pycache__/tpu.cpython-310.pyc,,
+accelerate/commands/accelerate_cli.py,sha256=F6_h_3vCeAeJdY_i0xk8Wpy_dQtBdTjVzT2wHlwSzi0,1605
+accelerate/commands/config/__init__.py,sha256=iJK8dgj3pc5Vdr1E7UuGoFu-BlybyXLxYDoTg9gXngE,1645
+accelerate/commands/config/__pycache__/__init__.cpython-310.pyc,,
+accelerate/commands/config/__pycache__/cluster.cpython-310.pyc,,
+accelerate/commands/config/__pycache__/config.cpython-310.pyc,,
+accelerate/commands/config/__pycache__/config_args.cpython-310.pyc,,
+accelerate/commands/config/__pycache__/config_utils.cpython-310.pyc,,
+accelerate/commands/config/__pycache__/default.cpython-310.pyc,,
+accelerate/commands/config/__pycache__/sagemaker.cpython-310.pyc,,
+accelerate/commands/config/__pycache__/update.cpython-310.pyc,,
+accelerate/commands/config/cluster.py,sha256=QQeI2T7qB1Je-uM-Ip54GoKaJIJNtazM3ud8B4irFJU,26202
+accelerate/commands/config/config.py,sha256=FuRlQvOjgATEtyqOSsGD-KEtOCvACOHjs2C-krrtldk,3035
+accelerate/commands/config/config_args.py,sha256=UDzUi7on2MKOmFTGhuD2TSQvUfYebD3WjEQPKdoiNlw,8691
+accelerate/commands/config/config_utils.py,sha256=-xBy8rdXGRNq4-AVDQKJYVUVXRy3WMRdp3skohBGRQg,2879
+accelerate/commands/config/default.py,sha256=clFfkx8JuY39TfwtBnFrg027xvvsc4G4p3OHoWptqfI,5016
+accelerate/commands/config/sagemaker.py,sha256=af3ZXG2ybfhFOGTp97BorbGZYd20kY_wKJZQykFv074,9947
+accelerate/commands/config/update.py,sha256=NXW1J7GkUHpg71QlIXsmMB_0z8S8IZo2FWax5POwrhc,2395
+accelerate/commands/env.py,sha256=7guiNUOE0SFe2CRexn2FNmI-4yuwGOXSJLd3QG8tVpA,3056
+accelerate/commands/launch.py,sha256=xXgCcRfa73SkLoaqkRayGZmFDimmqVgLUOJbD3jH1Sw,38005
+accelerate/commands/menu/__init__.py,sha256=5EhDZN5_e1TAuh9_KqJ4Ghs61offoeGZy1pktSBDpa0,39
+accelerate/commands/menu/__pycache__/__init__.cpython-310.pyc,,
+accelerate/commands/menu/__pycache__/cursor.cpython-310.pyc,,
+accelerate/commands/menu/__pycache__/helpers.cpython-310.pyc,,
+accelerate/commands/menu/__pycache__/input.cpython-310.pyc,,
+accelerate/commands/menu/__pycache__/keymap.cpython-310.pyc,,
+accelerate/commands/menu/__pycache__/selection_menu.cpython-310.pyc,,
+accelerate/commands/menu/cursor.py,sha256=-lmpJVAzvNc0c3EOtSuLoKB59zqylVCbYyWLPnrOmvQ,2028
+accelerate/commands/menu/helpers.py,sha256=KrSB5fJjH4MUEUAQJ6bYaN16AYcnl9UalDrPD3DYeeg,1483
+accelerate/commands/menu/input.py,sha256=uW2ywuqWPOKjkS7XBjqNpuVWLTgVKici2_xLyltEbMs,2581
+accelerate/commands/menu/keymap.py,sha256=c9YEMMmNlBGtMiWFk2rdhtTSCZ9w_uJ77cNCwAKguHk,4087
+accelerate/commands/menu/selection_menu.py,sha256=UZKwSIZKKG60y2fuWbSoCx0RbrPS4MbY2DwvxWRBIBQ,4920
+accelerate/commands/test.py,sha256=whf_g7X263A5OErEHRzKu_L5x6HWbIIVNS8N5ERtGao,2179
+accelerate/commands/tpu.py,sha256=OnFQNu9zhlK5D7xXouZZXJevN5623Jgy_HsHTuy4HAE,5553
+accelerate/data_loader.py,sha256=d0HUd6b7AvGPjlhx00i5Y_xmmlGCH0MQIhUlXwLEjLg,40793
+accelerate/hooks.py,sha256=XfcTHugSE7rBm8Gpa8YQ5_4l0z9gHo5X908bK5eq6xo,24746
+accelerate/launchers.py,sha256=jr0jXsWRbEYhSqwD8C3D0-_VUqGHkWy3ikRQ9thbxDw,8335
+accelerate/local_sgd.py,sha256=znJcwwpRb0imRslW5_uQ4OYJmM8zxekMv4XTnbzXlZk,3924
+accelerate/logging.py,sha256=NgFWX5C7yERtFQYkbgD9TgTST6JKtlJysruz868L-jI,4283
+accelerate/memory_utils.py,sha256=3R5LoeHl6GgTZ-IMPrDZMdaEehWarGdPqODushb-6pg,862
+accelerate/optimizer.py,sha256=OnLV0iyOsBcys9L-6fAgCJvdRwJk-N-DfSK34K-jkx0,6454
+accelerate/scheduler.py,sha256=des_4M_Tt1W8gCYZZbLla0GHBEgJY3Wx2EGBQPTzeiY,4238
+accelerate/state.py,sha256=aFx7O6kSzXOT3zNX8HrYNO68GzqbgVQgDriR0a2t2Hk,42891
+accelerate/test_utils/__init__.py,sha256=XwDtBwCjU1wTbjyLng3knf3v0rPZkZX9yu8KmuWidEk,534
+accelerate/test_utils/__pycache__/__init__.cpython-310.pyc,,
+accelerate/test_utils/__pycache__/examples.cpython-310.pyc,,
+accelerate/test_utils/__pycache__/testing.cpython-310.pyc,,
+accelerate/test_utils/__pycache__/training.cpython-310.pyc,,
+accelerate/test_utils/examples.py,sha256=PJAAy5MjIeyH5Sgj9sFqh0VGebfI7Tg4i_3OBABVVYg,7301
+accelerate/test_utils/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+accelerate/test_utils/scripts/__pycache__/__init__.cpython-310.pyc,,
+accelerate/test_utils/scripts/__pycache__/test_cli.cpython-310.pyc,,
+accelerate/test_utils/scripts/__pycache__/test_distributed_data_loop.cpython-310.pyc,,
+accelerate/test_utils/scripts/__pycache__/test_ops.cpython-310.pyc,,
+accelerate/test_utils/scripts/__pycache__/test_script.cpython-310.pyc,,
+accelerate/test_utils/scripts/__pycache__/test_sync.cpython-310.pyc,,
+accelerate/test_utils/scripts/external_deps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+accelerate/test_utils/scripts/external_deps/__pycache__/__init__.cpython-310.pyc,,
+accelerate/test_utils/scripts/external_deps/__pycache__/test_checkpointing.cpython-310.pyc,,
+accelerate/test_utils/scripts/external_deps/__pycache__/test_metrics.cpython-310.pyc,,
+accelerate/test_utils/scripts/external_deps/__pycache__/test_peak_memory_usage.cpython-310.pyc,,
+accelerate/test_utils/scripts/external_deps/__pycache__/test_performance.cpython-310.pyc,,
+accelerate/test_utils/scripts/external_deps/test_checkpointing.py,sha256=eJ8dpY6Bi9De7Vb9oDw435NELTjWegjWD7wuckvkaoQ,10686
+accelerate/test_utils/scripts/external_deps/test_metrics.py,sha256=LXBzKN-TIXET9zzuQ856UTerLSSGcCqwytIcw0TIA4c,7199
+accelerate/test_utils/scripts/external_deps/test_peak_memory_usage.py,sha256=lqOBqkU5g772-e30bl3G3lVu8YtG1pLsWerWKS3XYwY,9793
+accelerate/test_utils/scripts/external_deps/test_performance.py,sha256=VqYjGaIK509389-iukIBb1397dFLbrWyHwjAKF3Fcvw,9093
+accelerate/test_utils/scripts/test_cli.py,sha256=EJClouXlerf7cpgqY1P1VY2ohUcRXk56GoVkM6-jmrU,227
+accelerate/test_utils/scripts/test_distributed_data_loop.py,sha256=TEqnW4WIlSDsMB9D2bcvAC_AB9hHtJRiERt3ZGKzK80,8236
+accelerate/test_utils/scripts/test_ops.py,sha256=NGTAilGAt7f_8Q2gVJogth9wEmXP5wAHK4EmP4rW65E,3560
+accelerate/test_utils/scripts/test_script.py,sha256=__b8IO__NwICAA90g6XbKMtwqMJXm3JUtShMMLxxSo4,23525
+accelerate/test_utils/scripts/test_sync.py,sha256=rwcjWzJAnninOBAbUwBztNaTodEMVkBT0wrdgHtD8BM,14441
+accelerate/test_utils/testing.py,sha256=KtWKTwulfEvsMw23o2LXInjkNnL-H3kztJ3oH3vPRNc,14217
+accelerate/test_utils/training.py,sha256=7RNVMmRb6WFCvGzyR2tWTaPL5tKO4YGzjXN0GFWvI8U,4019
+accelerate/tracking.py,sha256=PP8NaPFcW0bGdMPaGCM6oO4wt_3IbVKeU_KdSKI7LVY,28328
+accelerate/utils/__init__.py,sha256=kqdnZPFPLPAaJjBGlrDP9L3WCVcs6vMFL6TpGIKz5ik,4556
+accelerate/utils/__pycache__/__init__.cpython-310.pyc,,
+accelerate/utils/__pycache__/bnb.cpython-310.pyc,,
+accelerate/utils/__pycache__/constants.cpython-310.pyc,,
+accelerate/utils/__pycache__/dataclasses.cpython-310.pyc,,
+accelerate/utils/__pycache__/deepspeed.cpython-310.pyc,,
+accelerate/utils/__pycache__/environment.cpython-310.pyc,,
+accelerate/utils/__pycache__/fsdp_utils.cpython-310.pyc,,
+accelerate/utils/__pycache__/imports.cpython-310.pyc,,
+accelerate/utils/__pycache__/launch.cpython-310.pyc,,
+accelerate/utils/__pycache__/megatron_lm.cpython-310.pyc,,
+accelerate/utils/__pycache__/memory.cpython-310.pyc,,
+accelerate/utils/__pycache__/modeling.cpython-310.pyc,,
+accelerate/utils/__pycache__/offload.cpython-310.pyc,,
+accelerate/utils/__pycache__/operations.cpython-310.pyc,,
+accelerate/utils/__pycache__/other.cpython-310.pyc,,
+accelerate/utils/__pycache__/random.cpython-310.pyc,,
+accelerate/utils/__pycache__/rich.cpython-310.pyc,,
+accelerate/utils/__pycache__/torch_xla.cpython-310.pyc,,
+accelerate/utils/__pycache__/tqdm.cpython-310.pyc,,
+accelerate/utils/__pycache__/transformer_engine.cpython-310.pyc,,
+accelerate/utils/__pycache__/versions.cpython-310.pyc,,
+accelerate/utils/bnb.py,sha256=pVUVJNB4h9Y_1R_i_qnavE25h_a3XZYeh_fpFdqwuiM,20402
+accelerate/utils/constants.py,sha256=uJFvRq3h86h3P331v9_JxbIh9eNJksByXvLKcxCRt8c,2398
+accelerate/utils/dataclasses.py,sha256=gwF344kbSfit3TKGrfiyqdw8qF7U7OCAWCwz3LN4OVo,62268
+accelerate/utils/deepspeed.py,sha256=B-CDDAWZwAWe_gXKszeV4NcDJVe4ACPwvDg-5Q96OJQ,9939
+accelerate/utils/environment.py,sha256=ZG-2HbPrTihFX2Ak_bP0-uKLjs5KdNUrRUkhuLOwPX8,1302
+accelerate/utils/fsdp_utils.py,sha256=xMznlR-37wtYZmS_Rv40zBKULMTVn4u5VVeuaxoLGwI,8924
+accelerate/utils/imports.py,sha256=CpGodEPc234u-h7ESwm7my89nQgZ-suTrwZSc6GTasI,8387
+accelerate/utils/launch.py,sha256=AfvwsFUXUyNgpfM2hcum_XTP-gdQ5NLOcwzbP792QcI,23659
+accelerate/utils/megatron_lm.py,sha256=yOrhJ2u9NKBO3LR_FWlIxh44PWUx_cgdcCVDAjrBiE8,57263
+accelerate/utils/memory.py,sha256=d2DBzqkcoYAPlpK0aMQ5f5c-R-M6Wx9KBx_2UM6qhNw,4880
+accelerate/utils/modeling.py,sha256=ccAz-a34fQ9nTtjYPqPcV9cAisLW0XSzsIpIGLW4-Vo,61743
+accelerate/utils/offload.py,sha256=UoinJf_eUs_cIkdo48RA8kblwl3QGfRACQ8ncbdikeU,7596
+accelerate/utils/operations.py,sha256=PFsfIS8QpnV69cHN5u7NS941h4DFoRzUHOVb689c5k4,22002
+accelerate/utils/other.py,sha256=cdJFvN1cvjTq0JoBL-e064-pO3_JFyg878gRB33wuhA,5453
+accelerate/utils/random.py,sha256=IWVnFFjRuZZOO8HI9L7suHRSM33Pk2NXYywOpU0BKIg,4292
+accelerate/utils/rich.py,sha256=8JZX_uGMQX-BufdXxJpdne7BWd1KyLHSgbiGxrDMYr8,847
+accelerate/utils/torch_xla.py,sha256=Pq1tuqN0X_pWDVza6YgjfO45uoJdoRVRForLeLQzFus,1908
+accelerate/utils/tqdm.py,sha256=0cegNnuA93tKT3o6HDip90rPl8BODLFLu4jP1E3aJ08,1344
+accelerate/utils/transformer_engine.py,sha256=TlbaYL85ppjFD3DUgkUopTJkVIWxQOk476EpGb2LJ58,3665
+accelerate/utils/versions.py,sha256=UgmcbjBm--6CIx1ZamSAMjAK_B_2l48LbeaNygqej8M,2149
diff --git a/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/REQUESTED b/llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/REQUESTED
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/llava_next/lib/python3.10/site-packages/httpx-0.24.0.dist-info/RECORD b/llava_next/lib/python3.10/site-packages/httpx-0.24.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..767ebf2a02de2f950933029fcc9ebe11c9b6cf80
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/httpx-0.24.0.dist-info/RECORD
@@ -0,0 +1,57 @@
+../../../bin/httpx,sha256=guIUHe5POr5U3JDxGZvb1maB-fTl_I760cdYVBsxE8k,220
+httpx-0.24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+httpx-0.24.0.dist-info/METADATA,sha256=i1VGa3zTHR-dIW93Y9VApsMWu_AOOuzcWh4vz7_zqVA,8070
+httpx-0.24.0.dist-info/RECORD,,
+httpx-0.24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+httpx-0.24.0.dist-info/WHEEL,sha256=EI2JsGydwUL5GP9t6kzZv7G3HDPi7FuZDDf9In6amRM,87
+httpx-0.24.0.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37
+httpx-0.24.0.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508
+httpx/__init__.py,sha256=oCxVAsePEy5DE9eLhGAAq9H3RBGZUDaUROtGEyzbBRo,3210
+httpx/__pycache__/__init__.cpython-310.pyc,,
+httpx/__pycache__/__version__.cpython-310.pyc,,
+httpx/__pycache__/_api.cpython-310.pyc,,
+httpx/__pycache__/_auth.cpython-310.pyc,,
+httpx/__pycache__/_client.cpython-310.pyc,,
+httpx/__pycache__/_compat.cpython-310.pyc,,
+httpx/__pycache__/_config.cpython-310.pyc,,
+httpx/__pycache__/_content.cpython-310.pyc,,
+httpx/__pycache__/_decoders.cpython-310.pyc,,
+httpx/__pycache__/_exceptions.cpython-310.pyc,,
+httpx/__pycache__/_main.cpython-310.pyc,,
+httpx/__pycache__/_models.cpython-310.pyc,,
+httpx/__pycache__/_multipart.cpython-310.pyc,,
+httpx/__pycache__/_status_codes.cpython-310.pyc,,
+httpx/__pycache__/_types.cpython-310.pyc,,
+httpx/__pycache__/_urlparse.cpython-310.pyc,,
+httpx/__pycache__/_urls.cpython-310.pyc,,
+httpx/__pycache__/_utils.cpython-310.pyc,,
+httpx/__version__.py,sha256=9Gk5Kj_c778Xjs4WJbvag1s_DVWXql64qRyncd3a6kA,108
+httpx/_api.py,sha256=cVU9ErzaXve5rqoPoSHr9yJbovHtICrcxR7yBoNSeOw,13011
+httpx/_auth.py,sha256=WnTcFM__63hDCex56w5udXociXGctfo3BQuE7v3d4OQ,11766
+httpx/_client.py,sha256=R6Snj6msUWTWIdZIW2Lf5TYu_Zx6Oz8L6J5sfjaXrYw,68139
+httpx/_compat.py,sha256=lQa4SnZhS-kNQ8HKpSwKrmJ00nYQKDVaWwwnOYEvjMI,1602
+httpx/_config.py,sha256=9Tg0-pV93Hl5knjyZhCLcoEXymAMn-OLaDsEn2uPK14,12391
+httpx/_content.py,sha256=olbWqawdWWweXeW6gDYHPiEGjip5lqFZKv9OmVd-zIg,8092
+httpx/_decoders.py,sha256=dd8GSkEAe45BzRUF47zH_lg3-BcwXtxzPBSGP5Y4F90,9739
+httpx/_exceptions.py,sha256=xKw-U6vW7zmdReUAGYHMegYWZuDAuE5039L087SHe4Q,7880
+httpx/_main.py,sha256=m9C4RuqjOB6UqL3FFHMjmC45f4SDSO-iOREFLdw4IdM,15784
+httpx/_models.py,sha256=Ho9YjmVMkS-lEMhCGpecfYsenVZy2jsLJmKCexO50tI,42696
+httpx/_multipart.py,sha256=LTcxKvbIkVbleNDhb3_JEIayIdYxXfxr812uP_Hudz0,8978
+httpx/_status_codes.py,sha256=XKArMrSoo8oKBQCHdFGA-wsM2PcSTaHE8svDYOUcwWk,5584
+httpx/_transports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+httpx/_transports/__pycache__/__init__.cpython-310.pyc,,
+httpx/_transports/__pycache__/asgi.cpython-310.pyc,,
+httpx/_transports/__pycache__/base.cpython-310.pyc,,
+httpx/_transports/__pycache__/default.cpython-310.pyc,,
+httpx/_transports/__pycache__/mock.cpython-310.pyc,,
+httpx/_transports/__pycache__/wsgi.cpython-310.pyc,,
+httpx/_transports/asgi.py,sha256=lKAL-6dhxqSnZA2fMWtj-MokSTIzjnwwa3DTkkof5cE,5317
+httpx/_transports/base.py,sha256=0BM8yZZEkdFT4tXXSm0h0dK0cSYA4hLgInj_BljGEGw,2510
+httpx/_transports/default.py,sha256=fla9xvSAM3BuGtaMa4PhbX1gW_9oafl8vzujOhcE-H8,12626
+httpx/_transports/mock.py,sha256=sDt3BDXbz8-W94kC8OXtGzF1PWH0y73h1De7Q-XkVtg,1179
+httpx/_transports/wsgi.py,sha256=72ZMPBLPV-aZB4gfsz_SOrJpgKJb6Z9W5wFxhlMQcqg,4754
+httpx/_types.py,sha256=BnX0adSAxLT9BzkxuX96S4odkC9UdLMgws6waxqEKuI,3333
+httpx/_urlparse.py,sha256=sYl4v1ndRHbuXQswp3kN9I4VNWsj-Vu6icmHofOdUzM,15897
+httpx/_urls.py,sha256=JAONd-2reXpB_WuQ7WuvhUcLuebiQeYJQPyszADmCow,21840
+httpx/_utils.py,sha256=FbZDT9UCu_8EjyynU8g_YsQGzQmSgSp5dtexxBolgIA,14580
+httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/llava_next/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc b/llava_next/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2467724404d2cc23ff41433db769e6d983d269d8
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc differ
diff --git a/llava_next/lib/python3.10/site-packages/networkx/__init__.py b/llava_next/lib/python3.10/site-packages/networkx/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5710827b61d5f5fa25dde778489c2d56677b9b37
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/__init__.py
@@ -0,0 +1,53 @@
+"""
+NetworkX
+========
+
+NetworkX is a Python package for the creation, manipulation, and study of the
+structure, dynamics, and functions of complex networks.
+
+See https://networkx.org for complete documentation.
+"""
+
+__version__ = "3.4.2"
+
+
+# These are imported in order as listed
+from networkx.lazy_imports import _lazy_import
+
+from networkx.exception import *
+
+from networkx import utils
+from networkx.utils import _clear_cache, _dispatchable
+
+# load_and_call entry_points, set configs
+config = utils.backends._set_configs_from_environment()
+utils.config = utils.configs.config = config # type: ignore[attr-defined]
+
+from networkx import classes
+from networkx.classes import filters
+from networkx.classes import *
+
+from networkx import convert
+from networkx.convert import *
+
+from networkx import convert_matrix
+from networkx.convert_matrix import *
+
+from networkx import relabel
+from networkx.relabel import *
+
+from networkx import generators
+from networkx.generators import *
+
+from networkx import readwrite
+from networkx.readwrite import *
+
+# Need to test with SciPy, when available
+from networkx import algorithms
+from networkx.algorithms import *
+
+from networkx import linalg
+from networkx.linalg import *
+
+from networkx import drawing
+from networkx.drawing import *
diff --git a/llava_next/lib/python3.10/site-packages/networkx/algorithms/chains.py b/llava_next/lib/python3.10/site-packages/networkx/algorithms/chains.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae342d9c8669acd832a3bdb4fe8eecf3e300464f
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/algorithms/chains.py
@@ -0,0 +1,172 @@
+"""Functions for finding chains in a graph."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["chain_decomposition"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def chain_decomposition(G, root=None):
+ """Returns the chain decomposition of a graph.
+
+ The *chain decomposition* of a graph with respect a depth-first
+ search tree is a set of cycles or paths derived from the set of
+ fundamental cycles of the tree in the following manner. Consider
+ each fundamental cycle with respect to the given tree, represented
+ as a list of edges beginning with the nontree edge oriented away
+ from the root of the tree. For each fundamental cycle, if it
+ overlaps with any previous fundamental cycle, just take the initial
+ non-overlapping segment, which is a path instead of a cycle. Each
+ cycle or path is called a *chain*. For more information, see [1]_.
+
+ Parameters
+ ----------
+ G : undirected graph
+
+ root : node (optional)
+ A node in the graph `G`. If specified, only the chain
+ decomposition for the connected component containing this node
+ will be returned. This node indicates the root of the depth-first
+ search tree.
+
+ Yields
+ ------
+ chain : list
+ A list of edges representing a chain. There is no guarantee on
+ the orientation of the edges in each chain (for example, if a
+ chain includes the edge joining nodes 1 and 2, the chain may
+ include either (1, 2) or (2, 1)).
+
+ Raises
+ ------
+ NodeNotFound
+ If `root` is not in the graph `G`.
+
+ Examples
+ --------
+ >>> G = nx.Graph([(0, 1), (1, 4), (3, 4), (3, 5), (4, 5)])
+ >>> list(nx.chain_decomposition(G))
+ [[(4, 5), (5, 3), (3, 4)]]
+
+ Notes
+ -----
+ The worst-case running time of this implementation is linear in the
+ number of nodes and number of edges [1]_.
+
+ References
+ ----------
+ .. [1] Jens M. Schmidt (2013). "A simple test on 2-vertex-
+ and 2-edge-connectivity." *Information Processing Letters*,
+ 113, 241–244. Elsevier.
+
+ """
+
+ def _dfs_cycle_forest(G, root=None):
+ """Builds a directed graph composed of cycles from the given graph.
+
+ `G` is an undirected simple graph. `root` is a node in the graph
+ from which the depth-first search is started.
+
+ This function returns both the depth-first search cycle graph
+ (as a :class:`~networkx.DiGraph`) and the list of nodes in
+ depth-first preorder. The depth-first search cycle graph is a
+ directed graph whose edges are the edges of `G` oriented toward
+ the root if the edge is a tree edge and away from the root if
+ the edge is a non-tree edge. If `root` is not specified, this
+ performs a depth-first search on each connected component of `G`
+ and returns a directed forest instead.
+
+ If `root` is not in the graph, this raises :exc:`KeyError`.
+
+ """
+ # Create a directed graph from the depth-first search tree with
+ # root node `root` in which tree edges are directed toward the
+ # root and nontree edges are directed away from the root. For
+ # each node with an incident nontree edge, this creates a
+ # directed cycle starting with the nontree edge and returning to
+ # that node.
+ #
+ # The `parent` node attribute stores the parent of each node in
+ # the DFS tree. The `nontree` edge attribute indicates whether
+ # the edge is a tree edge or a nontree edge.
+ #
+ # We also store the order of the nodes found in the depth-first
+ # search in the `nodes` list.
+ H = nx.DiGraph()
+ nodes = []
+ for u, v, d in nx.dfs_labeled_edges(G, source=root):
+ if d == "forward":
+ # `dfs_labeled_edges()` yields (root, root, 'forward')
+ # if it is beginning the search on a new connected
+ # component.
+ if u == v:
+ H.add_node(v, parent=None)
+ nodes.append(v)
+ else:
+ H.add_node(v, parent=u)
+ H.add_edge(v, u, nontree=False)
+ nodes.append(v)
+ # `dfs_labeled_edges` considers nontree edges in both
+ # orientations, so we need to not add the edge if it its
+ # other orientation has been added.
+ elif d == "nontree" and v not in H[u]:
+ H.add_edge(v, u, nontree=True)
+ else:
+ # Do nothing on 'reverse' edges; we only care about
+ # forward and nontree edges.
+ pass
+ return H, nodes
+
+ def _build_chain(G, u, v, visited):
+ """Generate the chain starting from the given nontree edge.
+
+ `G` is a DFS cycle graph as constructed by
+ :func:`_dfs_cycle_graph`. The edge (`u`, `v`) is a nontree edge
+ that begins a chain. `visited` is a set representing the nodes
+ in `G` that have already been visited.
+
+ This function yields the edges in an initial segment of the
+ fundamental cycle of `G` starting with the nontree edge (`u`,
+ `v`) that includes all the edges up until the first node that
+ appears in `visited`. The tree edges are given by the 'parent'
+ node attribute. The `visited` set is updated to add each node in
+ an edge yielded by this function.
+
+ """
+ while v not in visited:
+ yield u, v
+ visited.add(v)
+ u, v = v, G.nodes[v]["parent"]
+ yield u, v
+
+ # Check if the root is in the graph G. If not, raise NodeNotFound
+ if root is not None and root not in G:
+ raise nx.NodeNotFound(f"Root node {root} is not in graph")
+
+ # Create a directed version of H that has the DFS edges directed
+ # toward the root and the nontree edges directed away from the root
+ # (in each connected component).
+ H, nodes = _dfs_cycle_forest(G, root)
+
+ # Visit the nodes again in DFS order. For each node, and for each
+ # nontree edge leaving that node, compute the fundamental cycle for
+ # that nontree edge starting with that edge. If the fundamental
+ # cycle overlaps with any visited nodes, just take the prefix of the
+ # cycle up to the point of visited nodes.
+ #
+ # We repeat this process for each connected component (implicitly,
+ # since `nodes` already has a list of the nodes grouped by connected
+ # component).
+ visited = set()
+ for u in nodes:
+ visited.add(u)
+ # For each nontree edge going out of node u...
+ edges = ((u, v) for u, v, d in H.out_edges(u, data="nontree") if d)
+ for u, v in edges:
+ # Create the cycle or cycle prefix starting with the
+ # nontree edge.
+ chain = list(_build_chain(H, u, v, visited))
+ yield chain
diff --git a/llava_next/lib/python3.10/site-packages/networkx/algorithms/chordal.py b/llava_next/lib/python3.10/site-packages/networkx/algorithms/chordal.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab71c243f314d02b74eac9a7b0b4e601ed7e484d
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/algorithms/chordal.py
@@ -0,0 +1,443 @@
+"""
+Algorithms for chordal graphs.
+
+A graph is chordal if every cycle of length at least 4 has a chord
+(an edge joining two nodes not adjacent in the cycle).
+https://en.wikipedia.org/wiki/Chordal_graph
+"""
+
+import sys
+
+import networkx as nx
+from networkx.algorithms.components import connected_components
+from networkx.utils import arbitrary_element, not_implemented_for
+
+__all__ = [
+ "is_chordal",
+ "find_induced_nodes",
+ "chordal_graph_cliques",
+ "chordal_graph_treewidth",
+ "NetworkXTreewidthBoundExceeded",
+ "complete_to_chordal_graph",
+]
+
+
+class NetworkXTreewidthBoundExceeded(nx.NetworkXException):
+ """Exception raised when a treewidth bound has been provided and it has
+ been exceeded"""
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def is_chordal(G):
+ """Checks whether G is a chordal graph.
+
+ A graph is chordal if every cycle of length at least 4 has a chord
+ (an edge joining two nodes not adjacent in the cycle).
+
+ Parameters
+ ----------
+ G : graph
+ A NetworkX graph.
+
+ Returns
+ -------
+ chordal : bool
+ True if G is a chordal graph and False otherwise.
+
+ Raises
+ ------
+ NetworkXNotImplemented
+ The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
+
+ Examples
+ --------
+ >>> e = [
+ ... (1, 2),
+ ... (1, 3),
+ ... (2, 3),
+ ... (2, 4),
+ ... (3, 4),
+ ... (3, 5),
+ ... (3, 6),
+ ... (4, 5),
+ ... (4, 6),
+ ... (5, 6),
+ ... ]
+ >>> G = nx.Graph(e)
+ >>> nx.is_chordal(G)
+ True
+
+ Notes
+ -----
+ The routine tries to go through every node following maximum cardinality
+ search. It returns False when it finds that the separator for any node
+ is not a clique. Based on the algorithms in [1]_.
+
+ Self loops are ignored.
+
+ References
+ ----------
+ .. [1] R. E. Tarjan and M. Yannakakis, Simple linear-time algorithms
+ to test chordality of graphs, test acyclicity of hypergraphs, and
+ selectively reduce acyclic hypergraphs, SIAM J. Comput., 13 (1984),
+ pp. 566–579.
+ """
+ if len(G.nodes) <= 3:
+ return True
+ return len(_find_chordality_breaker(G)) == 0
+
+
+@nx._dispatchable
+def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize):
+ """Returns the set of induced nodes in the path from s to t.
+
+ Parameters
+ ----------
+ G : graph
+ A chordal NetworkX graph
+ s : node
+ Source node to look for induced nodes
+ t : node
+ Destination node to look for induced nodes
+ treewidth_bound: float
+ Maximum treewidth acceptable for the graph H. The search
+ for induced nodes will end as soon as the treewidth_bound is exceeded.
+
+ Returns
+ -------
+ induced_nodes : Set of nodes
+ The set of induced nodes in the path from s to t in G
+
+ Raises
+ ------
+ NetworkXError
+ The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
+ If the input graph is an instance of one of these classes, a
+ :exc:`NetworkXError` is raised.
+ The algorithm can only be applied to chordal graphs. If the input
+ graph is found to be non-chordal, a :exc:`NetworkXError` is raised.
+
+ Examples
+ --------
+ >>> G = nx.Graph()
+ >>> G = nx.generators.classic.path_graph(10)
+ >>> induced_nodes = nx.find_induced_nodes(G, 1, 9, 2)
+ >>> sorted(induced_nodes)
+ [1, 2, 3, 4, 5, 6, 7, 8, 9]
+
+ Notes
+ -----
+ G must be a chordal graph and (s,t) an edge that is not in G.
+
+ If a treewidth_bound is provided, the search for induced nodes will end
+ as soon as the treewidth_bound is exceeded.
+
+ The algorithm is inspired by Algorithm 4 in [1]_.
+ A formal definition of induced node can also be found on that reference.
+
+ Self Loops are ignored
+
+ References
+ ----------
+ .. [1] Learning Bounded Treewidth Bayesian Networks.
+ Gal Elidan, Stephen Gould; JMLR, 9(Dec):2699--2731, 2008.
+ http://jmlr.csail.mit.edu/papers/volume9/elidan08a/elidan08a.pdf
+ """
+ if not is_chordal(G):
+ raise nx.NetworkXError("Input graph is not chordal.")
+
+ H = nx.Graph(G)
+ H.add_edge(s, t)
+ induced_nodes = set()
+ triplet = _find_chordality_breaker(H, s, treewidth_bound)
+ while triplet:
+ (u, v, w) = triplet
+ induced_nodes.update(triplet)
+ for n in triplet:
+ if n != s:
+ H.add_edge(s, n)
+ triplet = _find_chordality_breaker(H, s, treewidth_bound)
+ if induced_nodes:
+ # Add t and the second node in the induced path from s to t.
+ induced_nodes.add(t)
+ for u in G[s]:
+ if len(induced_nodes & set(G[u])) == 2:
+ induced_nodes.add(u)
+ break
+ return induced_nodes
+
+
+@nx._dispatchable
+def chordal_graph_cliques(G):
+ """Returns all maximal cliques of a chordal graph.
+
+ The algorithm breaks the graph in connected components and performs a
+ maximum cardinality search in each component to get the cliques.
+
+ Parameters
+ ----------
+ G : graph
+ A NetworkX graph
+
+ Yields
+ ------
+ frozenset of nodes
+ Maximal cliques, each of which is a frozenset of
+ nodes in `G`. The order of cliques is arbitrary.
+
+ Raises
+ ------
+ NetworkXError
+ The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
+ The algorithm can only be applied to chordal graphs. If the input
+ graph is found to be non-chordal, a :exc:`NetworkXError` is raised.
+
+ Examples
+ --------
+ >>> e = [
+ ... (1, 2),
+ ... (1, 3),
+ ... (2, 3),
+ ... (2, 4),
+ ... (3, 4),
+ ... (3, 5),
+ ... (3, 6),
+ ... (4, 5),
+ ... (4, 6),
+ ... (5, 6),
+ ... (7, 8),
+ ... ]
+ >>> G = nx.Graph(e)
+ >>> G.add_node(9)
+ >>> cliques = [c for c in chordal_graph_cliques(G)]
+ >>> cliques[0]
+ frozenset({1, 2, 3})
+ """
+ for C in (G.subgraph(c).copy() for c in connected_components(G)):
+ if C.number_of_nodes() == 1:
+ if nx.number_of_selfloops(C) > 0:
+ raise nx.NetworkXError("Input graph is not chordal.")
+ yield frozenset(C.nodes())
+ else:
+ unnumbered = set(C.nodes())
+ v = arbitrary_element(C)
+ unnumbered.remove(v)
+ numbered = {v}
+ clique_wanna_be = {v}
+ while unnumbered:
+ v = _max_cardinality_node(C, unnumbered, numbered)
+ unnumbered.remove(v)
+ numbered.add(v)
+ new_clique_wanna_be = set(C.neighbors(v)) & numbered
+ sg = C.subgraph(clique_wanna_be)
+ if _is_complete_graph(sg):
+ new_clique_wanna_be.add(v)
+ if not new_clique_wanna_be >= clique_wanna_be:
+ yield frozenset(clique_wanna_be)
+ clique_wanna_be = new_clique_wanna_be
+ else:
+ raise nx.NetworkXError("Input graph is not chordal.")
+ yield frozenset(clique_wanna_be)
+
+
@nx._dispatchable
def chordal_graph_treewidth(G):
    """Returns the treewidth of the chordal graph G.

    For a chordal graph the treewidth equals the size of the largest
    clique minus one, and the maximal cliques can be enumerated directly.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Returns
    -------
    treewidth : int
        The size of the largest clique in the graph minus one.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> nx.chordal_graph_treewidth(G)
    3

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Tree_decomposition#Treewidth
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # Largest clique size over all maximal cliques (-1 when there are none),
    # then subtract one to obtain the treewidth.
    largest = max((len(clique) for clique in nx.chordal_graph_cliques(G)), default=-1)
    return largest - 1
+
+
def _is_complete_graph(G):
    """Return True if every pair of distinct nodes of G is adjacent.

    Raises NetworkXError if G contains a self loop, since self loops are
    not meaningful for this completeness test.
    """
    if nx.number_of_selfloops(G) > 0:
        raise nx.NetworkXError("Self loop found in _is_complete_graph()")
    node_count = G.number_of_nodes()
    # Graphs with zero or one node are trivially complete.
    if node_count < 2:
        return True
    # A complete simple graph on n nodes has exactly n*(n-1)/2 edges.
    expected_edges = node_count * (node_count - 1) / 2
    return G.number_of_edges() == expected_edges
+
+
+def _find_missing_edge(G):
+ """Given a non-complete graph G, returns a missing edge."""
+ nodes = set(G)
+ for u in G:
+ missing = nodes - set(list(G[u].keys()) + [u])
+ if missing:
+ return (u, missing.pop())
+
+
+def _max_cardinality_node(G, choices, wanna_connect):
+ """Returns a the node in choices that has more connections in G
+ to nodes in wanna_connect.
+ """
+ max_number = -1
+ for x in choices:
+ number = len([y for y in G[x] if y in wanna_connect])
+ if number > max_number:
+ max_number = number
+ max_cardinality_node = x
+ return max_cardinality_node
+
+
def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize):
    """Given a graph G, starts a max cardinality search
    (starting from s if s is given and from an arbitrary node otherwise)
    trying to find a non-chordal cycle.

    If it does find one, it returns (u,v,w) where u,v,w are the three
    nodes that together with s are involved in the cycle.

    It ignores any self loops.

    Returns an empty tuple when the search finishes without finding any
    chordality breaker along this elimination ordering.

    Raises
    ------
    NetworkXPointlessConcept
        If G has no nodes.
    NetworkXTreewidthBoundExceeded
        If a neighbor-clique larger than ``treewidth_bound`` is observed
        during the search.
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    unnumbered = set(G)
    if s is None:
        s = arbitrary_element(G)
    unnumbered.remove(s)
    numbered = {s}
    # Size of the largest numbered-neighbor set seen so far; compared
    # against treewidth_bound below.
    current_treewidth = -1
    while unnumbered:  # and current_treewidth <= treewidth_bound:
        # Visit next the unnumbered node with the most numbered neighbors
        # (max cardinality search order).
        v = _max_cardinality_node(G, unnumbered, numbered)
        unnumbered.remove(v)
        numbered.add(v)
        # The already-numbered neighbors of v must form a clique for the
        # graph to look chordal along this ordering.
        clique_wanna_be = set(G[v]) & numbered
        sg = G.subgraph(clique_wanna_be)
        if _is_complete_graph(sg):
            # The graph seems to be chordal by now. We update the treewidth
            current_treewidth = max(current_treewidth, len(clique_wanna_be))
            if current_treewidth > treewidth_bound:
                raise nx.NetworkXTreewidthBoundExceeded(
                    f"treewidth_bound exceeded: {current_treewidth}"
                )
        else:
            # sg is not a clique,
            # look for an edge that is not included in sg
            (u, w) = _find_missing_edge(sg)
            return (u, v, w)
    return ()
+
+
@not_implemented_for("directed")
@nx._dispatchable(returns_graph=True)
def complete_to_chordal_graph(G):
    """Return a copy of G completed to a chordal graph

    Adds edges to a copy of G to create a chordal graph. A graph G=(V,E) is
    called chordal if for each cycle with length bigger than 3, there exist
    two non-adjacent nodes connected by an edge (called a chord).

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    H : NetworkX graph
        The chordal enhancement of G
    alpha : Dictionary
        The elimination ordering of nodes of G

    Notes
    -----
    There are different approaches to calculate the chordal
    enhancement of a graph. The algorithm used here is called
    MCS-M and gives at least minimal (local) triangulation of graph. Note
    that this triangulation is not necessarily a global minimum.

    https://en.wikipedia.org/wiki/Chordal_graph

    References
    ----------
    .. [1] Berry, Anne & Blair, Jean & Heggernes, Pinar & Peyton, Barry. (2004)
       Maximum Cardinality Search for Computing Minimal Triangulations of
       Graphs. Algorithmica. 39. 287-298. 10.1007/s00453-004-1084-3.

    Examples
    --------
    >>> from networkx.algorithms.chordal import complete_to_chordal_graph
    >>> G = nx.wheel_graph(10)
    >>> H, alpha = complete_to_chordal_graph(G)
    """
    H = G.copy()
    alpha = {node: 0 for node in H}
    # Already chordal: return the copy unchanged, every node labelled 0.
    if nx.is_chordal(H):
        return H, alpha
    chords = set()
    # weight[v] is incremented each round in which v is adjacent to, or
    # reachable through strictly lower-weight nodes from, the node just
    # numbered (the MCS-M selection criterion).
    weight = {node: 0 for node in H.nodes()}
    unnumbered_nodes = list(H.nodes())
    # Number nodes from n down to 1, always picking a maximum-weight node.
    for i in range(len(H.nodes()), 0, -1):
        # get the node in unnumbered_nodes with the maximum weight
        z = max(unnumbered_nodes, key=lambda node: weight[node])
        unnumbered_nodes.remove(z)
        alpha[z] = i
        update_nodes = []
        for y in unnumbered_nodes:
            if G.has_edge(y, z):
                update_nodes.append(y)
            else:
                # y_weight will be bigger than node weights between y and z
                y_weight = weight[y]
                lower_nodes = [
                    node for node in unnumbered_nodes if weight[node] < y_weight
                ]
                # z reachable from y through lower-weight nodes only:
                # MCS-M records the fill-in chord (z, y).
                if nx.has_path(H.subgraph(lower_nodes + [z, y]), y, z):
                    update_nodes.append(y)
                    chords.add((z, y))
        # during calculation of paths the weights should not be updated
        for node in update_nodes:
            weight[node] += 1
    H.add_edges_from(chords)
    return H, alpha
diff --git a/llava_next/lib/python3.10/site-packages/networkx/algorithms/graphical.py b/llava_next/lib/python3.10/site-packages/networkx/algorithms/graphical.py
new file mode 100644
index 0000000000000000000000000000000000000000..d5d82dedda6f9810e3f51bc4c82a9a2b252fa998
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/algorithms/graphical.py
@@ -0,0 +1,483 @@
+"""Test sequences for graphiness."""
+
+import heapq
+
+import networkx as nx
+
# Public degree-sequence tests re-exported at the ``networkx`` top level.
__all__ = [
    "is_graphical",
    "is_multigraphical",
    "is_pseudographical",
    "is_digraphical",
    "is_valid_degree_sequence_erdos_gallai",
    "is_valid_degree_sequence_havel_hakimi",
]
+
+
@nx._dispatchable(graphs=None)
def is_graphical(sequence, method="eg"):
    """Returns True if sequence is a valid degree sequence.

    A degree sequence is valid if some graph can realize it.

    Parameters
    ----------
    sequence : list or iterable container
        A sequence of integer node degrees

    method : "eg" | "hh" (default: 'eg')
        The method used to validate the degree sequence.
        "eg" corresponds to the Erdős-Gallai algorithm
        [EG1960]_, [choudum1986]_, and
        "hh" to the Havel-Hakimi algorithm
        [havel1955]_, [hakimi1962]_, [CL1996]_.

    Returns
    -------
    valid : bool
        True if the sequence is a valid degree sequence and False if not.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> sequence = (d for n, d in G.degree())
    >>> nx.is_graphical(sequence)
    True

    To test a non-graphical sequence:
    >>> sequence_list = [d for n, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_graphical(sequence_list)
    False

    References
    ----------
    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
    .. [choudum1986] S.A. Choudum. "A simple proof of the Erdős-Gallai theorem on
       graph sequences." Bulletin of the Australian Mathematical Society, 33,
       pp 67-70, 1986. https://doi.org/10.1017/S0004972700002872
    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
       Casopis Pest. Mat. 80, 477-480, 1955.
    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
       Chapman and Hall/CRC, 1996.
    """
    # Reject unknown methods before consuming the (possibly one-shot) sequence.
    if method not in ("eg", "hh"):
        raise nx.NetworkXException("`method` must be 'eg' or 'hh'")
    degrees = list(sequence)
    if method == "eg":
        return is_valid_degree_sequence_erdos_gallai(degrees)
    return is_valid_degree_sequence_havel_hakimi(degrees)
+
+
def _basic_graphical_tests(deg_sequence):
    """Coerce the sequence to ints and run cheap feasibility checks.

    Returns ``(dmax, dmin, dsum, n, num_degs)`` computed over the
    non-zero degrees, where ``num_degs[d]`` counts occurrences of degree
    ``d``.  Raises ``nx.NetworkXUnfeasible`` if any degree is negative or
    at least the sequence length, or if the degree sum is odd or exceeds
    ``n * (n - 1)``.
    """
    deg_sequence = nx.utils.make_list_of_ints(deg_sequence)
    p = len(deg_sequence)
    num_degs = [0] * p
    dmax, dmin, dsum, n = 0, p, 0, 0
    for d in deg_sequence:
        # A simple graph cannot host a negative degree or one >= len(sequence).
        if d < 0 or d >= p:
            raise nx.NetworkXUnfeasible
        # Zero degrees are ignored; they never affect graphicality.
        if d > 0:
            dmax = max(dmax, d)
            dmin = min(dmin, d)
            dsum += d
            n += 1
            num_degs[d] += 1
    # Handshake lemma (even sum) and saturation bound on non-zero nodes.
    if dsum % 2 or dsum > n * (n - 1):
        raise nx.NetworkXUnfeasible
    return dmax, dmin, dsum, n, num_degs
+
+
@nx._dispatchable(graphs=None)
def is_valid_degree_sequence_havel_hakimi(deg_sequence):
    r"""Returns True if deg_sequence can be realized by a simple graph.

    The validation proceeds using the Havel-Hakimi theorem
    [havel1955]_, [hakimi1962]_, [CL1996]_.
    Worst-case run time is $O(s)$ where $s$ is the sum of the sequence.

    Parameters
    ----------
    deg_sequence : list
        A list of integers where each element specifies the degree of a node
        in a graph.

    Returns
    -------
    valid : bool
        True if deg_sequence is graphical and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence)
    True

    To test a non-valid sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence_list)
    False

    Notes
    -----
    The ZZ condition says that for the sequence d if

    .. math::
        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)}

    then d is graphical.  This was shown in Theorem 6 in [1]_.

    References
    ----------
    .. [1] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
       Casopis Pest. Mat. 80, 477-480, 1955.
    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
       Chapman and Hall/CRC, 1996.
    """
    try:
        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
    except nx.NetworkXUnfeasible:
        return False
    # Accept if sequence has no non-zero degrees or passes the ZZ condition
    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1):
        return True

    # Scratch buffer for degrees decremented this round; they are merged
    # back into the num_degs buckets once the round finishes.
    modstubs = [0] * (dmax + 1)
    # Successively reduce degree sequence by removing the maximum degree
    while n > 0:
        # Retrieve the maximum degree in the sequence
        while num_degs[dmax] == 0:
            dmax -= 1
        # If there are not enough stubs to connect to, then the sequence is
        # not graphical
        if dmax > n - 1:
            return False

        # Remove largest stub in list
        num_degs[dmax], n = num_degs[dmax] - 1, n - 1
        # Reduce the next dmax largest stubs
        mslen = 0
        k = dmax
        for i in range(dmax):
            # Advance k down to the next non-empty degree bucket.
            while num_degs[k] == 0:
                k -= 1
            num_degs[k], n = num_degs[k] - 1, n - 1
            if k > 1:
                # Degree k-1 is still positive, so it re-enters the sequence.
                modstubs[mslen] = k - 1
                mslen += 1
        # Add back to the list any non-zero stubs that were removed
        for i in range(mslen):
            stub = modstubs[i]
            num_degs[stub], n = num_degs[stub] + 1, n + 1
    return True
+
+
@nx._dispatchable(graphs=None)
def is_valid_degree_sequence_erdos_gallai(deg_sequence):
    r"""Returns True if deg_sequence can be realized by a simple graph.

    The validation is done using the Erdős-Gallai theorem [EG1960]_.

    Parameters
    ----------
    deg_sequence : list
        A list of integers

    Returns
    -------
    valid : bool
        True if deg_sequence is graphical and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence)
    True

    To test a non-valid sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence_list)
    False

    Notes
    -----

    This implementation uses an equivalent form of the Erdős-Gallai criterion.
    Worst-case run time is $O(n)$ where $n$ is the length of the sequence.

    Specifically, a sequence d is graphical if and only if the
    sum of the sequence is even and for all strong indices k in the sequence,

    .. math::

       \sum_{i=1}^{k} d_i \leq k(k-1) + \sum_{j=k+1}^{n} \min(d_i,k)
             = k(n-1) - ( k \sum_{j=0}^{k-1} n_j - \sum_{j=0}^{k-1} j n_j )

    A strong index k is any index where d_k >= k and the value n_j is the
    number of occurrences of j in d.  The maximal strong index is called the
    Durfee index.

    This particular rearrangement comes from the proof of Theorem 3 in [2]_.

    The ZZ condition says that for the sequence d if

    .. math::
        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)}

    then d is graphical.  This was shown in Theorem 6 in [2]_.

    References
    ----------
    .. [1] A. Tripathi and S. Vijay. "A note on a theorem of Erdős & Gallai",
       Discrete Mathematics, 265, pp. 417-420 (2003).
    .. [2] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
    """
    try:
        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
    except nx.NetworkXUnfeasible:
        return False
    # Accept if sequence has no non-zero degrees or passes the ZZ condition
    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1):
        return True

    # Perform the EG checks using the reformulation of Zverovich and Zverovich
    # Degrees are scanned in descending order; k tracks the current index,
    # sum_deg the running degree sum, and sum_nj / sum_jnj the bucket sums
    # appearing on the right-hand side of the rearranged inequality.
    k, sum_deg, sum_nj, sum_jnj = 0, 0, 0, 0
    for dk in range(dmax, dmin - 1, -1):
        if dk < k + 1:  # Check if already past Durfee index
            return True
        if num_degs[dk] > 0:
            run_size = num_degs[dk]  # Process a run of identical-valued degrees
            if dk < k + run_size:  # Check if end of run is past Durfee index
                run_size = dk - k  # Adjust back to Durfee index
            sum_deg += run_size * dk
            for v in range(run_size):
                sum_nj += num_degs[k + v]
                sum_jnj += (k + v) * num_degs[k + v]
            k += run_size
            # Reject as soon as the rearranged Erdős-Gallai inequality fails.
            if sum_deg > k * (n - 1) - k * sum_nj + sum_jnj:
                return False
    return True
+
+
@nx._dispatchable(graphs=None)
def is_multigraphical(sequence):
    """Returns True if some multigraph can realize the sequence.

    Parameters
    ----------
    sequence : list
        A list of integers

    Returns
    -------
    valid : bool
        True if deg_sequence is a multigraphic degree sequence and False if not.

    Examples
    --------
    >>> G = nx.MultiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_multigraphical(sequence)
    True

    To test a non-multigraphical sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_multigraphical(sequence_list)
    False

    Notes
    -----
    The worst-case run time is $O(n)$ where $n$ is the length of the sequence.

    References
    ----------
    .. [1] S. L. Hakimi. "On the realizability of a set of integers as
       degrees of the vertices of a linear graph", J. SIAM, 10, pp. 496-506
       (1962).
    """
    try:
        degrees = nx.utils.make_list_of_ints(sequence)
    except nx.NetworkXError:
        return False
    # Negative degrees can never be realized.
    if any(d < 0 for d in degrees):
        return False
    # Hakimi's condition: the sum must be even and large enough that the
    # maximum degree can be matched by the remaining stubs.
    total = sum(degrees)
    largest = max(degrees, default=0)
    return total % 2 == 0 and total >= 2 * largest
+
+
@nx._dispatchable(graphs=None)
def is_pseudographical(sequence):
    """Returns True if some pseudograph can realize the sequence.

    Every nonnegative integer sequence with an even sum is pseudographical
    (see [1]_).

    Parameters
    ----------
    sequence : list or iterable container
        A sequence of integer node degrees

    Returns
    -------
    valid : bool
        True if the sequence is a pseudographic degree sequence and False if not.
        An empty sequence is pseudographical (realized by the null pseudograph).

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_pseudographical(sequence)
    True

    To test a non-pseudographical sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_pseudographical(sequence_list)
    False

    Notes
    -----
    The worst-case run time is $O(n)$ where n is the length of the sequence.

    References
    ----------
    .. [1] F. Boesch and F. Harary. "Line removal algorithms for graphs
       and their degree lists", IEEE Trans. Circuits and Systems, CAS-23(12),
       pp. 778-782 (1976).
    """
    try:
        deg_sequence = nx.utils.make_list_of_ints(sequence)
    except nx.NetworkXError:
        return False
    # Guard the empty sequence explicitly: it satisfies the criterion
    # vacuously, and min() below would raise ValueError on an empty list
    # (sibling is_graphical([]) also returns True).
    if not deg_sequence:
        return True
    return sum(deg_sequence) % 2 == 0 and min(deg_sequence) >= 0
+
+
@nx._dispatchable(graphs=None)
def is_digraphical(in_sequence, out_sequence):
    r"""Returns True if some directed graph can realize the in- and out-degree
    sequences.

    Parameters
    ----------
    in_sequence : list or iterable container
        A sequence of integer node in-degrees

    out_sequence : list or iterable container
        A sequence of integer node out-degrees

    Returns
    -------
    valid : bool
        True if in and out-sequences are digraphic False if not.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> in_seq = (d for n, d in G.in_degree())
    >>> out_seq = (d for n, d in G.out_degree())
    >>> nx.is_digraphical(in_seq, out_seq)
    True

    To test a non-digraphical scenario:
    >>> in_seq_list = [d for n, d in G.in_degree()]
    >>> in_seq_list[-1] += 1
    >>> nx.is_digraphical(in_seq_list, out_seq)
    False

    Notes
    -----
    This algorithm is from Kleitman and Wang [1]_.
    The worst case runtime is $O(s \times \log n)$ where $s$ and $n$ are the
    sum and length of the sequences respectively.

    References
    ----------
    .. [1] D.J. Kleitman and D.L. Wang
       Algorithms for Constructing Graphs and Digraphs with Given Valences
       and Factors, Discrete Mathematics, 6(1), pp. 79-88 (1973)
    """
    try:
        in_deg_sequence = nx.utils.make_list_of_ints(in_sequence)
        out_deg_sequence = nx.utils.make_list_of_ints(out_sequence)
    except nx.NetworkXError:
        return False
    # Process the sequences and form two heaps to store degree pairs with
    # either zero or non-zero out degrees
    sumin, sumout, nin, nout = 0, 0, len(in_deg_sequence), len(out_deg_sequence)
    # Sequences may differ in length; missing entries count as degree 0.
    maxn = max(nin, nout)
    maxin = 0
    if maxn == 0:
        return True
    stubheap, zeroheap = [], []
    for n in range(maxn):
        in_deg, out_deg = 0, 0
        if n < nout:
            out_deg = out_deg_sequence[n]
        if n < nin:
            in_deg = in_deg_sequence[n]
        if in_deg < 0 or out_deg < 0:
            return False
        sumin, sumout, maxin = sumin + in_deg, sumout + out_deg, max(maxin, in_deg)
        # Degrees are stored negated so Python's min-heaps behave as max-heaps.
        if in_deg > 0:
            stubheap.append((-1 * out_deg, -1 * in_deg))
        elif out_deg > 0:
            zeroheap.append(-1 * out_deg)
    # Every directed edge contributes one in- and one out-stub.
    if sumin != sumout:
        return False
    heapq.heapify(stubheap)
    heapq.heapify(zeroheap)

    # Scratch list for nodes whose degrees were reduced this round.
    modstubs = [(0, 0)] * (maxin + 1)
    # Successively reduce degree sequence by removing the maximum out degree
    while stubheap:
        # Take the first value in the sequence with non-zero in degree
        (freeout, freein) = heapq.heappop(stubheap)
        freein *= -1
        if freein > len(stubheap) + len(zeroheap):
            return False

        # Attach out stubs to the nodes with the most in stubs
        mslen = 0
        for i in range(freein):
            if zeroheap and (not stubheap or stubheap[0][0] > zeroheap[0]):
                stubout = heapq.heappop(zeroheap)
                stubin = 0
            else:
                (stubout, stubin) = heapq.heappop(stubheap)
                if stubout == 0:
                    return False
            # Check if target is now totally connected
            if stubout + 1 < 0 or stubin < 0:
                modstubs[mslen] = (stubout + 1, stubin)
                mslen += 1

        # Add back the nodes to the heap that still have available stubs
        for i in range(mslen):
            stub = modstubs[i]
            if stub[1] < 0:
                heapq.heappush(stubheap, stub)
            else:
                heapq.heappush(zeroheap, stub[0])
        if freeout < 0:
            heapq.heappush(zeroheap, freeout)
    return True
diff --git a/llava_next/lib/python3.10/site-packages/networkx/algorithms/non_randomness.py b/llava_next/lib/python3.10/site-packages/networkx/algorithms/non_randomness.py
new file mode 100644
index 0000000000000000000000000000000000000000..137991157747e3720f026481ff1b0b3b5e8af1cf
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/algorithms/non_randomness.py
@@ -0,0 +1,98 @@
+r"""Computation of graph non-randomness"""
+
+import math
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["non_randomness"]
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def non_randomness(G, k=None, weight="weight"):
    """Compute the non-randomness of graph G.

    The first returned value nr is the sum of non-randomness values of all
    edges within the graph (where the non-randomness of an edge tends to be
    small when the two nodes linked by that edge are from two different
    communities).

    The second computed value nr_rd is a relative measure that indicates
    to what extent graph G is different from random graphs in terms
    of probability. When it is close to 0, the graph tends to be more
    likely generated by an Erdos Renyi model.

    Parameters
    ----------
    G : NetworkX graph
        Graph must be symmetric, connected, and without self-loops.

    k : int
        The number of communities in G.
        If k is not set, the function will use a default community
        detection algorithm to set it.

    weight : string or None, optional (default="weight")
        The name of an edge attribute that holds the numerical value used
        as a weight. If None, then each edge has weight 1, i.e., the graph is
        binary.

    Returns
    -------
    non-randomness : (float, float) tuple
        Non-randomness, Relative non-randomness w.r.t.
        Erdos Renyi random graphs.

    Raises
    ------
    NetworkXException
        if the input graph is not connected.
    NetworkXError
        if the input graph contains self-loops or if graph has no edges.

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> nr, nr_rd = nx.non_randomness(G, 2)
    >>> nr, nr_rd = nx.non_randomness(G, 2, "weight")

    Notes
    -----
    This computes Eq. (4.4) and (4.5) in Ref. [1]_.

    If a weight field is passed, this algorithm will use the eigenvalues
    of the weighted adjacency matrix to compute Eq. (4.4) and (4.5).

    References
    ----------
    .. [1] Xiaowei Ying and Xintao Wu,
       On Randomness Measures for Social Networks,
       SIAM International Conference on Data Mining. 2009
    """
    import numpy as np

    # corner case: graph has no edges
    if nx.is_empty(G):
        raise nx.NetworkXError("non_randomness not applicable to empty graphs")
    if not nx.is_connected(G):
        raise nx.NetworkXException("Non connected graph.")
    if len(list(nx.selfloop_edges(G))) > 0:
        raise nx.NetworkXError("Graph must not contain self-loops")

    # Fall back to an automatically detected community count.
    if k is None:
        k = len(tuple(nx.community.label_propagation_communities(G)))

    # eq. 4.4
    # nr is the sum of the k largest adjacency eigenvalues (real parts).
    eigenvalues = np.linalg.eigvals(nx.to_numpy_array(G, weight=weight))
    nr = float(np.real(np.sum(eigenvalues[:k])))

    n = G.number_of_nodes()
    m = G.number_of_edges()
    # NOTE(review): this divides by n * (n - k) and later by sqrt(2*k*p*(1-p));
    # k == n or p == 1 would raise ZeroDivisionError — presumably callers are
    # expected to pass k < n. TODO confirm.
    p = (2 * k * m) / (n * (n - k))

    # eq. 4.5
    nr_rd = (nr - ((n - 2 * k) * p + k)) / math.sqrt(2 * k * p * (1 - p))

    return nr, nr_rd
diff --git a/llava_next/lib/python3.10/site-packages/networkx/algorithms/reciprocity.py b/llava_next/lib/python3.10/site-packages/networkx/algorithms/reciprocity.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ea7ed2ce26ab973e07bcc6ec0d92aa4799d9a6a
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/algorithms/reciprocity.py
@@ -0,0 +1,98 @@
+"""Algorithms to calculate reciprocity in a directed graph."""
+
+import networkx as nx
+from networkx import NetworkXError
+
+from ..utils import not_implemented_for
+
+__all__ = ["reciprocity", "overall_reciprocity"]
+
+
@not_implemented_for("undirected", "multigraph")
@nx._dispatchable
def reciprocity(G, nodes=None):
    r"""Compute the reciprocity in a directed graph.

    The reciprocity of a directed graph is defined as the ratio
    of the number of edges pointing in both directions to the total
    number of edges in the graph.
    Formally, $r = |{(u,v) \in G|(v,u) \in G}| / |{(u,v) \in G}|$.

    The reciprocity of a single node u is defined similarly,
    it is the ratio of the number of edges in both directions to
    the total number of edges attached to node u.

    Parameters
    ----------
    G : graph
        A networkx directed graph
    nodes : container of nodes, optional (default=whole graph)
        Compute reciprocity for nodes in this container.

    Returns
    -------
    out : dictionary
        Reciprocity keyed by node label.

    Notes
    -----
    The reciprocity is not defined for isolated nodes.
    In such cases this function will return None.

    """
    # Whole-graph reciprocity when no nodes are specified.
    if nodes is None:
        return overall_reciprocity(G)

    # A single node: return its reciprocity as a bare number.
    if nodes in G:
        value = next(_reciprocity_iter(G, nodes))[1]
        if value is None:
            raise NetworkXError("Not defined for isolated nodes.")
        return value

    # An iterable of nodes: map each node to its reciprocity.
    return dict(_reciprocity_iter(G, nodes))
+
+
+def _reciprocity_iter(G, nodes):
+ """Return an iterator of (node, reciprocity)."""
+ n = G.nbunch_iter(nodes)
+ for node in n:
+ pred = set(G.predecessors(node))
+ succ = set(G.successors(node))
+ overlap = pred & succ
+ n_total = len(pred) + len(succ)
+
+ # Reciprocity is not defined for isolated nodes.
+ # Return None.
+ if n_total == 0:
+ yield (node, None)
+ else:
+ reciprocity = 2 * len(overlap) / n_total
+ yield (node, reciprocity)
+
+
@not_implemented_for("undirected", "multigraph")
@nx._dispatchable
def overall_reciprocity(G):
    """Compute the reciprocity for the whole graph.

    See the doc of reciprocity for the definition.

    Parameters
    ----------
    G : graph
        A networkx graph

    Raises
    ------
    NetworkXError
        If the graph has no edges.

    """
    n_all_edge = G.number_of_edges()
    # Guard before building the undirected copy below, which is wasted
    # work (and the error is clearer) when the graph has no edges.
    if n_all_edge == 0:
        raise NetworkXError("Not defined for empty graphs")

    # A mutual pair contributes two directed edges but collapses to one
    # undirected edge, so the difference counts mutual pairs.
    n_overlap_edge = (n_all_edge - G.to_undirected().number_of_edges()) * 2

    return n_overlap_edge / n_all_edge
diff --git a/llava_next/lib/python3.10/site-packages/networkx/algorithms/smetric.py b/llava_next/lib/python3.10/site-packages/networkx/algorithms/smetric.py
new file mode 100644
index 0000000000000000000000000000000000000000..d985aa805b4fb21300680afe389aae4732793a73
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/algorithms/smetric.py
@@ -0,0 +1,30 @@
+import networkx as nx
+
+__all__ = ["s_metric"]
+
+
@nx._dispatchable
def s_metric(G):
    """Returns the s-metric [1]_ of graph.

    The s-metric is defined as the sum of the products ``deg(u) * deg(v)``
    for every edge ``(u, v)`` in `G`.

    Parameters
    ----------
    G : graph
        The graph used to compute the s-metric.

    Returns
    -------
    s : float
        The s-metric of the graph.

    References
    ----------
    .. [1] Lun Li, David Alderson, John C. Doyle, and Walter Willinger,
           Towards a Theory of Scale-Free Graphs:
           Definition, Properties, and Implications (Extended Version), 2005.
           https://arxiv.org/abs/cond-mat/0501169
    """
    degree_of = G.degree
    total = 0
    for u, v in G.edges():
        total += degree_of(u) * degree_of(v)
    return float(total)
diff --git a/llava_next/lib/python3.10/site-packages/networkx/algorithms/structuralholes.py b/llava_next/lib/python3.10/site-packages/networkx/algorithms/structuralholes.py
new file mode 100644
index 0000000000000000000000000000000000000000..bae42d060af9e2c8bc5d9732b5bc7905d3d895b9
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/algorithms/structuralholes.py
@@ -0,0 +1,283 @@
+"""Functions for computing measures of structural holes."""
+
+import networkx as nx
+
+__all__ = ["constraint", "local_constraint", "effective_size"]
+
+
@nx._dispatchable(edge_attrs="weight")
def mutual_weight(G, u, v, weight=None):
    """Returns the sum of the weights of the edge from `u` to `v` and
    the edge from `v` to `u` in `G`.

    `weight` is the edge data key that represents the edge weight. If
    the specified key is `None` or is not in the edge data for an edge,
    that edge is assumed to have weight 1.

    Pre-conditions: `u` and `v` must both be in `G`.

    """

    def one_way(a, b):
        # Weight of the edge a -> b: 1 when the weight key is missing,
        # 0 when the edge itself is missing.
        try:
            return G[a][b].get(weight, 1)
        except KeyError:
            return 0

    return one_way(u, v) + one_way(v, u)
+
+
@nx._dispatchable(edge_attrs="weight")
def normalized_mutual_weight(G, u, v, norm=sum, weight=None):
    """Returns normalized mutual weight of the edges from `u` to `v`
    with respect to the mutual weights of the neighbors of `u` in `G`.

    `norm` is a one-argument function (commonly ``sum`` or ``max``)
    applied to the iterable of mutual weights between `u` and each of
    its (in- and out-)neighbors; the result is the normalization factor.

    `weight` is either ``None`` (all edges weigh 1) or the name of the
    edge attribute to use as weight.

    """
    neighbors = set(nx.all_neighbors(G, u))
    scale = norm(mutual_weight(G, u, w, weight) for w in neighbors)
    if scale == 0:
        return 0
    return mutual_weight(G, u, v, weight) / scale
+
+
@nx._dispatchable(edge_attrs="weight")
def effective_size(G, nodes=None, weight=None):
    r"""Returns the effective size of all nodes in the graph ``G``.

    The *effective size* of a node's ego network measures the
    nonredundant part of its relationships [1]_. Formally, the effective
    size of a node $u$, denoted $e(u)$, is defined by

    .. math::

       e(u) = \sum_{v \in N(u) \setminus \{u\}}
       \left(1 - \sum_{w \in N(v)} p_{uw} m_{vw}\right)

    where $N(u)$ is the set of neighbors of $u$, $p_{uw}$ is the
    normalized mutual weight of the (directed or undirected) edges
    joining $u$ and $v$, and $m_{vw}$ is the mutual weight of $v$ and
    $w$ divided by $v$'s highest mutual weight with any neighbor. For
    unweighted, undirected graphs Borgatti's simplified formula
    ``e(u) = n - 2t/n`` is used, with `t` the number of ties in the ego
    network (excluding ego) and `n` the number of nodes (excluding ego) [2]_.

    Parameters
    ----------
    G : NetworkX graph
        The graph containing ``v``. Directed graphs are treated like
        undirected graphs when computing neighbors of ``v``.

    nodes : container, optional
        Container of nodes in the graph ``G`` to compute the effective size.
        If None, the effective size of every node is computed.

    weight : None or string, optional
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    dict
        Dictionary with nodes as keys and the effective size of the node as values.

    Notes
    -----
    Efficiency (Burt) is effective size divided by degree:

    >>> G = nx.DiGraph()
    >>> G.add_edges_from([(0, 1), (0, 2), (1, 0), (2, 1)])
    >>> esize = nx.effective_size(G)
    >>> efficiency = {n: v / G.degree(n) for n, v in esize.items()}

    See also
    --------
    constraint

    References
    ----------
    .. [1] Burt, Ronald S.
           *Structural Holes: The Social Structure of Competition.*
           Cambridge: Harvard University Press, 1995.

    .. [2] Borgatti, S.
           "Structural Holes: Unpacking Burt's Redundancy Measures"
           CONNECTIONS 20(1):35-38.
           http://www.analytictech.com/connections/v20(1)/holes.htm

    """

    def redundancy(G, u, v, weight=None):
        # 1 minus the overlap between u's and v's normalized ties.
        nmw = normalized_mutual_weight
        overlap = sum(
            nmw(G, u, w, weight=weight) * nmw(G, v, w, norm=max, weight=weight)
            for w in set(nx.all_neighbors(G, u))
        )
        return 1 - overlap

    if nodes is None:
        nodes = G
    result = {}
    if not G.is_directed() and weight is None:
        # Borgatti's simplified formula applies only here.
        for node in nodes:
            # Effective size is not defined for isolated nodes.
            if len(G[node]) == 0:
                result[node] = float("nan")
                continue
            ego = nx.ego_graph(G, node, center=False, undirected=True)
            result[node] = len(ego) - (2 * ego.size()) / len(ego)
    else:
        for node in nodes:
            # Effective size is not defined for isolated nodes.
            if len(G[node]) == 0:
                result[node] = float("nan")
                continue
            result[node] = sum(
                redundancy(G, node, nbr, weight)
                for nbr in set(nx.all_neighbors(G, node))
            )
    return result
+
+
@nx._dispatchable(edge_attrs="weight")
def constraint(G, nodes=None, weight=None):
    r"""Returns the constraint on all nodes in the graph ``G``.

    The *constraint* measures the extent to which a node *v* is invested
    in those nodes that are themselves invested in the neighbors of *v*.
    Formally,

    .. math::

       c(v) = \sum_{w \in N(v) \setminus \{v\}} \ell(v, w)

    where $N(v)$ is the set of predecessor/successor neighbors of `v`
    and $\ell(v, w)$ is the local constraint (see
    :func:`local_constraint`) [1]_.

    Parameters
    ----------
    G : NetworkX graph
        The graph containing ``v``. This can be either directed or undirected.

    nodes : container, optional
        Container of nodes in the graph ``G`` to compute the constraint. If
        None, the constraint of every node is computed.

    weight : None or string, optional
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    dict
        Dictionary with nodes as keys and the constraint on the node as values.

    See also
    --------
    local_constraint

    References
    ----------
    .. [1] Burt, Ronald S.
           "Structural holes and good ideas".
           American Journal of Sociology (110): 349–399.

    """
    if nodes is None:
        nodes = G
    result = {}
    for node in nodes:
        if len(G[node]) == 0:
            # Constraint is not defined for isolated nodes.
            result[node] = float("nan")
        else:
            result[node] = sum(
                local_constraint(G, node, nbr, weight)
                for nbr in set(nx.all_neighbors(G, node))
            )
    return result
+
+
@nx._dispatchable(edge_attrs="weight")
def local_constraint(G, u, v, weight=None):
    r"""Returns the local constraint on the node ``u`` with respect to
    the node ``v`` in the graph ``G``.

    Formally,

    .. math::

       \ell(u, v) = \left(p_{uv} + \sum_{w \in N(v)} p_{uw} p_{wv}\right)^2,

    where $N(v)$ is the set of neighbors of $v$ and $p_{uv}$ is the
    normalized mutual weight of the (directed or undirected) edges
    joining $u$ and $v$ [1]_ (edge weights default to one if the graph
    is unweighted).

    Parameters
    ----------
    G : NetworkX graph
        The graph containing ``u`` and ``v``. This can be either
        directed or undirected.

    u : node
        A node in the graph ``G``.

    v : node
        A node in the graph ``G``.

    weight : None or string, optional
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    float
        The constraint of the node ``v`` in the graph ``G``.

    See also
    --------
    constraint

    References
    ----------
    .. [1] Burt, Ronald S.
           "Structural holes and good ideas".
           American Journal of Sociology (110): 349–399.

    """
    p = normalized_mutual_weight
    direct = p(G, u, v, weight=weight)
    indirect = 0
    for w in set(nx.all_neighbors(G, u)):
        indirect += p(G, u, w, weight=weight) * p(G, w, v, weight=weight)
    return (direct + indirect) ** 2
diff --git a/llava_next/lib/python3.10/site-packages/networkx/algorithms/tournament.py b/llava_next/lib/python3.10/site-packages/networkx/algorithms/tournament.py
new file mode 100644
index 0000000000000000000000000000000000000000..25c1983e6a6d0a5dbba05d28f127d3ef62140117
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/algorithms/tournament.py
@@ -0,0 +1,403 @@
+"""Functions concerning tournament graphs.
+
+A `tournament graph`_ is a complete oriented graph. In other words, it
+is a directed graph in which there is exactly one directed edge joining
+each pair of distinct nodes. For each function in this module that
+accepts a graph as input, you must provide a tournament graph. The
+responsibility is on the caller to ensure that the graph is a tournament
+graph:
+
+ >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
+ >>> nx.is_tournament(G)
+ True
+
+To access the functions in this module, you must access them through the
+:mod:`networkx.tournament` module::
+
+ >>> nx.tournament.is_reachable(G, 0, 1)
+ True
+
+.. _tournament graph: https://en.wikipedia.org/wiki/Tournament_%28graph_theory%29
+
+"""
+
+from itertools import combinations
+
+import networkx as nx
+from networkx.algorithms.simple_paths import is_simple_path as is_path
+from networkx.utils import arbitrary_element, not_implemented_for, py_random_state
+
+__all__ = [
+ "hamiltonian_path",
+ "is_reachable",
+ "is_strongly_connected",
+ "is_tournament",
+ "random_tournament",
+ "score_sequence",
+]
+
+
def index_satisfying(iterable, condition):
    """Returns the index of the first element in `iterable` that
    satisfies the given condition.

    If no element satisfies the condition, the length of the iterable
    is returned (one greater than its last index).

    Raises :exc:`ValueError` if `iterable` is empty.

    """
    last_index = None
    for last_index, element in enumerate(iterable):
        if condition(element):
            return last_index
    # The loop exhausted the iterable without a match. If it never ran,
    # the iterable was empty and the function is undefined.
    if last_index is None:
        raise ValueError("iterable must be non-empty")
    return last_index + 1
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_tournament(G):
    """Returns True if and only if `G` is a tournament.

    A tournament is a directed graph, with neither self-loops nor
    multi-edges, in which there is exactly one directed edge joining
    each pair of distinct nodes.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    Returns
    -------
    bool
        Whether the given graph is a tournament graph.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
    >>> nx.is_tournament(G)
    True

    Notes
    -----
    Some definitions require a self-loop on each node, but that is not
    the convention used here.

    """
    if nx.number_of_selfloops(G) != 0:
        return False
    # Exactly one of the two possible directed edges must exist per pair.
    return all((v in G[u]) != (u in G[v]) for u, v in combinations(G, 2))
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable
def hamiltonian_path(G):
    """Returns a Hamiltonian path in the given tournament graph.

    Each tournament has a Hamiltonian path. If furthermore, the
    tournament is strongly connected, then the returned Hamiltonian path
    is a Hamiltonian cycle (by joining the endpoints of the path).

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    Returns
    -------
    path : list
        A list of nodes which form a Hamiltonian path in `G`.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)])
    >>> nx.is_tournament(G)
    True
    >>> nx.tournament.hamiltonian_path(G)
    [0, 1, 2, 3]

    Notes
    -----
    Recursive implementation running in $O(n^2)$ time (up to
    polylogarithmic factors), where $n$ is the number of nodes.

    """
    if len(G) == 0:
        return []
    if len(G) == 1:
        return [arbitrary_element(G)]
    pivot = arbitrary_element(G)
    # Solve recursively on the remaining nodes, then splice the pivot in
    # just before the first node that has no edge toward it.
    path = hamiltonian_path(G.subgraph(set(G) - {pivot}))
    position = index_satisfying(path, lambda u: pivot not in G[u])
    path.insert(position, pivot)
    return path
+
+
@py_random_state(1)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_tournament(n, seed=None):
    r"""Returns a random tournament graph on `n` nodes.

    Parameters
    ----------
    n : int
        The number of nodes in the returned graph.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness`.

    Returns
    -------
    G : DiGraph
        A tournament on `n` nodes, with exactly one directed edge joining
        each pair of distinct nodes.

    Notes
    -----
    Each of the `\binom{n}{2}` node pairs gets an edge whose orientation
    is decided by an independent flip of an unbiased coin.

    """
    edges = []
    for u, v in combinations(range(n), 2):
        # One coin flip per pair decides the edge direction.
        if seed.random() < 0.5:
            edges.append((u, v))
        else:
            edges.append((v, u))
    return nx.DiGraph(edges)
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable
def score_sequence(G):
    """Returns the score sequence for the given tournament graph.

    The score sequence is the sorted list of the out-degrees of the
    nodes of the graph.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    Returns
    -------
    list
        A sorted list of the out-degrees of the nodes of `G`.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 0), (1, 3), (0, 2), (0, 3), (2, 1), (3, 2)])
    >>> nx.is_tournament(G)
    True
    >>> nx.tournament.score_sequence(G)
    [1, 1, 2, 2]

    """
    scores = [degree for _, degree in G.out_degree()]
    scores.sort()
    return scores
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}})
def tournament_matrix(G):
    r"""Returns the tournament matrix for the given tournament graph.

    This function requires SciPy.

    The *tournament matrix* of a tournament graph with edge set *E* is
    the matrix *T* defined by

    .. math::

       T_{i j} =
       \begin{cases}
       +1 & \text{if } (i, j) \in E \\
       -1 & \text{if } (j, i) \in E \\
       0 & \text{if } i == j.
       \end{cases}

    An equivalent definition is `T = A - A^T`, where *A* is the
    adjacency matrix of the graph `G`.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    Returns
    -------
    SciPy sparse array
        The tournament matrix of the tournament graph `G`.

    Raises
    ------
    ImportError
        If SciPy is not available.

    """
    adjacency = nx.adjacency_matrix(G)
    return adjacency - adjacency.transpose()
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_reachable(G, s, t):
    """Decides whether there is a path from `s` to `t` in the
    tournament.

    This function is more theoretically efficient than the reachability
    checks than the shortest path algorithms in
    :mod:`networkx.algorithms.shortest_paths`.

    The given graph **must** be a tournament, otherwise this function's
    behavior is undefined.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    s : node
        A node in the graph.

    t : node
        A node in the graph.

    Returns
    -------
    bool
        Whether there is a path from `s` to `t` in `G`.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 0), (1, 3), (1, 2), (2, 3), (2, 0), (3, 0)])
    >>> nx.is_tournament(G)
    True
    >>> nx.tournament.is_reachable(G, 1, 3)
    True
    >>> nx.tournament.is_reachable(G, 3, 2)
    False

    Notes
    -----
    A theoretical speedup over generic shortest-path functions requires
    parallelism; the current implementation is sequential.

    This algorithm comes from [1].

    References
    ----------
    .. [1] Tantau, Till.
           "A note on the complexity of the reachability problem for
           tournaments."
           *Electronic Colloquium on Computational Complexity*. 2001.

    """

    def two_neighborhood(G, v):
        # Nodes at distance at most two from `v`: v itself, its
        # out-neighbors, and nodes reachable via one intermediate node.
        reach = set()
        for x in G:
            if x == v or x in G[v] or any(is_path(G, [v, z, x]) for z in G):
                reach.add(x)
        return reach

    def is_closed(G, nodes):
        # A set S is closed when every node outside S has an edge into
        # every node of S.
        return all(v in G[u] for u in set(G) - nodes for v in nodes)

    # `t` is unreachable from `s` exactly when some closed
    # two-neighborhood contains `s` but not `t`.
    for S in (two_neighborhood(G, v) for v in G):
        if is_closed(G, S) and s in S and t not in S:
            return False
    return True
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable(name="tournament_is_strongly_connected")
def is_strongly_connected(G):
    """Decides whether the given tournament is strongly connected.

    This function is more theoretically efficient than the
    :func:`~networkx.algorithms.components.is_strongly_connected`
    function.

    The given graph **must** be a tournament, otherwise this function's
    behavior is undefined.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    Returns
    -------
    bool
        Whether the tournament is strongly connected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2), (1, 3), (2, 3), (3, 0)])
    >>> nx.is_tournament(G)
    True
    >>> nx.tournament.is_strongly_connected(G)
    True
    >>> G.remove_edge(3, 0)
    >>> G.add_edge(0, 3)
    >>> nx.is_tournament(G)
    True
    >>> nx.tournament.is_strongly_connected(G)
    False

    Notes
    -----
    A theoretical speedup over the generic strong-connectivity function
    requires parallelism; the current implementation is sequential.

    This algorithm comes from [1].

    References
    ----------
    .. [1] Tantau, Till.
           "A note on the complexity of the reachability problem for
           tournaments."
           *Electronic Colloquium on Computational Complexity*. 2001.

    """
    # Strongly connected iff every ordered pair of nodes is reachable.
    for u in G:
        for v in G:
            if not is_reachable(G, u, v):
                return False
    return True
diff --git a/llava_next/lib/python3.10/site-packages/networkx/algorithms/triads.py b/llava_next/lib/python3.10/site-packages/networkx/algorithms/triads.py
new file mode 100644
index 0000000000000000000000000000000000000000..640fc304e2393bb0ee2a004f6bf9dc8d197c01b2
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/algorithms/triads.py
@@ -0,0 +1,604 @@
+# See https://github.com/networkx/networkx/pull/1474
+# Copyright 2011 Reya Group
+# Copyright 2011 Alex Levenson
+# Copyright 2011 Diederik van Liere
+"""Functions for analyzing triads of a graph."""
+
+from collections import defaultdict
+from itertools import combinations, permutations
+
+import networkx as nx
+from networkx.utils import not_implemented_for, py_random_state
+
+__all__ = [
+ "triadic_census",
+ "is_triad",
+ "all_triplets",
+ "all_triads",
+ "triads_by_type",
+ "triad_type",
+ "random_triad",
+]
+
#: Integer codes for the 64 possible edge configurations of a triad.
#:
#: Triads that are the same up to symmetry have the same code.
TRICODES = (
    1, 2, 2, 3, 2, 4, 6, 8, 2, 6, 5, 7, 3, 8, 7, 11,
    2, 6, 4, 8, 5, 9, 9, 13, 6, 10, 9, 14, 7, 14, 12, 15,
    2, 5, 6, 7, 6, 9, 10, 14, 4, 9, 9, 12, 8, 13, 14, 15,
    3, 7, 8, 11, 7, 12, 14, 15, 8, 14, 13, 15, 11, 15, 15, 16,
)

#: The names of each type of triad. The order of the elements is
#: important: it corresponds to the tricodes given in :data:`TRICODES`.
TRIAD_NAMES = (
    "003", "012", "102", "021D", "021U", "021C", "111D", "111U",
    "030T", "030C", "201", "120D", "120U", "120C", "210", "300",
)


#: A dictionary mapping triad code to triad name.
TRICODE_TO_NAME = {
    code: TRIAD_NAMES[TRICODES[code] - 1] for code in range(len(TRICODES))
}
+
+
+def _tricode(G, v, u, w):
+ """Returns the integer code of the given triad.
+
+ This is some fancy magic that comes from Batagelj and Mrvar's paper. It
+ treats each edge joining a pair of `v`, `u`, and `w` as a bit in
+ the binary representation of an integer.
+
+ """
+ combos = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16), (w, u, 32))
+ return sum(x for u, v, x in combos if v in G[u])
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def triadic_census(G, nodelist=None):
    """Determines the triadic census of a directed graph.

    The triadic census is a count of how many of the 16 possible types of
    triads are present in a directed graph. If a list of nodes is passed, then
    only those triads are taken into account which have elements of nodelist in them.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph
    nodelist : list
        List of nodes for which you want to calculate triadic census

    Returns
    -------
    census : dict
       Dictionary with triad type as keys and number of occurrences as values.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)])
    >>> triadic_census = nx.triadic_census(G)
    >>> for key, value in triadic_census.items():
    ...     print(f"{key}: {value}")
    003: 0
    012: 0
    102: 0
    021D: 0
    021U: 0
    021C: 0
    111D: 0
    111U: 0
    030T: 2
    030C: 2
    201: 0
    120D: 0
    120U: 0
    120C: 0
    210: 0
    300: 0

    Notes
    -----
    This algorithm has complexity $O(m)$ where $m$ is the number of edges in
    the graph.

    For undirected graphs, the triadic census can be computed by first converting
    the graph into a directed graph using the ``G.to_directed()`` method.
    After this conversion, only the triad types 003, 102, 201 and 300 will be
    present in the undirected scenario.

    Raises
    ------
    ValueError
        If `nodelist` contains duplicate nodes or nodes not in `G`.
        If you want to ignore this you can preprocess with `set(nodelist) & G.nodes`

    See also
    --------
    triad_graph

    References
    ----------
    .. [1] Vladimir Batagelj and Andrej Mrvar, A subquadratic triad census
        algorithm for large sparse networks with small maximum degree,
        University of Ljubljana,
        http://vlado.fmf.uni-lj.si/pub/networks/doc/triads/triads.pdf

    """
    # nbunch_iter silently drops duplicates and missing nodes, so a size
    # mismatch against the raw nodelist signals invalid input.
    nodeset = set(G.nbunch_iter(nodelist))
    if nodelist is not None and len(nodelist) != len(nodeset):
        raise ValueError("nodelist includes duplicate nodes or nodes not in G")

    N = len(G)
    Nnot = N - len(nodeset)  # can signal special counting for subset of nodes

    # create an ordering of nodes with nodeset nodes first
    m = {n: i for i, n in enumerate(nodeset)}
    if Nnot:
        # add non-nodeset nodes later in the ordering
        not_nodeset = G.nodes - nodeset
        m.update((n, i + N) for i, n in enumerate(not_nodeset))

    # build all_neighbor dicts for easy counting
    # After Python 3.8 can leave off these keys(). Speedup also using G._pred
    # nbrs = {n: G._pred[n].keys() | G._succ[n].keys() for n in G}
    # nbrs: all neighbors (either direction); dbl_nbrs: reciprocated only.
    nbrs = {n: G.pred[n].keys() | G.succ[n].keys() for n in G}
    dbl_nbrs = {n: G.pred[n].keys() & G.succ[n].keys() for n in G}

    if Nnot:
        # sgl_nbrs: neighbors joined by exactly one directed edge.
        sgl_nbrs = {n: G.pred[n].keys() ^ G.succ[n].keys() for n in not_nodeset}
        # find number of edges not incident to nodes in nodeset
        sgl = sum(1 for n in not_nodeset for nbr in sgl_nbrs[n] if nbr not in nodeset)
        sgl_edges_outside = sgl // 2
        dbl = sum(1 for n in not_nodeset for nbr in dbl_nbrs[n] if nbr not in nodeset)
        dbl_edges_outside = dbl // 2

    # Initialize the count for each triad to be zero.
    census = {name: 0 for name in TRIAD_NAMES}
    # Main loop over nodes
    for v in nodeset:
        vnbrs = nbrs[v]
        dbl_vnbrs = dbl_nbrs[v]
        if Nnot:
            # set up counts of edges attached to v.
            sgl_unbrs_bdy = sgl_unbrs_out = dbl_unbrs_bdy = dbl_unbrs_out = 0
        for u in vnbrs:
            # Process each connected pair only once, per the ordering m.
            if m[u] <= m[v]:
                continue
            unbrs = nbrs[u]
            neighbors = (vnbrs | unbrs) - {u, v}
            # Count connected triads.
            for w in neighbors:
                # The guard ensures each triad {v, u, w} is counted exactly
                # once (Batagelj & Mrvar's canonical-ordering condition).
                if m[u] < m[w] or (m[v] < m[w] < m[u] and v not in nbrs[w]):
                    code = _tricode(G, v, u, w)
                    census[TRICODE_TO_NAME[code]] += 1

            # Use a formula for dyadic triads with edge incident to v
            if u in dbl_vnbrs:
                census["102"] += N - len(neighbors) - 2
            else:
                census["012"] += N - len(neighbors) - 2

            # Count edges attached to v. Subtract later to get triads with v isolated
            # _out are (u,unbr) for unbrs outside boundary of nodeset
            # _bdy are (u,unbr) for unbrs on boundary of nodeset (get double counted)
            if Nnot and u not in nodeset:
                sgl_unbrs = sgl_nbrs[u]
                sgl_unbrs_bdy += len(sgl_unbrs & vnbrs - nodeset)
                sgl_unbrs_out += len(sgl_unbrs - vnbrs - nodeset)
                dbl_unbrs = dbl_nbrs[u]
                dbl_unbrs_bdy += len(dbl_unbrs & vnbrs - nodeset)
                dbl_unbrs_out += len(dbl_unbrs - vnbrs - nodeset)
        # if nodeset == G.nodes, skip this b/c we will find the edge later.
        if Nnot:
            # Count edges outside nodeset not connected with v (v isolated triads)
            census["012"] += sgl_edges_outside - (sgl_unbrs_out + sgl_unbrs_bdy // 2)
            census["102"] += dbl_edges_outside - (dbl_unbrs_out + dbl_unbrs_bdy // 2)

    # calculate null triads: "003"
    # null triads = total number of possible triads - all found triads
    total_triangles = (N * (N - 1) * (N - 2)) // 6
    triangles_without_nodeset = (Nnot * (Nnot - 1) * (Nnot - 2)) // 6
    total_census = total_triangles - triangles_without_nodeset
    census["003"] = total_census - sum(census.values())

    return census
+
+
@nx._dispatchable
def is_triad(G):
    """Returns True if the graph G is a triad, else False.

    A triad is a directed graph on exactly three nodes with no
    self-loops.

    Parameters
    ----------
    G : graph
       A NetworkX Graph

    Returns
    -------
    istriad : boolean
       Whether G is a valid triad

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
    >>> nx.is_triad(G)
    True
    >>> G.add_edge(0, 1)
    >>> nx.is_triad(G)
    False
    """
    # Guard clauses replace the original nested conditionals; the original
    # also fell off the end and returned None (not False) for non-Graph
    # inputs, so this now always returns a bool.
    if not isinstance(G, nx.Graph):
        return False
    if G.order() != 3 or not nx.is_directed(G):
        return False
    # A triad must not contain self-loops.
    return not any((n, n) in G.edges() for n in G.nodes())
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def all_triplets(G):
    """Returns a generator of all possible sets of 3 nodes in a DiGraph.

    .. deprecated:: 3.3

       all_triplets is deprecated and will be removed in NetworkX version 3.5.
       Use `itertools.combinations` instead::

          all_triplets = itertools.combinations(G, 3)

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph

    Returns
    -------
    triplets : generator of 3-tuples
       Generator of tuples of 3 nodes

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
    >>> list(nx.all_triplets(G))
    [(1, 2, 3), (1, 2, 4), (1, 3, 4), (2, 3, 4)]

    """
    import warnings

    warnings.warn(
        (
            "\n\nall_triplets is deprecated and will be removed in v3.5.\n"
            "Use `itertools.combinations(G, 3)` instead."
        ),
        category=DeprecationWarning,
        stacklevel=4,
    )
    return combinations(G.nodes(), 3)
+
+
@not_implemented_for("undirected")
@nx._dispatchable(returns_graph=True)
def all_triads(G):
    """A generator of all possible triads in G.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph

    Returns
    -------
    all_triads : generator of DiGraphs
       Generator of triads (order-3 DiGraphs)

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)])
    >>> for triad in nx.all_triads(G):
    ...     print(triad.edges)
    [(1, 2), (2, 3), (3, 1)]
    [(1, 2), (4, 1), (4, 2)]
    [(3, 1), (3, 4), (4, 1)]
    [(2, 3), (3, 4), (4, 2)]

    """
    # Yield an independent copy of the induced subgraph for each node triple.
    for triple in combinations(G.nodes(), 3):
        yield G.subgraph(triple).copy()
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def triads_by_type(G):
    """Returns a list of all triads for each triad type in a directed graph.
    There are exactly 16 different types of triads possible. Suppose 1, 2, 3 are three
    nodes, they will be classified as a particular triad type if their connections
    are as follows:

    - 003: 1, 2, 3
    - 012: 1 -> 2, 3
    - 102: 1 <-> 2, 3
    - 021D: 1 <- 2 -> 3
    - 021U: 1 -> 2 <- 3
    - 021C: 1 -> 2 -> 3
    - 111D: 1 <-> 2 <- 3
    - 111U: 1 <-> 2 -> 3
    - 030T: 1 -> 2 -> 3, 1 -> 3
    - 030C: 1 <- 2 <- 3, 1 -> 3
    - 201: 1 <-> 2 <-> 3
    - 120D: 1 <- 2 -> 3, 1 <-> 3
    - 120U: 1 -> 2 <- 3, 1 <-> 3
    - 120C: 1 -> 2 -> 3, 1 <-> 3
    - 210: 1 -> 2 <-> 3, 1 <-> 3
    - 300: 1 <-> 2 <-> 3, 1 <-> 3

    Refer to the :doc:`example gallery `
    for visual examples of the triad types.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph

    Returns
    -------
    tri_by_type : dict
       Dictionary with triad types as keys and lists of triads as values.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)])
    >>> dict = nx.triads_by_type(G)
    >>> dict["120C"][0].edges()
    OutEdgeView([(1, 2), (1, 3), (2, 3), (3, 1)])
    >>> dict["012"][0].edges()
    OutEdgeView([(1, 2)])

    References
    ----------
    .. [1] Snijders, T. (2012). "Transitivity and triads." University of
        Oxford.
        https://web.archive.org/web/20170830032057/http://www.stats.ox.ac.uk/~snijders/Trans_Triads_ha.pdf
    """
    # Classify every order-3 induced subgraph and bucket it by type name.
    buckets = defaultdict(list)
    for triad in all_triads(G):
        buckets[triad_type(triad)].append(triad)
    return buckets
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def triad_type(G):
    """Returns the sociological triad type for a triad.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph with 3 nodes

    Returns
    -------
    triad_type : str
       A string identifying the triad type

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
    >>> nx.triad_type(G)
    '030C'
    >>> G.add_edge(1, 3)
    >>> nx.triad_type(G)
    '120C'

    Notes
    -----
    There can be 6 unique edges in a triad (order-3 DiGraph) (so 2^^6=64 unique
    triads given 3 nodes). These 64 triads each display exactly 1 of 16
    topologies of triads (topologies can be permuted). These topologies are
    identified by the following notation:

    {m}{a}{n}{type} (for example: 111D, 210, 102)

    Here:

    {m}     = number of mutual ties (takes 0, 1, 2, 3); a mutual tie is (0,1)
              AND (1,0)
    {a}     = number of asymmetric ties (takes 0, 1, 2, 3); an asymmetric tie
              is (0,1) BUT NOT (1,0) or vice versa
    {n}     = number of null ties (takes 0, 1, 2, 3); a null tie is NEITHER
              (0,1) NOR (1,0)
    {type}  = a letter (takes U, D, C, T) corresponding to up, down, cyclical
              and transitive. This is only used for topologies that can have
              more than one form (eg: 021D and 021U).

    References
    ----------
    .. [1] Snijders, T. (2012). "Transitivity and triads." University of
        Oxford.
        https://web.archive.org/web/20170830032057/http://www.stats.ox.ac.uk/~snijders/Trans_Triads_ha.pdf
    """
    if not is_triad(G):
        raise nx.NetworkXAlgorithmError("G is not a triad (order-3 DiGraph)")
    num_edges = len(G.edges())
    if num_edges == 0:
        return "003"
    elif num_edges == 1:
        return "012"
    elif num_edges == 2:
        e1, e2 = G.edges()
        if set(e1) == set(e2):
            return "102"
        elif e1[0] == e2[0]:
            return "021D"
        elif e1[1] == e2[1]:
            return "021U"
        elif e1[1] == e2[0] or e2[1] == e1[0]:
            return "021C"
    elif num_edges == 3:
        for e1, e2, e3 in permutations(G.edges(), 3):
            if set(e1) == set(e2):
                if e3[0] in e1:
                    return "111U"
                # e3[1] in e1:
                return "111D"
            elif set(e1).symmetric_difference(set(e2)) == set(e3):
                # BUG FIX: the original compared {e1[0], e2[0], e3[0]} with
                # itself (a tautology). A 3-cycle is characterized by both
                # the heads and the tails covering all three nodes.
                if {e1[0], e2[0], e3[0]} == {e1[1], e2[1], e3[1]} == set(G.nodes()):
                    return "030C"
                # e3 == (e1[0], e2[1]) and e2 == (e1[1], e3[1]):
                return "030T"
    elif num_edges == 4:
        for e1, e2, e3, e4 in permutations(G.edges(), 4):
            if set(e1) == set(e2):
                # identify pair of symmetric edges (which necessarily exists)
                if set(e3) == set(e4):
                    return "201"
                if {e3[0]} == {e4[0]} == set(e3).intersection(set(e4)):
                    return "120D"
                if {e3[1]} == {e4[1]} == set(e3).intersection(set(e4)):
                    return "120U"
                if e3[1] == e4[0]:
                    return "120C"
    elif num_edges == 5:
        return "210"
    elif num_edges == 6:
        return "300"
+
+
@not_implemented_for("undirected")
@py_random_state(1)
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def random_triad(G, seed=None):
    """Returns a random triad from a directed graph.

    .. deprecated:: 3.3

       random_triad is deprecated and will be removed in version 3.5.
       Use random sampling directly instead::

          G.subgraph(random.sample(list(G), 3))

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness`.

    Returns
    -------
    G2 : subgraph
       A randomly selected triad (order-3 NetworkX DiGraph)

    Raises
    ------
    NetworkXError
        If the input Graph has less than 3 nodes.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)])
    >>> triad = nx.random_triad(G, seed=1)
    >>> triad.edges
    OutEdgeView([(1, 2)])

    """
    import warnings

    warnings.warn(
        (
            "\n\nrandom_triad is deprecated and will be removed in NetworkX v3.5.\n"
            "Use random.sample instead, e.g.::\n\n"
            "\tG.subgraph(random.sample(list(G), 3))\n"
        ),
        category=DeprecationWarning,
        stacklevel=5,
    )
    if len(G) < 3:
        raise nx.NetworkXError(
            f"G needs at least 3 nodes to form a triad; (it has {len(G)} nodes)"
        )
    sampled = seed.sample(list(G.nodes()), 3)
    return G.subgraph(sampled)
diff --git a/llava_next/lib/python3.10/site-packages/networkx/conftest.py b/llava_next/lib/python3.10/site-packages/networkx/conftest.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a261c6d0d6f1a31c55349c2cee6776f9b2ba6c4
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/conftest.py
@@ -0,0 +1,284 @@
+"""
+Testing
+=======
+
+General guidelines for writing good tests:
+
+- doctests always assume ``import networkx as nx`` so don't add that
+- prefer pytest fixtures over classes with setup methods.
+- use the ``@pytest.mark.parametrize`` decorator
+- use ``pytest.importorskip`` for numpy, scipy, pandas, and matplotlib b/c of PyPy.
+ and add the module to the relevant entries below.
+
+"""
+
+import os
+import sys
+import warnings
+from importlib.metadata import entry_points
+
+import pytest
+
+import networkx
+
+
+def pytest_addoption(parser):
+ parser.addoption(
+ "--runslow", action="store_true", default=False, help="run slow tests"
+ )
+ parser.addoption(
+ "--backend",
+ action="store",
+ default=None,
+ help="Run tests with a backend by auto-converting nx graphs to backend graphs",
+ )
+ parser.addoption(
+ "--fallback-to-nx",
+ action="store_true",
+ default=False,
+ help="Run nx function if a backend doesn't implement a dispatchable function"
+ " (use with --backend)",
+ )
+
+
+def pytest_configure(config):
+ config.addinivalue_line("markers", "slow: mark test as slow to run")
+ backend = config.getoption("--backend")
+ if backend is None:
+ backend = os.environ.get("NETWORKX_TEST_BACKEND")
+ # nx_loopback backend is only available when testing with a backend
+ loopback_ep = entry_points(name="nx_loopback", group="networkx.backends")
+ if not loopback_ep:
+ warnings.warn(
+ "\n\n WARNING: Mixed NetworkX configuration! \n\n"
+ " This environment has mixed configuration for networkx.\n"
+ " The test object nx_loopback is not configured correctly.\n"
+ " You should not be seeing this message.\n"
+ " Try `pip install -e .`, or change your PYTHONPATH\n"
+ " Make sure python finds the networkx repo you are testing\n\n"
+ )
+ config.backend = backend
+ if backend:
+ # We will update `networkx.config.backend_priority` below in `*_modify_items`
+ # to allow tests to get set up with normal networkx graphs.
+ networkx.utils.backends.backends["nx_loopback"] = loopback_ep["nx_loopback"]
+ networkx.utils.backends.backend_info["nx_loopback"] = {}
+ networkx.config.backends = networkx.utils.Config(
+ nx_loopback=networkx.utils.Config(),
+ **networkx.config.backends,
+ )
+ fallback_to_nx = config.getoption("--fallback-to-nx")
+ if not fallback_to_nx:
+ fallback_to_nx = os.environ.get("NETWORKX_FALLBACK_TO_NX")
+ networkx.config.fallback_to_nx = bool(fallback_to_nx)
+
+
+def pytest_collection_modifyitems(config, items):
+ # Setting this to True here allows tests to be set up before dispatching
+ # any function call to a backend.
+ if config.backend:
+ # Allow pluggable backends to add markers to tests (such as skip or xfail)
+ # when running in auto-conversion test mode
+ backend_name = config.backend
+ if backend_name != "networkx":
+ networkx.utils.backends._dispatchable._is_testing = True
+ networkx.config.backend_priority.algos = [backend_name]
+ networkx.config.backend_priority.generators = [backend_name]
+ backend = networkx.utils.backends.backends[backend_name].load()
+ if hasattr(backend, "on_start_tests"):
+ getattr(backend, "on_start_tests")(items)
+
+ if config.getoption("--runslow"):
+ # --runslow given in cli: do not skip slow tests
+ return
+ skip_slow = pytest.mark.skip(reason="need --runslow option to run")
+ for item in items:
+ if "slow" in item.keywords:
+ item.add_marker(skip_slow)
+
+
+# TODO: The warnings below need to be dealt with, but for now we silence them.
+@pytest.fixture(autouse=True)
+def set_warnings():
+ warnings.filterwarnings(
+ "ignore",
+ category=FutureWarning,
+ message="\n\nsingle_target_shortest_path_length",
+ )
+ warnings.filterwarnings(
+ "ignore",
+ category=FutureWarning,
+ message="\n\nshortest_path",
+ )
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message="\n\nThe `normalized`"
+ )
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message="\n\nall_triplets"
+ )
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message="\n\nrandom_triad"
+ )
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message="minimal_d_separator"
+ )
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message="d_separated"
+ )
+ warnings.filterwarnings("ignore", category=DeprecationWarning, message="\n\nk_core")
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message="\n\nk_shell"
+ )
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message="\n\nk_crust"
+ )
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message="\n\nk_corona"
+ )
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message="\n\ntotal_spanning_tree_weight"
+ )
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message=r"\n\nThe 'create=matrix'"
+ )
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message="\n\n`compute_v_structures"
+ )
+ warnings.filterwarnings(
+ "ignore", category=DeprecationWarning, message="Keyword argument 'link'"
+ )
+
+
+@pytest.fixture(autouse=True)
+def add_nx(doctest_namespace):
+ doctest_namespace["nx"] = networkx
+
+
+# What dependencies are installed?
+
+try:
+ import numpy
+
+ has_numpy = True
+except ImportError:
+ has_numpy = False
+
+try:
+ import scipy
+
+ has_scipy = True
+except ImportError:
+ has_scipy = False
+
+try:
+ import matplotlib
+
+ has_matplotlib = True
+except ImportError:
+ has_matplotlib = False
+
+try:
+ import pandas
+
+ has_pandas = True
+except ImportError:
+ has_pandas = False
+
+try:
+ import pygraphviz
+
+ has_pygraphviz = True
+except ImportError:
+ has_pygraphviz = False
+
+try:
+ import pydot
+
+ has_pydot = True
+except ImportError:
+ has_pydot = False
+
+try:
+ import sympy
+
+ has_sympy = True
+except ImportError:
+ has_sympy = False
+
+
+# List of files that pytest should ignore
+
+collect_ignore = []
+
+needs_numpy = [
+ "algorithms/approximation/traveling_salesman.py",
+ "algorithms/centrality/current_flow_closeness.py",
+ "algorithms/centrality/laplacian.py",
+ "algorithms/node_classification.py",
+ "algorithms/non_randomness.py",
+ "algorithms/polynomials.py",
+ "algorithms/shortest_paths/dense.py",
+ "algorithms/tree/mst.py",
+ "drawing/nx_latex.py",
+ "generators/expanders.py",
+ "linalg/bethehessianmatrix.py",
+ "linalg/laplacianmatrix.py",
+ "utils/misc.py",
+]
+needs_scipy = [
+ "algorithms/approximation/traveling_salesman.py",
+ "algorithms/assortativity/correlation.py",
+ "algorithms/assortativity/mixing.py",
+ "algorithms/assortativity/pairs.py",
+ "algorithms/bipartite/matrix.py",
+ "algorithms/bipartite/spectral.py",
+ "algorithms/centrality/current_flow_betweenness.py",
+ "algorithms/centrality/current_flow_betweenness_subset.py",
+ "algorithms/centrality/eigenvector.py",
+ "algorithms/centrality/katz.py",
+ "algorithms/centrality/laplacian.py",
+ "algorithms/centrality/second_order.py",
+ "algorithms/centrality/subgraph_alg.py",
+ "algorithms/communicability_alg.py",
+ "algorithms/community/divisive.py",
+ "algorithms/distance_measures.py",
+ "algorithms/link_analysis/hits_alg.py",
+ "algorithms/link_analysis/pagerank_alg.py",
+ "algorithms/node_classification.py",
+ "algorithms/similarity.py",
+ "algorithms/tree/mst.py",
+ "algorithms/walks.py",
+ "convert_matrix.py",
+ "drawing/layout.py",
+ "drawing/nx_pylab.py",
+ "generators/spectral_graph_forge.py",
+ "generators/expanders.py",
+ "linalg/algebraicconnectivity.py",
+ "linalg/attrmatrix.py",
+ "linalg/bethehessianmatrix.py",
+ "linalg/graphmatrix.py",
+ "linalg/laplacianmatrix.py",
+ "linalg/modularitymatrix.py",
+ "linalg/spectrum.py",
+ "utils/rcm.py",
+]
+needs_matplotlib = ["drawing/nx_pylab.py", "generators/classic.py"]
+needs_pandas = ["convert_matrix.py"]
+needs_pygraphviz = ["drawing/nx_agraph.py"]
+needs_pydot = ["drawing/nx_pydot.py"]
+needs_sympy = ["algorithms/polynomials.py"]
+
+if not has_numpy:
+ collect_ignore += needs_numpy
+if not has_scipy:
+ collect_ignore += needs_scipy
+if not has_matplotlib:
+ collect_ignore += needs_matplotlib
+if not has_pandas:
+ collect_ignore += needs_pandas
+if not has_pygraphviz:
+ collect_ignore += needs_pygraphviz
+if not has_pydot:
+ collect_ignore += needs_pydot
+if not has_sympy:
+ collect_ignore += needs_sympy
diff --git a/llava_next/lib/python3.10/site-packages/networkx/convert.py b/llava_next/lib/python3.10/site-packages/networkx/convert.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f89c35d8067bf06768109f73caa3436d3719662
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/convert.py
@@ -0,0 +1,502 @@
+"""Functions to convert NetworkX graphs to and from other formats.
+
+The preferred way of converting data to a NetworkX graph is through the
+graph constructor. The constructor calls the to_networkx_graph() function
+which attempts to guess the input type and convert it automatically.
+
+Examples
+--------
+Create a graph with a single edge from a dictionary of dictionaries
+
+>>> d = {0: {1: 1}} # dict-of-dicts single edge (0,1)
+>>> G = nx.Graph(d)
+
+See Also
+--------
+nx_agraph, nx_pydot
+"""
+
+import warnings
+from collections.abc import Collection, Generator, Iterator
+
+import networkx as nx
+
+__all__ = [
+ "to_networkx_graph",
+ "from_dict_of_dicts",
+ "to_dict_of_dicts",
+ "from_dict_of_lists",
+ "to_dict_of_lists",
+ "from_edgelist",
+ "to_edgelist",
+]
+
+
+def to_networkx_graph(data, create_using=None, multigraph_input=False):
+ """Make a NetworkX graph from a known data structure.
+
+ The preferred way to call this is automatically
+ from the class constructor
+
+ >>> d = {0: {1: {"weight": 1}}} # dict-of-dicts single edge (0,1)
+ >>> G = nx.Graph(d)
+
+ instead of the equivalent
+
+ >>> G = nx.from_dict_of_dicts(d)
+
+ Parameters
+ ----------
+ data : object to be converted
+
+ Current known types are:
+ any NetworkX graph
+ dict-of-dicts
+ dict-of-lists
+ container (e.g. set, list, tuple) of edges
+ iterator (e.g. itertools.chain) that produces edges
+ generator of edges
+ Pandas DataFrame (row per edge)
+ 2D numpy array
+ scipy sparse array
+ pygraphviz agraph
+
+ create_using : NetworkX graph constructor, optional (default=nx.Graph)
+ Graph type to create. If graph instance, then cleared before populated.
+
+ multigraph_input : bool (default False)
+ If True and data is a dict_of_dicts,
+ try to create a multigraph assuming dict_of_dict_of_lists.
+ If data and create_using are both multigraphs then create
+ a multigraph from a multigraph.
+
+ """
+ # NX graph
+ if hasattr(data, "adj"):
+ try:
+ result = from_dict_of_dicts(
+ data.adj,
+ create_using=create_using,
+ multigraph_input=data.is_multigraph(),
+ )
+ # data.graph should be dict-like
+ result.graph.update(data.graph)
+ # data.nodes should be dict-like
+ # result.add_node_from(data.nodes.items()) possible but
+ # for custom node_attr_dict_factory which may be hashable
+ # will be unexpected behavior
+ for n, dd in data.nodes.items():
+ result._node[n].update(dd)
+ return result
+ except Exception as err:
+ raise nx.NetworkXError("Input is not a correct NetworkX graph.") from err
+
+ # dict of dicts/lists
+ if isinstance(data, dict):
+ try:
+ return from_dict_of_dicts(
+ data, create_using=create_using, multigraph_input=multigraph_input
+ )
+ except Exception as err1:
+ if multigraph_input is True:
+ raise nx.NetworkXError(
+ f"converting multigraph_input raised:\n{type(err1)}: {err1}"
+ )
+ try:
+ return from_dict_of_lists(data, create_using=create_using)
+ except Exception as err2:
+ raise TypeError("Input is not known type.") from err2
+
+ # edgelists
+ if isinstance(data, list | tuple | nx.reportviews.EdgeViewABC | Iterator):
+ try:
+ return from_edgelist(data, create_using=create_using)
+ except:
+ pass
+
+ # pygraphviz agraph
+ if hasattr(data, "is_strict"):
+ try:
+ return nx.nx_agraph.from_agraph(data, create_using=create_using)
+ except Exception as err:
+ raise nx.NetworkXError("Input is not a correct pygraphviz graph.") from err
+
+ # Pandas DataFrame
+ try:
+ import pandas as pd
+
+ if isinstance(data, pd.DataFrame):
+ if data.shape[0] == data.shape[1]:
+ try:
+ return nx.from_pandas_adjacency(data, create_using=create_using)
+ except Exception as err:
+ msg = "Input is not a correct Pandas DataFrame adjacency matrix."
+ raise nx.NetworkXError(msg) from err
+ else:
+ try:
+ return nx.from_pandas_edgelist(
+ data, edge_attr=True, create_using=create_using
+ )
+ except Exception as err:
+ msg = "Input is not a correct Pandas DataFrame edge-list."
+ raise nx.NetworkXError(msg) from err
+ except ImportError:
+ pass
+
+ # numpy array
+ try:
+ import numpy as np
+
+ if isinstance(data, np.ndarray):
+ try:
+ return nx.from_numpy_array(data, create_using=create_using)
+ except Exception as err:
+ raise nx.NetworkXError(
+ f"Failed to interpret array as an adjacency matrix."
+ ) from err
+ except ImportError:
+ pass
+
+ # scipy sparse array - any format
+ try:
+ import scipy
+
+ if hasattr(data, "format"):
+ try:
+ return nx.from_scipy_sparse_array(data, create_using=create_using)
+ except Exception as err:
+ raise nx.NetworkXError(
+ "Input is not a correct scipy sparse array type."
+ ) from err
+ except ImportError:
+ pass
+
+ # Note: most general check - should remain last in order of execution
+ # Includes containers (e.g. list, set, dict, etc.), generators, and
+ # iterators (e.g. itertools.chain) of edges
+
+ if isinstance(data, Collection | Generator | Iterator):
+ try:
+ return from_edgelist(data, create_using=create_using)
+ except Exception as err:
+ raise nx.NetworkXError("Input is not a valid edge list") from err
+
+ raise nx.NetworkXError("Input is not a known data type for conversion.")
+
+
+@nx._dispatchable
+def to_dict_of_lists(G, nodelist=None):
+ """Returns adjacency representation of graph as a dictionary of lists.
+
+ Parameters
+ ----------
+ G : graph
+ A NetworkX graph
+
+ nodelist : list
+ Use only nodes specified in nodelist
+
+ Notes
+ -----
+ Completely ignores edge data for MultiGraph and MultiDiGraph.
+
+ """
+ if nodelist is None:
+ nodelist = G
+
+ d = {}
+ for n in nodelist:
+ d[n] = [nbr for nbr in G.neighbors(n) if nbr in nodelist]
+ return d
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def from_dict_of_lists(d, create_using=None):
+ """Returns a graph from a dictionary of lists.
+
+ Parameters
+ ----------
+ d : dictionary of lists
+ A dictionary of lists adjacency representation.
+
+ create_using : NetworkX graph constructor, optional (default=nx.Graph)
+ Graph type to create. If graph instance, then cleared before populated.
+
+ Examples
+ --------
+ >>> dol = {0: [1]} # single edge (0,1)
+ >>> G = nx.from_dict_of_lists(dol)
+
+ or
+
+ >>> G = nx.Graph(dol) # use Graph constructor
+
+ """
+ G = nx.empty_graph(0, create_using)
+ G.add_nodes_from(d)
+ if G.is_multigraph() and not G.is_directed():
+ # a dict_of_lists can't show multiedges. BUT for undirected graphs,
+ # each edge shows up twice in the dict_of_lists.
+ # So we need to treat this case separately.
+ seen = {}
+ for node, nbrlist in d.items():
+ for nbr in nbrlist:
+ if nbr not in seen:
+ G.add_edge(node, nbr)
+ seen[node] = 1 # don't allow reverse edge to show up
+ else:
+ G.add_edges_from(
+ ((node, nbr) for node, nbrlist in d.items() for nbr in nbrlist)
+ )
+ return G
+
+
+def to_dict_of_dicts(G, nodelist=None, edge_data=None):
+ """Returns adjacency representation of graph as a dictionary of dictionaries.
+
+ Parameters
+ ----------
+ G : graph
+ A NetworkX graph
+
+ nodelist : list
+ Use only nodes specified in nodelist
+
+ edge_data : scalar, optional
+ If provided, the value of the dictionary will be set to `edge_data` for
+ all edges. Usual values could be `1` or `True`. If `edge_data` is
+ `None` (the default), the edgedata in `G` is used, resulting in a
+ dict-of-dict-of-dicts. If `G` is a MultiGraph, the result will be a
+ dict-of-dict-of-dict-of-dicts. See Notes for an approach to customize
+ handling edge data. `edge_data` should *not* be a container.
+
+ Returns
+ -------
+ dod : dict
+ A nested dictionary representation of `G`. Note that the level of
+ nesting depends on the type of `G` and the value of `edge_data`
+ (see Examples).
+
+ See Also
+ --------
+ from_dict_of_dicts, to_dict_of_lists
+
+ Notes
+ -----
+ For a more custom approach to handling edge data, try::
+
+ dod = {
+ n: {nbr: custom(n, nbr, dd) for nbr, dd in nbrdict.items()}
+ for n, nbrdict in G.adj.items()
+ }
+
+ where `custom` returns the desired edge data for each edge between `n` and
+ `nbr`, given existing edge data `dd`.
+
+ Examples
+ --------
+ >>> G = nx.path_graph(3)
+ >>> nx.to_dict_of_dicts(G)
+ {0: {1: {}}, 1: {0: {}, 2: {}}, 2: {1: {}}}
+
+ Edge data is preserved by default (``edge_data=None``), resulting
+ in dict-of-dict-of-dicts where the innermost dictionary contains the
+ edge data:
+
+ >>> G = nx.Graph()
+ >>> G.add_edges_from(
+ ... [
+ ... (0, 1, {"weight": 1.0}),
+ ... (1, 2, {"weight": 2.0}),
+ ... (2, 0, {"weight": 1.0}),
+ ... ]
+ ... )
+ >>> d = nx.to_dict_of_dicts(G)
+ >>> d # doctest: +SKIP
+ {0: {1: {'weight': 1.0}, 2: {'weight': 1.0}},
+ 1: {0: {'weight': 1.0}, 2: {'weight': 2.0}},
+ 2: {1: {'weight': 2.0}, 0: {'weight': 1.0}}}
+ >>> d[1][2]["weight"]
+ 2.0
+
+ If `edge_data` is not `None`, edge data in the original graph (if any) is
+ replaced:
+
+ >>> d = nx.to_dict_of_dicts(G, edge_data=1)
+ >>> d
+ {0: {1: 1, 2: 1}, 1: {0: 1, 2: 1}, 2: {1: 1, 0: 1}}
+ >>> d[1][2]
+ 1
+
+ This also applies to MultiGraphs: edge data is preserved by default:
+
+ >>> G = nx.MultiGraph()
+ >>> G.add_edge(0, 1, key="a", weight=1.0)
+ 'a'
+ >>> G.add_edge(0, 1, key="b", weight=5.0)
+ 'b'
+ >>> d = nx.to_dict_of_dicts(G)
+ >>> d # doctest: +SKIP
+ {0: {1: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}},
+ 1: {0: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}}}
+ >>> d[0][1]["b"]["weight"]
+ 5.0
+
+ But multi edge data is lost if `edge_data` is not `None`:
+
+ >>> d = nx.to_dict_of_dicts(G, edge_data=10)
+ >>> d
+ {0: {1: 10}, 1: {0: 10}}
+ """
+ dod = {}
+ if nodelist is None:
+ if edge_data is None:
+ for u, nbrdict in G.adjacency():
+ dod[u] = nbrdict.copy()
+ else: # edge_data is not None
+ for u, nbrdict in G.adjacency():
+ dod[u] = dod.fromkeys(nbrdict, edge_data)
+ else: # nodelist is not None
+ if edge_data is None:
+ for u in nodelist:
+ dod[u] = {}
+ for v, data in ((v, data) for v, data in G[u].items() if v in nodelist):
+ dod[u][v] = data
+ else: # nodelist and edge_data are not None
+ for u in nodelist:
+ dod[u] = {}
+ for v in (v for v in G[u] if v in nodelist):
+ dod[u][v] = edge_data
+ return dod
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def from_dict_of_dicts(d, create_using=None, multigraph_input=False):
+ """Returns a graph from a dictionary of dictionaries.
+
+ Parameters
+ ----------
+ d : dictionary of dictionaries
+ A dictionary of dictionaries adjacency representation.
+
+ create_using : NetworkX graph constructor, optional (default=nx.Graph)
+ Graph type to create. If graph instance, then cleared before populated.
+
+ multigraph_input : bool (default False)
+ When True, the dict `d` is assumed
+ to be a dict-of-dict-of-dict-of-dict structure keyed by
+ node to neighbor to edge keys to edge data for multi-edges.
+ Otherwise this routine assumes dict-of-dict-of-dict keyed by
+ node to neighbor to edge data.
+
+ Examples
+ --------
+ >>> dod = {0: {1: {"weight": 1}}} # single edge (0,1)
+ >>> G = nx.from_dict_of_dicts(dod)
+
+ or
+
+ >>> G = nx.Graph(dod) # use Graph constructor
+
+ """
+ G = nx.empty_graph(0, create_using)
+ G.add_nodes_from(d)
+ # does dict d represent a MultiGraph or MultiDiGraph?
+ if multigraph_input:
+ if G.is_directed():
+ if G.is_multigraph():
+ G.add_edges_from(
+ (u, v, key, data)
+ for u, nbrs in d.items()
+ for v, datadict in nbrs.items()
+ for key, data in datadict.items()
+ )
+ else:
+ G.add_edges_from(
+ (u, v, data)
+ for u, nbrs in d.items()
+ for v, datadict in nbrs.items()
+ for key, data in datadict.items()
+ )
+ else: # Undirected
+ if G.is_multigraph():
+ seen = set() # don't add both directions of undirected graph
+ for u, nbrs in d.items():
+ for v, datadict in nbrs.items():
+ if (u, v) not in seen:
+ G.add_edges_from(
+ (u, v, key, data) for key, data in datadict.items()
+ )
+ seen.add((v, u))
+ else:
+ seen = set() # don't add both directions of undirected graph
+ for u, nbrs in d.items():
+ for v, datadict in nbrs.items():
+ if (u, v) not in seen:
+ G.add_edges_from(
+ (u, v, data) for key, data in datadict.items()
+ )
+ seen.add((v, u))
+
+ else: # not a multigraph to multigraph transfer
+ if G.is_multigraph() and not G.is_directed():
+ # d can have both representations u-v, v-u in dict. Only add one.
+ # We don't need this check for digraphs since we add both directions,
+ # or for Graph() since it is done implicitly (parallel edges not allowed)
+ seen = set()
+ for u, nbrs in d.items():
+ for v, data in nbrs.items():
+ if (u, v) not in seen:
+ G.add_edge(u, v, key=0)
+ G[u][v][0].update(data)
+ seen.add((v, u))
+ else:
+ G.add_edges_from(
+ ((u, v, data) for u, nbrs in d.items() for v, data in nbrs.items())
+ )
+ return G
+
+
+@nx._dispatchable(preserve_edge_attrs=True)
+def to_edgelist(G, nodelist=None):
+ """Returns a list of edges in the graph.
+
+ Parameters
+ ----------
+ G : graph
+ A NetworkX graph
+
+ nodelist : list
+ Use only nodes specified in nodelist
+
+ """
+ if nodelist is None:
+ return G.edges(data=True)
+ return G.edges(nodelist, data=True)
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def from_edgelist(edgelist, create_using=None):
+ """Returns a graph from a list of edges.
+
+ Parameters
+ ----------
+ edgelist : list or iterator
+ Edge tuples
+
+ create_using : NetworkX graph constructor, optional (default=nx.Graph)
+ Graph type to create. If graph instance, then cleared before populated.
+
+ Examples
+ --------
+ >>> edgelist = [(0, 1)] # single edge (0,1)
+ >>> G = nx.from_edgelist(edgelist)
+
+ or
+
+ >>> G = nx.Graph(edgelist) # use Graph constructor
+
+ """
+ G = nx.empty_graph(0, create_using)
+ G.add_edges_from(edgelist)
+ return G
diff --git a/llava_next/lib/python3.10/site-packages/networkx/convert_matrix.py b/llava_next/lib/python3.10/site-packages/networkx/convert_matrix.py
new file mode 100644
index 0000000000000000000000000000000000000000..8992627cbac970e46ca7dce0557611a51cea2c26
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/convert_matrix.py
@@ -0,0 +1,1317 @@
+"""Functions to convert NetworkX graphs to and from common data containers
+like numpy arrays, scipy sparse arrays, and pandas DataFrames.
+
+The preferred way of converting data to a NetworkX graph is through the
+graph constructor. The constructor calls the `~networkx.convert.to_networkx_graph`
+function which attempts to guess the input type and convert it automatically.
+
+Examples
+--------
+Create a 10 node random graph from a numpy array
+
+>>> import numpy as np
+>>> rng = np.random.default_rng()
+>>> a = rng.integers(low=0, high=2, size=(10, 10))
+>>> DG = nx.from_numpy_array(a, create_using=nx.DiGraph)
+
+or equivalently:
+
+>>> DG = nx.DiGraph(a)
+
+which calls `from_numpy_array` internally based on the type of ``a``.
+
+See Also
+--------
+nx_agraph, nx_pydot
+"""
+
+import itertools
+from collections import defaultdict
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = [
+ "from_pandas_adjacency",
+ "to_pandas_adjacency",
+ "from_pandas_edgelist",
+ "to_pandas_edgelist",
+ "from_scipy_sparse_array",
+ "to_scipy_sparse_array",
+ "from_numpy_array",
+ "to_numpy_array",
+]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def to_pandas_adjacency(
+ G,
+ nodelist=None,
+ dtype=None,
+ order=None,
+ multigraph_weight=sum,
+ weight="weight",
+ nonedge=0.0,
+):
+ """Returns the graph adjacency matrix as a Pandas DataFrame.
+
+ Parameters
+ ----------
+ G : graph
+ The NetworkX graph used to construct the Pandas DataFrame.
+
+ nodelist : list, optional
+ The rows and columns are ordered according to the nodes in `nodelist`.
+ If `nodelist` is None, then the ordering is produced by G.nodes().
+
+ multigraph_weight : {sum, min, max}, optional
+ An operator that determines how weights in multigraphs are handled.
+ The default is to sum the weights of the multiple edges.
+
+ weight : string or None, optional
+ The edge attribute that holds the numerical value used for
+ the edge weight. If an edge does not have that attribute, then the
+ value 1 is used instead.
+
+ nonedge : float, optional
+ The matrix values corresponding to nonedges are typically set to zero.
+ However, this could be undesirable if there are matrix values
+ corresponding to actual edges that also have the value zero. If so,
+ one might prefer nonedges to have some other value, such as nan.
+
+ Returns
+ -------
+ df : Pandas DataFrame
+ Graph adjacency matrix
+
+ Notes
+ -----
+ For directed graphs, entry i,j corresponds to an edge from i to j.
+
+ The DataFrame entries are assigned to the weight edge attribute. When
+ an edge does not have a weight attribute, the value of the entry is set to
+ the number 1. For multiple (parallel) edges, the values of the entries
+ are determined by the 'multigraph_weight' parameter. The default is to
+ sum the weight attributes for each of the parallel edges.
+
+ When `nodelist` does not contain every node in `G`, the matrix is built
+ from the subgraph of `G` that is induced by the nodes in `nodelist`.
+
+ The convention used for self-loop edges in graphs is to assign the
+ diagonal matrix entry value to the weight attribute of the edge
+ (or the number 1 if the edge has no weight attribute). If the
+ alternate convention of doubling the edge weight is desired the
+ resulting Pandas DataFrame can be modified as follows::
+
+ >>> import pandas as pd
+ >>> G = nx.Graph([(1, 1), (2, 2)])
+ >>> df = nx.to_pandas_adjacency(G)
+ >>> df
+ 1 2
+ 1 1.0 0.0
+ 2 0.0 1.0
+ >>> diag_idx = list(range(len(df)))
+ >>> df.iloc[diag_idx, diag_idx] *= 2
+ >>> df
+ 1 2
+ 1 2.0 0.0
+ 2 0.0 2.0
+
+ Examples
+ --------
+ >>> G = nx.MultiDiGraph()
+ >>> G.add_edge(0, 1, weight=2)
+ 0
+ >>> G.add_edge(1, 0)
+ 0
+ >>> G.add_edge(2, 2, weight=3)
+ 0
+ >>> G.add_edge(2, 2)
+ 1
+ >>> nx.to_pandas_adjacency(G, nodelist=[0, 1, 2], dtype=int)
+ 0 1 2
+ 0 0 2 0
+ 1 1 0 0
+ 2 0 0 4
+
+ """
+ import pandas as pd
+
+ M = to_numpy_array(
+ G,
+ nodelist=nodelist,
+ dtype=dtype,
+ order=order,
+ multigraph_weight=multigraph_weight,
+ weight=weight,
+ nonedge=nonedge,
+ )
+ if nodelist is None:
+ nodelist = list(G)
+ return pd.DataFrame(data=M, index=nodelist, columns=nodelist)
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def from_pandas_adjacency(df, create_using=None):
+ r"""Returns a graph from Pandas DataFrame.
+
+ The Pandas DataFrame is interpreted as an adjacency matrix for the graph.
+
+ Parameters
+ ----------
+ df : Pandas DataFrame
+ An adjacency matrix representation of a graph
+
+ create_using : NetworkX graph constructor, optional (default=nx.Graph)
+ Graph type to create. If graph instance, then cleared before populated.
+
+ Notes
+ -----
+ For directed graphs, explicitly mention create_using=nx.DiGraph,
+ and entry i,j of df corresponds to an edge from i to j.
+
+ If `df` has a single data type for each entry it will be converted to an
+ appropriate Python data type.
+
+ If you have node attributes stored in a separate dataframe `df_nodes`,
+ you can load those attributes to the graph `G` using the following code:
+
+ ```
+ df_nodes = pd.DataFrame({"node_id": [1, 2, 3], "attribute1": ["A", "B", "C"]})
+ G.add_nodes_from((n, dict(d)) for n, d in df_nodes.iterrows())
+ ```
+
+ If `df` has a user-specified compound data type the names
+ of the data fields will be used as attribute keys in the resulting
+ NetworkX graph.
+
+ See Also
+ --------
+ to_pandas_adjacency
+
+ Examples
+ --------
+ Simple integer weights on edges:
+
+ >>> import pandas as pd
+ >>> pd.options.display.max_columns = 20
+ >>> df = pd.DataFrame([[1, 1], [2, 1]])
+ >>> df
+ 0 1
+ 0 1 1
+ 1 2 1
+ >>> G = nx.from_pandas_adjacency(df)
+ >>> G.name = "Graph from pandas adjacency matrix"
+ >>> print(G)
+ Graph named 'Graph from pandas adjacency matrix' with 2 nodes and 3 edges
+ """
+
+ try:
+ df = df[df.index]
+ except Exception as err:
+ missing = list(set(df.index).difference(set(df.columns)))
+ msg = f"{missing} not in columns"
+ raise nx.NetworkXError("Columns must match Indices.", msg) from err
+
+ A = df.values
+ G = from_numpy_array(A, create_using=create_using, nodelist=df.columns)
+
+ return G
+
+
+@nx._dispatchable(preserve_edge_attrs=True)
+def to_pandas_edgelist(
+ G,
+ source="source",
+ target="target",
+ nodelist=None,
+ dtype=None,
+ edge_key=None,
+):
+ """Returns the graph edge list as a Pandas DataFrame.
+
+ Parameters
+ ----------
+ G : graph
+ The NetworkX graph used to construct the Pandas DataFrame.
+
+ source : str or int, optional
+ A valid column name (string or integer) for the source nodes (for the
+ directed case).
+
+ target : str or int, optional
+ A valid column name (string or integer) for the target nodes (for the
+ directed case).
+
+ nodelist : list, optional
+ Use only nodes specified in nodelist
+
+ dtype : dtype, default None
+ Use to create the DataFrame. Data type to force.
+ Only a single dtype is allowed. If None, infer.
+
+ edge_key : str or int or None, optional (default=None)
+ A valid column name (string or integer) for the edge keys (for the
+ multigraph case). If None, edge keys are not stored in the DataFrame.
+
+ Returns
+ -------
+ df : Pandas DataFrame
+ Graph edge list
+
+ Examples
+ --------
+ >>> G = nx.Graph(
+ ... [
+ ... ("A", "B", {"cost": 1, "weight": 7}),
+ ... ("C", "E", {"cost": 9, "weight": 10}),
+ ... ]
+ ... )
+ >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"])
+ >>> df[["source", "target", "cost", "weight"]]
+ source target cost weight
+ 0 A B 1 7
+ 1 C E 9 10
+
+ >>> G = nx.MultiGraph([("A", "B", {"cost": 1}), ("A", "B", {"cost": 9})])
+ >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"], edge_key="ekey")
+ >>> df[["source", "target", "cost", "ekey"]]
+ source target cost ekey
+ 0 A B 1 0
+ 1 A B 9 1
+
+ """
+ import pandas as pd
+
+ if nodelist is None:
+ edgelist = G.edges(data=True)
+ else:
+ edgelist = G.edges(nodelist, data=True)
+ source_nodes = [s for s, _, _ in edgelist]
+ target_nodes = [t for _, t, _ in edgelist]
+
+ all_attrs = set().union(*(d.keys() for _, _, d in edgelist))
+ if source in all_attrs:
+ raise nx.NetworkXError(f"Source name {source!r} is an edge attr name")
+ if target in all_attrs:
+ raise nx.NetworkXError(f"Target name {target!r} is an edge attr name")
+
+ nan = float("nan")
+ edge_attr = {k: [d.get(k, nan) for _, _, d in edgelist] for k in all_attrs}
+
+ if G.is_multigraph() and edge_key is not None:
+ if edge_key in all_attrs:
+ raise nx.NetworkXError(f"Edge key name {edge_key!r} is an edge attr name")
+ edge_keys = [k for _, _, k in G.edges(keys=True)]
+ edgelistdict = {source: source_nodes, target: target_nodes, edge_key: edge_keys}
+ else:
+ edgelistdict = {source: source_nodes, target: target_nodes}
+
+ edgelistdict.update(edge_attr)
+ return pd.DataFrame(edgelistdict, dtype=dtype)
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def from_pandas_edgelist(
+    df,
+    source="source",
+    target="target",
+    edge_attr=None,
+    create_using=None,
+    edge_key=None,
+):
+    """Returns a graph from Pandas DataFrame containing an edge list.
+
+    The Pandas DataFrame should contain at least two columns of node names and
+    zero or more columns of edge attributes. Each row will be processed as one
+    edge instance.
+
+    Note: This function iterates over DataFrame.values, which is not
+    guaranteed to retain the data type across columns in the row. This is only
+    a problem if your row is entirely numeric and a mix of ints and floats. In
+    that case, all values will be returned as floats. See the
+    DataFrame.iterrows documentation for an example.
+
+    Parameters
+    ----------
+    df : Pandas DataFrame
+        An edge list representation of a graph
+
+    source : str or int
+        A valid column name (string or integer) for the source nodes (for the
+        directed case).
+
+    target : str or int
+        A valid column name (string or integer) for the target nodes (for the
+        directed case).
+
+    edge_attr : str or int, iterable, True, or None
+        A valid column name (str or int) or iterable of column names that are
+        used to retrieve items and add them to the graph as edge attributes.
+        If `True`, all columns will be added except `source`, `target` and `edge_key`.
+        If `None`, no edge attributes are added to the graph.
+
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    edge_key : str or None, optional (default=None)
+        A valid column name for the edge keys (for a MultiGraph). The values in
+        this column are used for the edge keys when adding edges if create_using
+        is a multigraph.
+
+    If you have node attributes stored in a separate dataframe `df_nodes`,
+    you can load those attributes to the graph `G` using the following code:
+
+    ```
+    df_nodes = pd.DataFrame({"node_id": [1, 2, 3], "attribute1": ["A", "B", "C"]})
+    G.add_nodes_from((n, dict(d)) for n, d in df_nodes.iterrows())
+    ```
+
+    See Also
+    --------
+    to_pandas_edgelist
+
+    Examples
+    --------
+    Simple integer weights on edges:
+
+    >>> import pandas as pd
+    >>> pd.options.display.max_columns = 20
+    >>> import numpy as np
+    >>> rng = np.random.RandomState(seed=5)
+    >>> ints = rng.randint(1, 11, size=(3, 2))
+    >>> a = ["A", "B", "C"]
+    >>> b = ["D", "A", "E"]
+    >>> df = pd.DataFrame(ints, columns=["weight", "cost"])
+    >>> df[0] = a
+    >>> df["b"] = b
+    >>> df[["weight", "cost", 0, "b"]]
+       weight  cost  0  b
+    0       4     7  A  D
+    1       7     1  B  A
+    2      10     9  C  E
+    >>> G = nx.from_pandas_edgelist(df, 0, "b", ["weight", "cost"])
+    >>> G["E"]["C"]["weight"]
+    10
+    >>> G["E"]["C"]["cost"]
+    9
+    >>> edges = pd.DataFrame(
+    ...     {
+    ...         "source": [0, 1, 2],
+    ...         "target": [2, 2, 3],
+    ...         "weight": [3, 4, 5],
+    ...         "color": ["red", "blue", "blue"],
+    ...     }
+    ... )
+    >>> G = nx.from_pandas_edgelist(edges, edge_attr=True)
+    >>> G[0][2]["color"]
+    'red'
+
+    Build multigraph with custom keys:
+
+    >>> edges = pd.DataFrame(
+    ...     {
+    ...         "source": [0, 1, 2, 0],
+    ...         "target": [2, 2, 3, 2],
+    ...         "my_edge_key": ["A", "B", "C", "D"],
+    ...         "weight": [3, 4, 5, 6],
+    ...         "color": ["red", "blue", "blue", "blue"],
+    ...     }
+    ... )
+    >>> G = nx.from_pandas_edgelist(
+    ...     edges,
+    ...     edge_key="my_edge_key",
+    ...     edge_attr=["weight", "color"],
+    ...     create_using=nx.MultiGraph(),
+    ... )
+    >>> G[0][2]
+    AtlasView({'A': {'weight': 3, 'color': 'red'}, 'D': {'weight': 6, 'color': 'blue'}})
+
+
+    """
+    g = nx.empty_graph(0, create_using)
+
+    # Fast path: no attribute columns requested. Add bare edges, using
+    # explicit keys when building a multigraph from an `edge_key` column.
+    if edge_attr is None:
+        if g.is_multigraph() and edge_key is not None:
+            for u, v, k in zip(df[source], df[target], df[edge_key]):
+                g.add_edge(u, v, k)
+        else:
+            g.add_edges_from(zip(df[source], df[target]))
+        return g
+
+    # Structural columns are excluded when `edge_attr is True` selects
+    # "all remaining columns" as attribute columns.
+    reserved_columns = [source, target]
+    if g.is_multigraph() and edge_key is not None:
+        reserved_columns.append(edge_key)
+
+    # Additional columns requested
+    attr_col_headings = []
+    attribute_data = []
+    if edge_attr is True:
+        attr_col_headings = [c for c in df.columns if c not in reserved_columns]
+    elif isinstance(edge_attr, list | tuple):
+        attr_col_headings = edge_attr
+    else:
+        # A single column name (str or int) is wrapped in a one-element list.
+        attr_col_headings = [edge_attr]
+    if len(attr_col_headings) == 0:
+        raise nx.NetworkXError(
+            f"Invalid edge_attr argument: No columns found with name: {attr_col_headings}"
+        )
+
+    # Bundle the requested columns row-wise: one attribute tuple per edge.
+    try:
+        attribute_data = zip(*[df[col] for col in attr_col_headings])
+    except (KeyError, TypeError) as err:
+        msg = f"Invalid edge_attr argument: {edge_attr}"
+        raise nx.NetworkXError(msg) from err
+
+    if g.is_multigraph():
+        # => append the edge keys from the df to the bundled data
+        if edge_key is not None:
+            try:
+                multigraph_edge_keys = df[edge_key]
+                attribute_data = zip(attribute_data, multigraph_edge_keys)
+            except (KeyError, TypeError) as err:
+                msg = f"Invalid edge_key argument: {edge_key}"
+                raise nx.NetworkXError(msg) from err
+
+        for s, t, attrs in zip(df[source], df[target], attribute_data):
+            if edge_key is not None:
+                attrs, multigraph_edge_key = attrs
+                key = g.add_edge(s, t, key=multigraph_edge_key)
+            else:
+                key = g.add_edge(s, t)
+
+            # Attach the attribute values under the key returned by add_edge.
+            g[s][t][key].update(zip(attr_col_headings, attrs))
+    else:
+        for s, t, attrs in zip(df[source], df[target], attribute_data):
+            g.add_edge(s, t)
+            g[s][t].update(zip(attr_col_headings, attrs))
+
+    return g
+
+
+@nx._dispatchable(edge_attrs="weight")
+def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format="csr"):
+    """Returns the graph adjacency matrix as a SciPy sparse array.
+
+    Parameters
+    ----------
+    G : graph
+        The NetworkX graph used to construct the sparse array.
+
+    nodelist : list, optional
+        The rows and columns are ordered according to the nodes in `nodelist`.
+        If `nodelist` is None, then the ordering is produced by ``G.nodes()``.
+
+    dtype : NumPy data-type, optional
+        A valid NumPy dtype used to initialize the array. If None, then the
+        NumPy default is used.
+
+    weight : string or None, optional (default='weight')
+        The edge attribute that holds the numerical value used for
+        the edge weight. If None then all edge weights are 1.
+
+    format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'}
+        The format of the sparse array to be returned (default 'csr'). For
+        some algorithms different implementations of sparse arrays
+        can perform better. See [1]_ for details.
+
+    Returns
+    -------
+    A : SciPy sparse array
+        Graph adjacency matrix.
+
+    Notes
+    -----
+    For directed graphs, matrix entry ``i, j`` corresponds to an edge from
+    ``i`` to ``j``.
+
+    The values of the adjacency matrix are populated using the edge attribute held in
+    parameter `weight`. When an edge does not have that attribute, the
+    value of the entry is 1.
+
+    For multiple edges the matrix values are the sums of the edge weights.
+
+    When `nodelist` does not contain every node in `G`, the adjacency matrix
+    is built from the subgraph of `G` that is induced by the nodes in
+    `nodelist`.
+
+    The convention used for self-loop edges in graphs is to assign the
+    diagonal matrix entry value to the weight attribute of the edge
+    (or the number 1 if the edge has no weight attribute). If the
+    alternate convention of doubling the edge weight is desired the
+    resulting array can be modified as follows::
+
+        >>> G = nx.Graph([(1, 1)])
+        >>> A = nx.to_scipy_sparse_array(G)
+        >>> A.toarray()
+        array([[1]])
+        >>> A.setdiag(A.diagonal() * 2)
+        >>> A.toarray()
+        array([[2]])
+
+    Examples
+    --------
+
+    Basic usage:
+
+    >>> G = nx.path_graph(4)
+    >>> A = nx.to_scipy_sparse_array(G)
+    >>> A  # doctest: +SKIP
+
+
+    >>> A.toarray()
+    array([[0, 1, 0, 0],
+           [1, 0, 1, 0],
+           [0, 1, 0, 1],
+           [0, 0, 1, 0]])
+
+    .. note:: The `toarray` method is used in these examples to better visualize
+       the adjacency matrix. For a dense representation of the adjacency matrix,
+       use `to_numpy_array` instead.
+
+    Directed graphs:
+
+    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 3)])
+    >>> nx.to_scipy_sparse_array(G).toarray()
+    array([[0, 1, 0, 0],
+           [0, 0, 1, 0],
+           [0, 0, 0, 1],
+           [0, 0, 0, 0]])
+
+    >>> H = G.reverse()
+    >>> H.edges
+    OutEdgeView([(1, 0), (2, 1), (3, 2)])
+    >>> nx.to_scipy_sparse_array(H).toarray()
+    array([[0, 0, 0, 0],
+           [1, 0, 0, 0],
+           [0, 1, 0, 0],
+           [0, 0, 1, 0]])
+
+    By default, the order of the rows/columns of the adjacency matrix is determined
+    by the ordering of the nodes in `G`:
+
+    >>> G = nx.Graph()
+    >>> G.add_nodes_from([3, 5, 0, 1])
+    >>> G.add_edges_from([(1, 3), (1, 5)])
+    >>> nx.to_scipy_sparse_array(G).toarray()
+    array([[0, 0, 0, 1],
+           [0, 0, 0, 1],
+           [0, 0, 0, 0],
+           [1, 1, 0, 0]])
+
+    The ordering of the rows can be changed with `nodelist`:
+
+    >>> ordered = [0, 1, 3, 5]
+    >>> nx.to_scipy_sparse_array(G, nodelist=ordered).toarray()
+    array([[0, 0, 0, 0],
+           [0, 0, 1, 1],
+           [0, 1, 0, 0],
+           [0, 1, 0, 0]])
+
+    If `nodelist` contains a subset of the nodes in `G`, the adjacency matrix
+    for the node-induced subgraph is produced:
+
+    >>> nx.to_scipy_sparse_array(G, nodelist=[1, 3, 5]).toarray()
+    array([[0, 1, 1],
+           [1, 0, 0],
+           [1, 0, 0]])
+
+    The values of the adjacency matrix are drawn from the edge attribute
+    specified by the `weight` parameter:
+
+    >>> G = nx.path_graph(4)
+    >>> nx.set_edge_attributes(
+    ...     G, values={(0, 1): 1, (1, 2): 10, (2, 3): 2}, name="weight"
+    ... )
+    >>> nx.set_edge_attributes(
+    ...     G, values={(0, 1): 50, (1, 2): 35, (2, 3): 10}, name="capacity"
+    ... )
+    >>> nx.to_scipy_sparse_array(G).toarray()  # Default weight="weight"
+    array([[ 0,  1,  0,  0],
+           [ 1,  0, 10,  0],
+           [ 0, 10,  0,  2],
+           [ 0,  0,  2,  0]])
+    >>> nx.to_scipy_sparse_array(G, weight="capacity").toarray()
+    array([[ 0, 50,  0,  0],
+           [50,  0, 35,  0],
+           [ 0, 35,  0, 10],
+           [ 0,  0, 10,  0]])
+
+    Any edges that don't have a `weight` attribute default to 1:
+
+    >>> G[1][2].pop("capacity")
+    35
+    >>> nx.to_scipy_sparse_array(G, weight="capacity").toarray()
+    array([[ 0, 50,  0,  0],
+           [50,  0,  1,  0],
+           [ 0,  1,  0, 10],
+           [ 0,  0, 10,  0]])
+
+    When `G` is a multigraph, the values in the adjacency matrix are given by
+    the sum of the `weight` edge attribute over each edge key:
+
+    >>> G = nx.MultiDiGraph([(0, 1), (0, 1), (0, 1), (2, 0)])
+    >>> nx.to_scipy_sparse_array(G).toarray()
+    array([[0, 3, 0],
+           [0, 0, 0],
+           [1, 0, 0]])
+
+    References
+    ----------
+    .. [1] Scipy Dev. References, "Sparse Arrays",
+       https://docs.scipy.org/doc/scipy/reference/sparse.html
+    """
+    import scipy as sp
+
+    if len(G) == 0:
+        raise nx.NetworkXError("Graph has no nodes or edges")
+
+    if nodelist is None:
+        nodelist = list(G)
+        nlen = len(G)
+    else:
+        nlen = len(nodelist)
+        if nlen == 0:
+            raise nx.NetworkXError("nodelist has no nodes")
+        nodeset = set(G.nbunch_iter(nodelist))
+        if nlen != len(nodeset):
+            # Distinguish "node missing from G" from "duplicate in nodelist"
+            # so the error message names the actual problem.
+            for n in nodelist:
+                if n not in G:
+                    raise nx.NetworkXError(f"Node {n} in nodelist is not in G")
+            raise nx.NetworkXError("nodelist contains duplicates.")
+        if nlen < len(G):
+            G = G.subgraph(nodelist)
+
+    # Map each node to its row/column position in the matrix.
+    index = dict(zip(nodelist, range(nlen)))
+    # Transpose the (row, col, weight) triples into three parallel tuples.
+    coefficients = zip(
+        *((index[u], index[v], wt) for u, v, wt in G.edges(data=weight, default=1))
+    )
+    try:
+        row, col, data = coefficients
+    except ValueError:
+        # there is no edge in the subgraph
+        row, col, data = [], [], []
+
+    if G.is_directed():
+        A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, nlen), dtype=dtype)
+    else:
+        # symmetrize matrix
+        d = data + data
+        r = row + col
+        c = col + row
+        # selfloop entries get double counted when symmetrizing
+        # so we subtract the data on the diagonal
+        selfloops = list(nx.selfloop_edges(G, data=weight, default=1))
+        if selfloops:
+            diag_index, diag_data = zip(*((index[u], -wt) for u, v, wt in selfloops))
+            d += diag_data
+            r += diag_index
+            c += diag_index
+        A = sp.sparse.coo_array((d, (r, c)), shape=(nlen, nlen), dtype=dtype)
+    try:
+        return A.asformat(format)
+    except ValueError as err:
+        raise nx.NetworkXError(f"Unknown sparse matrix format: {format}") from err
+
+
+def _csr_gen_triples(A):
+ """Converts a SciPy sparse array in **Compressed Sparse Row** format to
+ an iterable of weighted edge triples.
+
+ """
+ nrows = A.shape[0]
+ indptr, dst_indices, data = A.indptr, A.indices, A.data
+ import numpy as np
+
+ src_indices = np.repeat(np.arange(nrows), np.diff(indptr))
+ return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist())
+
+
+def _csc_gen_triples(A):
+ """Converts a SciPy sparse array in **Compressed Sparse Column** format to
+ an iterable of weighted edge triples.
+
+ """
+ ncols = A.shape[1]
+ indptr, src_indices, data = A.indptr, A.indices, A.data
+ import numpy as np
+
+ dst_indices = np.repeat(np.arange(ncols), np.diff(indptr))
+ return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist())
+
+
+def _coo_gen_triples(A):
+ """Converts a SciPy sparse array in **Coordinate** format to an iterable
+ of weighted edge triples.
+
+ """
+ return zip(A.row.tolist(), A.col.tolist(), A.data.tolist())
+
+
+def _dok_gen_triples(A):
+ """Converts a SciPy sparse array in **Dictionary of Keys** format to an
+ iterable of weighted edge triples.
+
+ """
+ for (r, c), v in A.items():
+ # Use `v.item()` to convert a NumPy scalar to the appropriate Python scalar
+ yield int(r), int(c), v.item()
+
+
+def _generate_weighted_edges(A):
+ """Returns an iterable over (u, v, w) triples, where u and v are adjacent
+ vertices and w is the weight of the edge joining u and v.
+
+ `A` is a SciPy sparse array (in any format).
+
+ """
+ if A.format == "csr":
+ return _csr_gen_triples(A)
+ if A.format == "csc":
+ return _csc_gen_triples(A)
+ if A.format == "dok":
+ return _dok_gen_triples(A)
+ # If A is in any other format (including COO), convert it to COO format.
+ return _coo_gen_triples(A.tocoo())
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def from_scipy_sparse_array(
+    A, parallel_edges=False, create_using=None, edge_attribute="weight"
+):
+    """Creates a new graph from an adjacency matrix given as a SciPy sparse
+    array.
+
+    Parameters
+    ----------
+    A: scipy.sparse array
+        An adjacency matrix representation of a graph
+
+    parallel_edges : Boolean
+        If this is True, `create_using` is a multigraph, and `A` is an
+        integer matrix, then entry *(i, j)* in the matrix is interpreted as the
+        number of parallel edges joining vertices *i* and *j* in the graph.
+        If it is False, then the entries in the matrix are interpreted as
+        the weight of a single edge joining the vertices.
+
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    edge_attribute: string
+        Name of edge attribute to store matrix numeric value. The data will
+        have the same type as the matrix entry (int, float, (real,imag)).
+
+    Notes
+    -----
+    For directed graphs, explicitly mention create_using=nx.DiGraph,
+    and entry i,j of A corresponds to an edge from i to j.
+
+    If `create_using` is :class:`networkx.MultiGraph` or
+    :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the
+    entries of `A` are of type :class:`int`, then this function returns a
+    multigraph (constructed from `create_using`) with parallel edges.
+    In this case, `edge_attribute` will be ignored.
+
+    If `create_using` indicates an undirected multigraph, then only the edges
+    indicated by the upper triangle of the matrix `A` will be added to the
+    graph.
+
+    Examples
+    --------
+    >>> import scipy as sp
+    >>> A = sp.sparse.eye(2, 2, 1)
+    >>> G = nx.from_scipy_sparse_array(A)
+
+    If `create_using` indicates a multigraph and the matrix has only integer
+    entries and `parallel_edges` is False, then the entries will be treated
+    as weights for edges joining the nodes (without creating parallel edges):
+
+    >>> A = sp.sparse.csr_array([[1, 1], [1, 2]])
+    >>> G = nx.from_scipy_sparse_array(A, create_using=nx.MultiGraph)
+    >>> G[1][1]
+    AtlasView({0: {'weight': 2}})
+
+    If `create_using` indicates a multigraph and the matrix has only integer
+    entries and `parallel_edges` is True, then the entries will be treated
+    as the number of parallel edges joining those two vertices:
+
+    >>> A = sp.sparse.csr_array([[1, 1], [1, 2]])
+    >>> G = nx.from_scipy_sparse_array(
+    ...     A, parallel_edges=True, create_using=nx.MultiGraph
+    ... )
+    >>> G[1][1]
+    AtlasView({0: {'weight': 1}, 1: {'weight': 1}})
+
+    """
+    G = nx.empty_graph(0, create_using)
+    n, m = A.shape
+    # Adjacency matrices must be square: node i is row/column i.
+    if n != m:
+        raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}")
+    # Make sure we get even the isolated nodes of the graph.
+    G.add_nodes_from(range(n))
+    # Create an iterable over (u, v, w) triples and for each triple, add an
+    # edge from u to v with weight w.
+    triples = _generate_weighted_edges(A)
+    # If the entries in the adjacency matrix are integers, the graph is a
+    # multigraph, and parallel_edges is True, then create parallel edges, each
+    # with weight 1, for each entry in the adjacency matrix. Otherwise, create
+    # one edge for each positive entry in the adjacency matrix and set the
+    # weight of that edge to be the entry in the matrix.
+    # (dtype.kind "i" = signed integer, "u" = unsigned integer.)
+    if A.dtype.kind in ("i", "u") and G.is_multigraph() and parallel_edges:
+        chain = itertools.chain.from_iterable
+        # The following line is equivalent to:
+        #
+        #     for (u, v) in edges:
+        #         for d in range(A[u, v]):
+        #             G.add_edge(u, v, weight=1)
+        #
+        triples = chain(((u, v, 1) for d in range(w)) for (u, v, w) in triples)
+    # If we are creating an undirected multigraph, only add the edges from the
+    # upper triangle of the matrix. Otherwise, add all the edges. This relies
+    # on the fact that the vertices created in the
+    # `_generated_weighted_edges()` function are actually the row/column
+    # indices for the matrix `A`.
+    #
+    # Without this check, we run into a problem where each edge is added twice
+    # when `G.add_weighted_edges_from()` is invoked below.
+    if G.is_multigraph() and not G.is_directed():
+        triples = ((u, v, d) for u, v, d in triples if u <= v)
+    G.add_weighted_edges_from(triples, weight=edge_attribute)
+    return G
+
+
+@nx._dispatchable(edge_attrs="weight")  # edge attrs may also be obtained from `dtype`
+def to_numpy_array(
+    G,
+    nodelist=None,
+    dtype=None,
+    order=None,
+    multigraph_weight=sum,
+    weight="weight",
+    nonedge=0.0,
+):
+    """Returns the graph adjacency matrix as a NumPy array.
+
+    Parameters
+    ----------
+    G : graph
+        The NetworkX graph used to construct the NumPy array.
+
+    nodelist : list, optional
+        The rows and columns are ordered according to the nodes in `nodelist`.
+        If `nodelist` is ``None``, then the ordering is produced by ``G.nodes()``.
+
+    dtype : NumPy data type, optional
+        A NumPy data type used to initialize the array. If None, then the NumPy
+        default is used. The dtype can be structured if `weight=None`, in which
+        case the dtype field names are used to look up edge attributes. The
+        result is a structured array where each named field in the dtype
+        corresponds to the adjacency for that edge attribute. See examples for
+        details.
+
+    order : {'C', 'F'}, optional
+        Whether to store multidimensional data in C- or Fortran-contiguous
+        (row- or column-wise) order in memory. If None, then the NumPy default
+        is used.
+
+    multigraph_weight : callable, optional
+        An function that determines how weights in multigraphs are handled.
+        The function should accept a sequence of weights and return a single
+        value. The default is to sum the weights of the multiple edges.
+
+    weight : string or None optional (default = 'weight')
+        The edge attribute that holds the numerical value used for
+        the edge weight. If an edge does not have that attribute, then the
+        value 1 is used instead. `weight` must be ``None`` if a structured
+        dtype is used.
+
+    nonedge : array_like (default = 0.0)
+        The value used to represent non-edges in the adjacency matrix.
+        The array values corresponding to nonedges are typically set to zero.
+        However, this could be undesirable if there are array values
+        corresponding to actual edges that also have the value zero. If so,
+        one might prefer nonedges to have some other value, such as ``nan``.
+
+    Returns
+    -------
+    A : NumPy ndarray
+        Graph adjacency matrix
+
+    Raises
+    ------
+    NetworkXError
+        If `dtype` is a structured dtype and `G` is a multigraph
+    ValueError
+        If `dtype` is a structured dtype and `weight` is not `None`
+
+    See Also
+    --------
+    from_numpy_array
+
+    Notes
+    -----
+    For directed graphs, entry ``i, j`` corresponds to an edge from ``i`` to ``j``.
+
+    Entries in the adjacency matrix are given by the `weight` edge attribute.
+    When an edge does not have a weight attribute, the value of the entry is
+    set to the number 1. For multiple (parallel) edges, the values of the
+    entries are determined by the `multigraph_weight` parameter. The default is
+    to sum the weight attributes for each of the parallel edges.
+
+    When `nodelist` does not contain every node in `G`, the adjacency matrix is
+    built from the subgraph of `G` that is induced by the nodes in `nodelist`.
+
+    The convention used for self-loop edges in graphs is to assign the
+    diagonal array entry value to the weight attribute of the edge
+    (or the number 1 if the edge has no weight attribute). If the
+    alternate convention of doubling the edge weight is desired the
+    resulting NumPy array can be modified as follows:
+
+    >>> import numpy as np
+    >>> G = nx.Graph([(1, 1)])
+    >>> A = nx.to_numpy_array(G)
+    >>> A
+    array([[1.]])
+    >>> A[np.diag_indices_from(A)] *= 2
+    >>> A
+    array([[2.]])
+
+    Examples
+    --------
+    >>> G = nx.MultiDiGraph()
+    >>> G.add_edge(0, 1, weight=2)
+    0
+    >>> G.add_edge(1, 0)
+    0
+    >>> G.add_edge(2, 2, weight=3)
+    0
+    >>> G.add_edge(2, 2)
+    1
+    >>> nx.to_numpy_array(G, nodelist=[0, 1, 2])
+    array([[0., 2., 0.],
+           [1., 0., 0.],
+           [0., 0., 4.]])
+
+    When `nodelist` argument is used, nodes of `G` which do not appear in the `nodelist`
+    and their edges are not included in the adjacency matrix. Here is an example:
+
+    >>> G = nx.Graph()
+    >>> G.add_edge(3, 1)
+    >>> G.add_edge(2, 0)
+    >>> G.add_edge(2, 1)
+    >>> G.add_edge(3, 0)
+    >>> nx.to_numpy_array(G, nodelist=[1, 2, 3])
+    array([[0., 1., 1.],
+           [1., 0., 0.],
+           [1., 0., 0.]])
+
+    This function can also be used to create adjacency matrices for multiple
+    edge attributes with structured dtypes:
+
+    >>> G = nx.Graph()
+    >>> G.add_edge(0, 1, weight=10)
+    >>> G.add_edge(1, 2, cost=5)
+    >>> G.add_edge(2, 3, weight=3, cost=-4.0)
+    >>> dtype = np.dtype([("weight", int), ("cost", float)])
+    >>> A = nx.to_numpy_array(G, dtype=dtype, weight=None)
+    >>> A["weight"]
+    array([[ 0, 10,  0,  0],
+           [10,  0,  1,  0],
+           [ 0,  1,  0,  3],
+           [ 0,  0,  3,  0]])
+    >>> A["cost"]
+    array([[ 0.,  1.,  0.,  0.],
+           [ 1.,  0.,  5.,  0.],
+           [ 0.,  5.,  0., -4.],
+           [ 0.,  0., -4.,  0.]])
+
+    As stated above, the argument "nonedge" is useful especially when there are
+    actually edges with weight 0 in the graph. Setting a nonedge value different than 0,
+    makes it much clearer to differentiate such 0-weighted edges and actual nonedge values.
+
+    >>> G = nx.Graph()
+    >>> G.add_edge(3, 1, weight=2)
+    >>> G.add_edge(2, 0, weight=0)
+    >>> G.add_edge(2, 1, weight=0)
+    >>> G.add_edge(3, 0, weight=1)
+    >>> nx.to_numpy_array(G, nonedge=-1.0)
+    array([[-1.,  2., -1.,  1.],
+           [ 2., -1.,  0., -1.],
+           [-1.,  0., -1.,  0.],
+           [ 1., -1.,  0., -1.]])
+    """
+    import numpy as np
+
+    if nodelist is None:
+        nodelist = list(G)
+    nlen = len(nodelist)
+
+    # Input validation
+    nodeset = set(nodelist)
+    if nodeset - set(G):
+        raise nx.NetworkXError(f"Nodes {nodeset - set(G)} in nodelist is not in G")
+    if len(nodeset) < nlen:
+        raise nx.NetworkXError("nodelist contains duplicates.")
+
+    # Start from an all-`nonedge` matrix; edge entries are overwritten below.
+    A = np.full((nlen, nlen), fill_value=nonedge, dtype=dtype, order=order)
+
+    # Corner cases: empty nodelist or graph without any edges
+    if nlen == 0 or G.number_of_edges() == 0:
+        return A
+
+    # If dtype is structured and weight is None, use dtype field names as
+    # edge attributes
+    edge_attrs = None  # Only single edge attribute by default
+    if A.dtype.names:
+        if weight is None:
+            edge_attrs = dtype.names
+        else:
+            raise ValueError(
+                "Specifying `weight` not supported for structured dtypes\n."
+                "To create adjacency matrices from structured dtypes, use `weight=None`."
+            )
+
+    # Map nodes to row/col in matrix
+    idx = dict(zip(nodelist, range(nlen)))
+    if len(nodelist) < len(G):
+        G = G.subgraph(nodelist).copy()
+
+    # Collect all edge weights and reduce with `multigraph_weights`
+    if G.is_multigraph():
+        if edge_attrs:
+            raise nx.NetworkXError(
+                "Structured arrays are not supported for MultiGraphs"
+            )
+        d = defaultdict(list)
+        for u, v, wt in G.edges(data=weight, default=1.0):
+            d[(idx[u], idx[v])].append(wt)
+        # NOTE(review): if `nodelist` induces an edgeless subgraph of a
+        # multigraph that has edges, `d` is empty and the unpack below
+        # raises ValueError -- confirm whether callers can hit this.
+        i, j = np.array(list(d.keys())).T  # indices
+        wts = [multigraph_weight(ws) for ws in d.values()]  # reduced weights
+    else:
+        i, j, wts = [], [], []
+
+        # Special branch: multi-attr adjacency from structured dtypes
+        if edge_attrs:
+            # Extract edges with all data
+            for u, v, data in G.edges(data=True):
+                i.append(idx[u])
+                j.append(idx[v])
+                wts.append(data)
+            # Map each attribute to the appropriate named field in the
+            # structured dtype
+            for attr in edge_attrs:
+                attr_data = [wt.get(attr, 1.0) for wt in wts]
+                A[attr][i, j] = attr_data
+                if not G.is_directed():
+                    A[attr][j, i] = attr_data
+            return A
+
+        for u, v, wt in G.edges(data=weight, default=1.0):
+            i.append(idx[u])
+            j.append(idx[v])
+            wts.append(wt)
+
+    # Set array values with advanced indexing
+    A[i, j] = wts
+    if not G.is_directed():
+        A[j, i] = wts
+
+    return A
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def from_numpy_array(
+    A, parallel_edges=False, create_using=None, edge_attr="weight", *, nodelist=None
+):
+    """Returns a graph from a 2D NumPy array.
+
+    The 2D NumPy array is interpreted as an adjacency matrix for the graph.
+
+    Parameters
+    ----------
+    A : a 2D numpy.ndarray
+        An adjacency matrix representation of a graph
+
+    parallel_edges : Boolean
+        If this is True, `create_using` is a multigraph, and `A` is an
+        integer array, then entry *(i, j)* in the array is interpreted as the
+        number of parallel edges joining vertices *i* and *j* in the graph.
+        If it is False, then the entries in the array are interpreted as
+        the weight of a single edge joining the vertices.
+
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    edge_attr : String, optional (default="weight")
+        The attribute to which the array values are assigned on each edge. If
+        it is None, edge attributes will not be assigned.
+
+    nodelist : sequence of nodes, optional
+        A sequence of objects to use as the nodes in the graph. If provided, the
+        list of nodes must be the same length as the dimensions of `A`. The
+        default is `None`, in which case the nodes are drawn from ``range(n)``.
+
+    Notes
+    -----
+    For directed graphs, explicitly mention create_using=nx.DiGraph,
+    and entry i,j of A corresponds to an edge from i to j.
+
+    If `create_using` is :class:`networkx.MultiGraph` or
+    :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the
+    entries of `A` are of type :class:`int`, then this function returns a
+    multigraph (of the same type as `create_using`) with parallel edges.
+
+    If `create_using` indicates an undirected multigraph, then only the edges
+    indicated by the upper triangle of the array `A` will be added to the
+    graph.
+
+    If `edge_attr` is Falsy (False or None), edge attributes will not be
+    assigned, and the array data will be treated like a binary mask of
+    edge presence or absence. Otherwise, the attributes will be assigned
+    as follows:
+
+    If the NumPy array has a single data type for each array entry it
+    will be converted to an appropriate Python data type.
+
+    If the NumPy array has a user-specified compound data type the names
+    of the data fields will be used as attribute keys in the resulting
+    NetworkX graph.
+
+    See Also
+    --------
+    to_numpy_array
+
+    Examples
+    --------
+    Simple integer weights on edges:
+
+    >>> import numpy as np
+    >>> A = np.array([[1, 1], [2, 1]])
+    >>> G = nx.from_numpy_array(A)
+    >>> G.edges(data=True)
+    EdgeDataView([(0, 0, {'weight': 1}), (0, 1, {'weight': 2}), (1, 1, {'weight': 1})])
+
+    If `create_using` indicates a multigraph and the array has only integer
+    entries and `parallel_edges` is False, then the entries will be treated
+    as weights for edges joining the nodes (without creating parallel edges):
+
+    >>> A = np.array([[1, 1], [1, 2]])
+    >>> G = nx.from_numpy_array(A, create_using=nx.MultiGraph)
+    >>> G[1][1]
+    AtlasView({0: {'weight': 2}})
+
+    If `create_using` indicates a multigraph and the array has only integer
+    entries and `parallel_edges` is True, then the entries will be treated
+    as the number of parallel edges joining those two vertices:
+
+    >>> A = np.array([[1, 1], [1, 2]])
+    >>> temp = nx.MultiGraph()
+    >>> G = nx.from_numpy_array(A, parallel_edges=True, create_using=temp)
+    >>> G[1][1]
+    AtlasView({0: {'weight': 1}, 1: {'weight': 1}})
+
+    User defined compound data type on edges:
+
+    >>> dt = [("weight", float), ("cost", int)]
+    >>> A = np.array([[(1.0, 2)]], dtype=dt)
+    >>> G = nx.from_numpy_array(A)
+    >>> G.edges()
+    EdgeView([(0, 0)])
+    >>> G[0][0]["cost"]
+    2
+    >>> G[0][0]["weight"]
+    1.0
+
+    """
+    # Map NumPy dtype "kind" codes to Python scalar converters; the "void"
+    # sentinel marks structured (compound) dtypes, handled separately below.
+    kind_to_python_type = {
+        "f": float,
+        "i": int,
+        "u": int,
+        "b": bool,
+        "c": complex,
+        "S": str,
+        "U": str,
+        "V": "void",
+    }
+    G = nx.empty_graph(0, create_using)
+    if A.ndim != 2:
+        raise nx.NetworkXError(f"Input array must be 2D, not {A.ndim}")
+    n, m = A.shape
+    if n != m:
+        raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}")
+    dt = A.dtype
+    try:
+        python_type = kind_to_python_type[dt.kind]
+    except Exception as err:
+        raise TypeError(f"Unknown numpy data type: {dt}") from err
+    # Record whether default integer node labels are in use; when the caller
+    # supplies `nodelist`, edge endpoints are remapped to it at the end.
+    if _default_nodes := (nodelist is None):
+        nodelist = range(n)
+    else:
+        if len(nodelist) != n:
+            raise ValueError("nodelist must have the same length as A.shape[0]")
+
+    # Make sure we get even the isolated nodes of the graph.
+    G.add_nodes_from(nodelist)
+    # Get a list of all the entries in the array with nonzero entries. These
+    # coordinates become edges in the graph. (convert to int from np.int64)
+    edges = ((int(e[0]), int(e[1])) for e in zip(*A.nonzero()))
+    # handle numpy constructed data type
+    if python_type == "void":
+        # Sort the fields by their offset, then by dtype, then by name.
+        fields = sorted(
+            (offset, dtype, name) for name, (dtype, offset) in A.dtype.fields.items()
+        )
+        triples = (
+            (
+                u,
+                v,
+                {}
+                if edge_attr in [False, None]
+                else {
+                    name: kind_to_python_type[dtype.kind](val)
+                    for (_, dtype, name), val in zip(fields, A[u, v])
+                },
+            )
+            for u, v in edges
+        )
+    # If the entries in the adjacency matrix are integers, the graph is a
+    # multigraph, and parallel_edges is True, then create parallel edges, each
+    # with weight 1, for each entry in the adjacency matrix. Otherwise, create
+    # one edge for each positive entry in the adjacency matrix and set the
+    # weight of that edge to be the entry in the matrix.
+    elif python_type is int and G.is_multigraph() and parallel_edges:
+        chain = itertools.chain.from_iterable
+        # The following line is equivalent to:
+        #
+        #     for (u, v) in edges:
+        #         for d in range(A[u, v]):
+        #             G.add_edge(u, v, weight=1)
+        #
+        if edge_attr in [False, None]:
+            triples = chain(((u, v, {}) for d in range(A[u, v])) for (u, v) in edges)
+        else:
+            triples = chain(
+                ((u, v, {edge_attr: 1}) for d in range(A[u, v])) for (u, v) in edges
+            )
+    else:  # basic data type
+        if edge_attr in [False, None]:
+            triples = ((u, v, {}) for u, v in edges)
+        else:
+            triples = ((u, v, {edge_attr: python_type(A[u, v])}) for u, v in edges)
+    # If we are creating an undirected multigraph, only add the edges from the
+    # upper triangle of the matrix. Otherwise, add all the edges. This relies
+    # on the fact that the vertices created in the
+    # `_generated_weighted_edges()` function are actually the row/column
+    # indices for the matrix `A`.
+    #
+    # Without this check, we run into a problem where each edge is added twice
+    # when `G.add_edges_from()` is invoked below.
+    if G.is_multigraph() and not G.is_directed():
+        triples = ((u, v, d) for u, v, d in triples if u <= v)
+    # Remap nodes if user provided custom `nodelist`
+    if not _default_nodes:
+        idx_to_node = dict(enumerate(nodelist))
+        triples = ((idx_to_node[u], idx_to_node[v], d) for u, v, d in triples)
+    G.add_edges_from(triples)
+    return G
diff --git a/llava_next/lib/python3.10/site-packages/networkx/exception.py b/llava_next/lib/python3.10/site-packages/networkx/exception.py
new file mode 100644
index 0000000000000000000000000000000000000000..c960cf13fd5a8e4da0ca68c66350b8baa1728c34
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/exception.py
@@ -0,0 +1,131 @@
+"""
+**********
+Exceptions
+**********
+
+Base exceptions and errors for NetworkX.
+"""
+
+__all__ = [
+ "HasACycle",
+ "NodeNotFound",
+ "PowerIterationFailedConvergence",
+ "ExceededMaxIterations",
+ "AmbiguousSolution",
+ "NetworkXAlgorithmError",
+ "NetworkXException",
+ "NetworkXError",
+ "NetworkXNoCycle",
+ "NetworkXNoPath",
+ "NetworkXNotImplemented",
+ "NetworkXPointlessConcept",
+ "NetworkXUnbounded",
+ "NetworkXUnfeasible",
+]
+
+
+class NetworkXException(Exception):
+ """Base class for exceptions in NetworkX."""
+
+
+class NetworkXError(NetworkXException):
+ """Exception for a serious error in NetworkX"""
+
+
+class NetworkXPointlessConcept(NetworkXException):
+ """Raised when a null graph is provided as input to an algorithm
+ that cannot use it.
+
+ The null graph is sometimes considered a pointless concept [1]_,
+ thus the name of the exception.
+
+ Notes
+ -----
+ Null graphs and empty graphs are often used interchangeably but they
+ are well defined in NetworkX. An ``empty_graph`` is a graph with ``n`` nodes
+ and 0 edges, and a ``null_graph`` is a graph with 0 nodes and 0 edges.
+
+ References
+ ----------
+ .. [1] Harary, F. and Read, R. "Is the Null Graph a Pointless
+ Concept?" In Graphs and Combinatorics Conference, George
+ Washington University. New York: Springer-Verlag, 1973.
+
+ """
+
+
+class NetworkXAlgorithmError(NetworkXException):
+ """Exception for unexpected termination of algorithms."""
+
+
+class NetworkXUnfeasible(NetworkXAlgorithmError):
+ """Exception raised by algorithms trying to solve a problem
+ instance that has no feasible solution."""
+
+
+class NetworkXNoPath(NetworkXUnfeasible):
+ """Exception for algorithms that should return a path when running
+ on graphs where such a path does not exist."""
+
+
+class NetworkXNoCycle(NetworkXUnfeasible):
+ """Exception for algorithms that should return a cycle when running
+ on graphs where such a cycle does not exist."""
+
+
+class HasACycle(NetworkXException):
+ """Raised if a graph has a cycle when an algorithm expects that it
+ will have no cycles.
+
+ """
+
+
+class NetworkXUnbounded(NetworkXAlgorithmError):
+ """Exception raised by algorithms trying to solve a maximization
+ or a minimization problem instance that is unbounded."""
+
+
+class NetworkXNotImplemented(NetworkXException):
+ """Exception raised by algorithms not implemented for a type of graph."""
+
+
+class NodeNotFound(NetworkXException):
+ """Exception raised if requested node is not present in the graph"""
+
+
+class AmbiguousSolution(NetworkXException):
+ """Raised if more than one valid solution exists for an intermediary step
+ of an algorithm.
+
+ In the face of ambiguity, refuse the temptation to guess.
+ This may occur, for example, when trying to determine the
+ bipartite node sets in a disconnected bipartite graph when
+ computing bipartite matchings.
+
+ """
+
+
+class ExceededMaxIterations(NetworkXException):
+ """Raised if a loop iterates too many times without breaking.
+
+ This may occur, for example, in an algorithm that computes
+ progressively better approximations to a value but exceeds an
+ iteration bound specified by the user.
+
+ """
+
+
+class PowerIterationFailedConvergence(ExceededMaxIterations):
+ """Raised when the power iteration method fails to converge within a
+ specified iteration limit.
+
+ `num_iterations` is the number of iterations that have been
+ completed when this exception was raised.
+
+ """
+
+ def __init__(self, num_iterations, *args, **kw):
+ msg = f"power iteration failed to converge within {num_iterations} iterations"
+ exception_message = msg
+ superinit = super().__init__
+ superinit(self, exception_message, *args, **kw)
diff --git a/llava_next/lib/python3.10/site-packages/networkx/lazy_imports.py b/llava_next/lib/python3.10/site-packages/networkx/lazy_imports.py
new file mode 100644
index 0000000000000000000000000000000000000000..396404ba38f5885bfcc65af36d7b4655e94ccc27
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/lazy_imports.py
@@ -0,0 +1,188 @@
+import importlib
+import importlib.util
+import inspect
+import os
+import sys
+import types
+
+__all__ = ["attach", "_lazy_import"]
+
+
+def attach(module_name, submodules=None, submod_attrs=None):
+ """Attach lazily loaded submodules, and functions or other attributes.
+
+ Typically, modules import submodules and attributes as follows::
+
+ import mysubmodule
+ import anothersubmodule
+
+ from .foo import someattr
+
+ The idea of this function is to replace the `__init__.py`
+ module's `__getattr__`, `__dir__`, and `__all__` attributes such that
+ all imports work exactly the way they normally would, except that the
+ actual import is delayed until the resulting module object is first used.
+
+ The typical way to call this function, replacing the above imports, is::
+
+ __getattr__, __lazy_dir__, __all__ = lazy.attach(
+ __name__, ["mysubmodule", "anothersubmodule"], {"foo": "someattr"}
+ )
+
+ This functionality requires Python 3.7 or higher.
+
+ Parameters
+ ----------
+ module_name : str
+ Typically use __name__.
+ submodules : set
+ List of submodules to lazily import.
+ submod_attrs : dict
+ Dictionary of submodule -> list of attributes / functions.
+ These attributes are imported as they are used.
+
+ Returns
+ -------
+ __getattr__, __dir__, __all__
+
+ """
+ if submod_attrs is None:
+ submod_attrs = {}
+
+ if submodules is None:
+ submodules = set()
+ else:
+ submodules = set(submodules)
+
+ attr_to_modules = {
+ attr: mod for mod, attrs in submod_attrs.items() for attr in attrs
+ }
+
+ __all__ = list(submodules | attr_to_modules.keys())
+
+ def __getattr__(name):
+ if name in submodules:
+ return importlib.import_module(f"{module_name}.{name}")
+ elif name in attr_to_modules:
+ submod = importlib.import_module(f"{module_name}.{attr_to_modules[name]}")
+ return getattr(submod, name)
+ else:
+ raise AttributeError(f"No {module_name} attribute {name}")
+
+ def __dir__():
+ return __all__
+
+ if os.environ.get("EAGER_IMPORT", ""):
+ for attr in set(attr_to_modules.keys()) | submodules:
+ __getattr__(attr)
+
+ return __getattr__, __dir__, list(__all__)
+
+
+class DelayedImportErrorModule(types.ModuleType):
+ def __init__(self, frame_data, *args, **kwargs):
+ self.__frame_data = frame_data
+ super().__init__(*args, **kwargs)
+
+ def __getattr__(self, x):
+ if x in ("__class__", "__file__", "__frame_data"):
+ super().__getattr__(x)
+ else:
+ fd = self.__frame_data
+ raise ModuleNotFoundError(
+ f"No module named '{fd['spec']}'\n\n"
+ "This error is lazily reported, having originally occurred in\n"
+ f' File {fd["filename"]}, line {fd["lineno"]}, in {fd["function"]}\n\n'
+ f'----> {"".join(fd["code_context"] or "").strip()}'
+ )
+
+
+def _lazy_import(fullname):
+ """Return a lazily imported proxy for a module or library.
+
+ Warning
+ -------
+ Importing using this function can currently cause trouble
+ when the user tries to import from a subpackage of a module before
+ the package is fully imported. In particular, this idiom may not work:
+
+ np = lazy_import("numpy")
+ from numpy.lib import recfunctions
+
+ This is due to a difference in the way Python's LazyLoader handles
+ subpackage imports compared to the normal import process. Hopefully
+ we will get Python's LazyLoader to fix this, or find a workaround.
+ In the meantime, this is a potential problem.
+
+ The workaround is to import numpy before importing from the subpackage.
+
+ Notes
+ -----
+ We often see the following pattern::
+
+ def myfunc():
+ import scipy as sp
+ sp.argmin(...)
+ ....
+
+ This is to prevent a library, in this case `scipy`, from being
+ imported at function definition time, since that can be slow.
+
+ This function provides a proxy module that, upon access, imports
+ the actual module. So the idiom equivalent to the above example is::
+
+ sp = lazy.load("scipy")
+
+ def myfunc():
+ sp.argmin(...)
+ ....
+
+ The initial import time is fast because the actual import is delayed
+ until the first attribute is requested. The overall import time may
+ decrease as well for users that don't make use of large portions
+ of the library.
+
+ Parameters
+ ----------
+ fullname : str
+ The full name of the package or subpackage to import. For example::
+
+ sp = lazy.load("scipy") # import scipy as sp
+ spla = lazy.load("scipy.linalg") # import scipy.linalg as spla
+
+ Returns
+ -------
+ pm : importlib.util._LazyModule
+ Proxy module. Can be used like any regularly imported module.
+ Actual loading of the module occurs upon first attribute request.
+
+ """
+ try:
+ return sys.modules[fullname]
+ except:
+ pass
+
+ # Not previously loaded -- look it up
+ spec = importlib.util.find_spec(fullname)
+
+ if spec is None:
+ try:
+ parent = inspect.stack()[1]
+ frame_data = {
+ "spec": fullname,
+ "filename": parent.filename,
+ "lineno": parent.lineno,
+ "function": parent.function,
+ "code_context": parent.code_context,
+ }
+ return DelayedImportErrorModule(frame_data, "DelayedImportErrorModule")
+ finally:
+ del parent
+
+ module = importlib.util.module_from_spec(spec)
+ sys.modules[fullname] = module
+
+ loader = importlib.util.LazyLoader(spec.loader)
+ loader.exec_module(module)
+
+ return module
diff --git a/llava_next/lib/python3.10/site-packages/networkx/relabel.py b/llava_next/lib/python3.10/site-packages/networkx/relabel.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b870f726ef42e0bcaa7bf724e2ae6ab4145f288
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/networkx/relabel.py
@@ -0,0 +1,285 @@
+import networkx as nx
+
+__all__ = ["convert_node_labels_to_integers", "relabel_nodes"]
+
+
+@nx._dispatchable(
+ preserve_all_attrs=True, mutates_input={"not copy": 2}, returns_graph=True
+)
+def relabel_nodes(G, mapping, copy=True):
+ """Relabel the nodes of the graph G according to a given mapping.
+
+ The original node ordering may not be preserved if `copy` is `False` and the
+ mapping includes overlap between old and new labels.
+
+ Parameters
+ ----------
+ G : graph
+ A NetworkX graph
+
+ mapping : dictionary
+ A dictionary with the old labels as keys and new labels as values.
+ A partial mapping is allowed. Mapping 2 nodes to a single node is allowed.
+ Any non-node keys in the mapping are ignored.
+
+ copy : bool (optional, default=True)
+ If True return a copy, or if False relabel the nodes in place.
+
+ Examples
+ --------
+ To create a new graph with nodes relabeled according to a given
+ dictionary:
+
+ >>> G = nx.path_graph(3)
+ >>> sorted(G)
+ [0, 1, 2]
+ >>> mapping = {0: "a", 1: "b", 2: "c"}
+ >>> H = nx.relabel_nodes(G, mapping)
+ >>> sorted(H)
+ ['a', 'b', 'c']
+
+ Nodes can be relabeled with any hashable object, including numbers
+ and strings:
+
+ >>> import string
+ >>> G = nx.path_graph(26) # nodes are integers 0 through 25
+ >>> sorted(G)[:3]
+ [0, 1, 2]
+ >>> mapping = dict(zip(G, string.ascii_lowercase))
+ >>> G = nx.relabel_nodes(G, mapping) # nodes are characters a through z
+ >>> sorted(G)[:3]
+ ['a', 'b', 'c']
+ >>> mapping = dict(zip(G, range(1, 27)))
+ >>> G = nx.relabel_nodes(G, mapping) # nodes are integers 1 through 26
+ >>> sorted(G)[:3]
+ [1, 2, 3]
+
+ To perform a partial in-place relabeling, provide a dictionary
+ mapping only a subset of the nodes, and set the `copy` keyword
+ argument to False:
+
+ >>> G = nx.path_graph(3) # nodes 0-1-2
+ >>> mapping = {0: "a", 1: "b"} # 0->'a' and 1->'b'
+ >>> G = nx.relabel_nodes(G, mapping, copy=False)
+ >>> sorted(G, key=str)
+ [2, 'a', 'b']
+
+ A mapping can also be given as a function:
+
+ >>> G = nx.path_graph(3)
+ >>> H = nx.relabel_nodes(G, lambda x: x**2)
+ >>> list(H)
+ [0, 1, 4]
+
+ In a multigraph, relabeling two or more nodes to the same new node
+ will retain all edges, but may change the edge keys in the process:
+
+ >>> G = nx.MultiGraph()
+ >>> G.add_edge(0, 1, value="a") # returns the key for this edge
+ 0
+ >>> G.add_edge(0, 2, value="b")
+ 0
+ >>> G.add_edge(0, 3, value="c")
+ 0
+ >>> mapping = {1: 4, 2: 4, 3: 4}
+ >>> H = nx.relabel_nodes(G, mapping, copy=True)
+ >>> print(H[0])
+ {4: {0: {'value': 'a'}, 1: {'value': 'b'}, 2: {'value': 'c'}}}
+
+ This works for in-place relabeling too:
+
+ >>> G = nx.relabel_nodes(G, mapping, copy=False)
+ >>> print(G[0])
+ {4: {0: {'value': 'a'}, 1: {'value': 'b'}, 2: {'value': 'c'}}}
+
+ Notes
+ -----
+ Only the nodes specified in the mapping will be relabeled.
+ Any non-node keys in the mapping are ignored.
+
+ The keyword setting copy=False modifies the graph in place.
+ Relabel_nodes avoids naming collisions by building a
+ directed graph from ``mapping`` which specifies the order of
+ relabelings. Naming collisions, such as a->b, b->c, are ordered
+ such that "b" gets renamed to "c" before "a" gets renamed "b".
+ In cases of circular mappings (e.g. a->b, b->a), modifying the
+ graph is not possible in-place and an exception is raised.
+ In that case, use copy=True.
+
+ If a relabel operation on a multigraph would cause two or more
+ edges to have the same source, target and key, the second edge must
+ be assigned a new key to retain all edges. The new key is set
+ to the lowest non-negative integer not already used as a key
+ for edges between these two nodes. Note that this means non-numeric
+ keys may be replaced by numeric keys.
+
+ See Also
+ --------
+ convert_node_labels_to_integers
+ """
+ # you can pass any callable e.g. f(old_label) -> new_label or
+ # e.g. str(old_label) -> new_label, but we'll just make a dictionary here regardless
+ m = {n: mapping(n) for n in G} if callable(mapping) else mapping
+
+ if copy:
+ return _relabel_copy(G, m)
+ else:
+ return _relabel_inplace(G, m)
+
+
+def _relabel_inplace(G, mapping):
+ if len(mapping.keys() & mapping.values()) > 0:
+ # labels sets overlap
+ # can we topological sort and still do the relabeling?
+ D = nx.DiGraph(list(mapping.items()))
+ D.remove_edges_from(nx.selfloop_edges(D))
+ try:
+ nodes = reversed(list(nx.topological_sort(D)))
+ except nx.NetworkXUnfeasible as err:
+ raise nx.NetworkXUnfeasible(
+ "The node label sets are overlapping and no ordering can "
+ "resolve the mapping. Use copy=True."
+ ) from err
+ else:
+ # non-overlapping label sets, sort them in the order of G nodes
+ nodes = [n for n in G if n in mapping]
+
+ multigraph = G.is_multigraph()
+ directed = G.is_directed()
+
+ for old in nodes:
+ # Test that old is in both mapping and G, otherwise ignore.
+ try:
+ new = mapping[old]
+ G.add_node(new, **G.nodes[old])
+ except KeyError:
+ continue
+ if new == old:
+ continue
+ if multigraph:
+ new_edges = [
+ (new, new if old == target else target, key, data)
+ for (_, target, key, data) in G.edges(old, data=True, keys=True)
+ ]
+ if directed:
+ new_edges += [
+ (new if old == source else source, new, key, data)
+ for (source, _, key, data) in G.in_edges(old, data=True, keys=True)
+ ]
+ # Ensure new edges won't overwrite existing ones
+ seen = set()
+ for i, (source, target, key, data) in enumerate(new_edges):
+ if target in G[source] and key in G[source][target]:
+ new_key = 0 if not isinstance(key, int | float) else key
+ while new_key in G[source][target] or (target, new_key) in seen:
+ new_key += 1
+ new_edges[i] = (source, target, new_key, data)
+ seen.add((target, new_key))
+ else:
+ new_edges = [
+ (new, new if old == target else target, data)
+ for (_, target, data) in G.edges(old, data=True)
+ ]
+ if directed:
+ new_edges += [
+ (new if old == source else source, new, data)
+ for (source, _, data) in G.in_edges(old, data=True)
+ ]
+ G.remove_node(old)
+ G.add_edges_from(new_edges)
+ return G
+
+
+def _relabel_copy(G, mapping):
+ H = G.__class__()
+ H.add_nodes_from(mapping.get(n, n) for n in G)
+ H._node.update((mapping.get(n, n), d.copy()) for n, d in G.nodes.items())
+ if G.is_multigraph():
+ new_edges = [
+ (mapping.get(n1, n1), mapping.get(n2, n2), k, d.copy())
+ for (n1, n2, k, d) in G.edges(keys=True, data=True)
+ ]
+
+ # check for conflicting edge-keys
+ undirected = not G.is_directed()
+ seen_edges = set()
+ for i, (source, target, key, data) in enumerate(new_edges):
+ while (source, target, key) in seen_edges:
+ if not isinstance(key, int | float):
+ key = 0
+ key += 1
+ seen_edges.add((source, target, key))
+ if undirected:
+ seen_edges.add((target, source, key))
+ new_edges[i] = (source, target, key, data)
+
+ H.add_edges_from(new_edges)
+ else:
+ H.add_edges_from(
+ (mapping.get(n1, n1), mapping.get(n2, n2), d.copy())
+ for (n1, n2, d) in G.edges(data=True)
+ )
+ H.graph.update(G.graph)
+ return H
+
+
+@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
+def convert_node_labels_to_integers(
+ G, first_label=0, ordering="default", label_attribute=None
+):
+ """Returns a copy of the graph G with the nodes relabeled using
+ consecutive integers.
+
+ Parameters
+ ----------
+ G : graph
+ A NetworkX graph
+
+ first_label : int, optional (default=0)
+ An integer specifying the starting offset in numbering nodes.
+ The new integer labels are numbered first_label, ..., n-1+first_label.
+
+ ordering : string
+ "default" : inherit node ordering from G.nodes()
+ "sorted" : inherit node ordering from sorted(G.nodes())
+ "increasing degree" : nodes are sorted by increasing degree
+ "decreasing degree" : nodes are sorted by decreasing degree
+
+ label_attribute : string, optional (default=None)
+ Name of node attribute to store old label. If None no attribute
+ is created.
+
+ Notes
+ -----
+ Node and edge attribute data are copied to the new (relabeled) graph.
+
+ There is no guarantee that the relabeling of nodes to integers will
+ give the same two integers for two (even identical graphs).
+ Use the `ordering` argument to try to preserve the order.
+
+ See Also
+ --------
+ relabel_nodes
+ """
+ N = G.number_of_nodes() + first_label
+ if ordering == "default":
+ mapping = dict(zip(G.nodes(), range(first_label, N)))
+ elif ordering == "sorted":
+ nlist = sorted(G.nodes())
+ mapping = dict(zip(nlist, range(first_label, N)))
+ elif ordering == "increasing degree":
+ dv_pairs = [(d, n) for (n, d) in G.degree()]
+ dv_pairs.sort() # in-place sort from lowest to highest degree
+ mapping = dict(zip([n for d, n in dv_pairs], range(first_label, N)))
+ elif ordering == "decreasing degree":
+ dv_pairs = [(d, n) for (n, d) in G.degree()]
+ dv_pairs.sort() # in-place sort from lowest to highest degree
+ dv_pairs.reverse()
+ mapping = dict(zip([n for d, n in dv_pairs], range(first_label, N)))
+ else:
+ raise nx.NetworkXError(f"Unknown node ordering: {ordering}")
+ H = relabel_nodes(G, mapping)
+ # create node attribute with the old label
+ if label_attribute is not None:
+ nx.set_node_attributes(H, {v: k for k, v in mapping.items()}, label_attribute)
+ return H
diff --git a/llava_next/lib/python3.10/site-packages/pillow.libs/libXau-154567c4.so.6.0.0 b/llava_next/lib/python3.10/site-packages/pillow.libs/libXau-154567c4.so.6.0.0
new file mode 100644
index 0000000000000000000000000000000000000000..ff06a58be7b9ff80cee9b8eb45d5e9a28cf67d1b
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/pillow.libs/libXau-154567c4.so.6.0.0 differ
diff --git a/llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlicommon-3ecfe81c.so.1 b/llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlicommon-3ecfe81c.so.1
new file mode 100644
index 0000000000000000000000000000000000000000..8775162cc70b2bc5fde2d51d50d0d8b3a4fdf4c9
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlicommon-3ecfe81c.so.1
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:024bc7605502cffd45b3f7c3f37fe043694cc3b4b4cb7f39af3b9a72793e4c2e
+size 144425
diff --git a/llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlidec-ba690955.so.1 b/llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlidec-ba690955.so.1
new file mode 100644
index 0000000000000000000000000000000000000000..b59afaea94084ff905a85348024ff00eecc7bb1f
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlidec-ba690955.so.1 differ
diff --git a/llava_next/lib/python3.10/site-packages/pillow.libs/libfreetype-be14bf51.so.6.20.1 b/llava_next/lib/python3.10/site-packages/pillow.libs/libfreetype-be14bf51.so.6.20.1
new file mode 100644
index 0000000000000000000000000000000000000000..0586adefe0ce8fc32a6a565c7460ef30304a7efc
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/pillow.libs/libfreetype-be14bf51.so.6.20.1
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a2178a93a108ae14521328258296ab896a77f5f520ea2a5cb66d75838cc2f5f
+size 1422625
diff --git a/llava_next/lib/python3.10/site-packages/pillow.libs/libsharpyuv-898c0cb5.so.0.1.0 b/llava_next/lib/python3.10/site-packages/pillow.libs/libsharpyuv-898c0cb5.so.0.1.0
new file mode 100644
index 0000000000000000000000000000000000000000..d384f32efa5b631d5c0635cc2e2562ef53a040e7
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/pillow.libs/libsharpyuv-898c0cb5.so.0.1.0 differ
diff --git a/llava_next/lib/python3.10/site-packages/pillow.libs/libwebpdemux-f2642bcc.so.2.0.15 b/llava_next/lib/python3.10/site-packages/pillow.libs/libwebpdemux-f2642bcc.so.2.0.15
new file mode 100644
index 0000000000000000000000000000000000000000..cf0f8f4584bc85fa4bc52792adffc0721fee85ec
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/pillow.libs/libwebpdemux-f2642bcc.so.2.0.15 differ
diff --git a/llava_next/lib/python3.10/site-packages/pillow.libs/libwebpmux-d524b4d5.so.3.1.0 b/llava_next/lib/python3.10/site-packages/pillow.libs/libwebpmux-d524b4d5.so.3.1.0
new file mode 100644
index 0000000000000000000000000000000000000000..2c9c58f53e72b061479681be1d445e4a4ba80557
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/pillow.libs/libwebpmux-d524b4d5.so.3.1.0 differ
diff --git a/llava_next/lib/python3.10/site-packages/pillow.libs/libxcb-b8a56d01.so.1.1.0 b/llava_next/lib/python3.10/site-packages/pillow.libs/libxcb-b8a56d01.so.1.1.0
new file mode 100644
index 0000000000000000000000000000000000000000..f33db3946ee98a09ed39c8664e1ee1046dc29992
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/pillow.libs/libxcb-b8a56d01.so.1.1.0
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:633eef394415421c697c6b986ba99ade7f7c0364d5c8a90e3de01a44d8247f1b
+size 251425
diff --git a/llava_next/lib/python3.10/site-packages/rpds/__init__.py b/llava_next/lib/python3.10/site-packages/rpds/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..257da6a7bd439c46cb2409e77531dc4a4dc6295c
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/rpds/__init__.py
@@ -0,0 +1,5 @@
+from .rpds import *
+
+__doc__ = rpds.__doc__
+if hasattr(rpds, "__all__"):
+ __all__ = rpds.__all__
\ No newline at end of file
diff --git a/llava_next/lib/python3.10/site-packages/rpds/__pycache__/__init__.cpython-310.pyc b/llava_next/lib/python3.10/site-packages/rpds/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6282e42498cf0e904fe7daf776c75dde49e3e2be
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/rpds/__pycache__/__init__.cpython-310.pyc differ
diff --git a/llava_next/lib/python3.10/site-packages/rpds/py.typed b/llava_next/lib/python3.10/site-packages/rpds/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/PKG-INFO b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/PKG-INFO
new file mode 100644
index 0000000000000000000000000000000000000000..0c941771c526fbff823bfa8501a27faab8f2e993
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/PKG-INFO
@@ -0,0 +1,142 @@
+Metadata-Version: 2.2
+Name: setuptools
+Version: 75.8.0
+Summary: Easily download, build, install, upgrade, and uninstall Python packages
+Author-email: Python Packaging Authority
+Project-URL: Source, https://github.com/pypa/setuptools
+Project-URL: Documentation, https://setuptools.pypa.io/
+Project-URL: Changelog, https://setuptools.pypa.io/en/stable/history.html
+Keywords: CPAN PyPI distutils eggs package management
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Provides-Extra: test
+Requires-Dist: pytest!=8.1.*,>=6; extra == "test"
+Requires-Dist: virtualenv>=13.0.0; extra == "test"
+Requires-Dist: wheel>=0.44.0; extra == "test"
+Requires-Dist: pip>=19.1; extra == "test"
+Requires-Dist: packaging>=24.2; extra == "test"
+Requires-Dist: jaraco.envs>=2.2; extra == "test"
+Requires-Dist: pytest-xdist>=3; extra == "test"
+Requires-Dist: jaraco.path>=3.7.2; extra == "test"
+Requires-Dist: build[virtualenv]>=1.0.3; extra == "test"
+Requires-Dist: filelock>=3.4.0; extra == "test"
+Requires-Dist: ini2toml[lite]>=0.14; extra == "test"
+Requires-Dist: tomli-w>=1.0.0; extra == "test"
+Requires-Dist: pytest-timeout; extra == "test"
+Requires-Dist: pytest-perf; sys_platform != "cygwin" and extra == "test"
+Requires-Dist: jaraco.develop>=7.21; (python_version >= "3.9" and sys_platform != "cygwin") and extra == "test"
+Requires-Dist: pytest-home>=0.5; extra == "test"
+Requires-Dist: pytest-subprocess; extra == "test"
+Requires-Dist: pyproject-hooks!=1.1; extra == "test"
+Requires-Dist: jaraco.test>=5.5; extra == "test"
+Provides-Extra: doc
+Requires-Dist: sphinx>=3.5; extra == "doc"
+Requires-Dist: jaraco.packaging>=9.3; extra == "doc"
+Requires-Dist: rst.linker>=1.9; extra == "doc"
+Requires-Dist: furo; extra == "doc"
+Requires-Dist: sphinx-lint; extra == "doc"
+Requires-Dist: jaraco.tidelift>=1.4; extra == "doc"
+Requires-Dist: pygments-github-lexers==0.0.5; extra == "doc"
+Requires-Dist: sphinx-favicon; extra == "doc"
+Requires-Dist: sphinx-inline-tabs; extra == "doc"
+Requires-Dist: sphinx-reredirects; extra == "doc"
+Requires-Dist: sphinxcontrib-towncrier; extra == "doc"
+Requires-Dist: sphinx-notfound-page<2,>=1; extra == "doc"
+Requires-Dist: pyproject-hooks!=1.1; extra == "doc"
+Requires-Dist: towncrier<24.7; extra == "doc"
+Provides-Extra: ssl
+Provides-Extra: certs
+Provides-Extra: core
+Requires-Dist: packaging>=24.2; extra == "core"
+Requires-Dist: more_itertools>=8.8; extra == "core"
+Requires-Dist: jaraco.text>=3.7; extra == "core"
+Requires-Dist: importlib_metadata>=6; python_version < "3.10" and extra == "core"
+Requires-Dist: tomli>=2.0.1; python_version < "3.11" and extra == "core"
+Requires-Dist: wheel>=0.43.0; extra == "core"
+Requires-Dist: platformdirs>=4.2.2; extra == "core"
+Requires-Dist: jaraco.collections; extra == "core"
+Requires-Dist: jaraco.functools>=4; extra == "core"
+Requires-Dist: packaging; extra == "core"
+Requires-Dist: more_itertools; extra == "core"
+Provides-Extra: check
+Requires-Dist: pytest-checkdocs>=2.4; extra == "check"
+Requires-Dist: pytest-ruff>=0.2.1; sys_platform != "cygwin" and extra == "check"
+Requires-Dist: ruff>=0.8.0; sys_platform != "cygwin" and extra == "check"
+Provides-Extra: cover
+Requires-Dist: pytest-cov; extra == "cover"
+Provides-Extra: enabler
+Requires-Dist: pytest-enabler>=2.2; extra == "enabler"
+Provides-Extra: type
+Requires-Dist: pytest-mypy; extra == "type"
+Requires-Dist: mypy==1.14.*; extra == "type"
+Requires-Dist: importlib_metadata>=7.0.2; python_version < "3.10" and extra == "type"
+Requires-Dist: jaraco.develop>=7.21; sys_platform != "cygwin" and extra == "type"
+
+.. |pypi-version| image:: https://img.shields.io/pypi/v/setuptools.svg
+ :target: https://pypi.org/project/setuptools
+
+.. |py-version| image:: https://img.shields.io/pypi/pyversions/setuptools.svg
+
+.. |test-badge| image:: https://github.com/pypa/setuptools/actions/workflows/main.yml/badge.svg
+ :target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. |ruff-badge| image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+ :target: https://github.com/astral-sh/ruff
+ :alt: Ruff
+
+.. |docs-badge| image:: https://img.shields.io/readthedocs/setuptools/latest.svg
+ :target: https://setuptools.pypa.io
+
+.. |skeleton-badge| image:: https://img.shields.io/badge/skeleton-2024-informational
+ :target: https://blog.jaraco.com/skeleton
+
+.. |codecov-badge| image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white
+ :target: https://codecov.io/gh/pypa/setuptools
+
+.. |tidelift-badge| image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat
+ :target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme
+
+.. |discord-badge| image:: https://img.shields.io/discord/803025117553754132
+ :target: https://discord.com/channels/803025117553754132/815945031150993468
+ :alt: Discord
+
+|pypi-version| |py-version| |test-badge| |ruff-badge| |docs-badge| |skeleton-badge| |codecov-badge| |discord-badge|
+
+See the `Quickstart `_
+and the `User's Guide `_ for
+instructions on how to use Setuptools.
+
+Questions and comments should be directed to `GitHub Discussions
+`_.
+Bug reports and especially tested patches may be
+submitted directly to the `bug tracker
+`_.
+
+
+Code of Conduct
+===============
+
+Everyone interacting in the setuptools project's codebases, issue trackers,
+chat rooms, and fora is expected to follow the
+`PSF Code of Conduct `_.
+
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+Setuptools and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more `_.
diff --git a/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/SOURCES.txt b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/SOURCES.txt
new file mode 100644
index 0000000000000000000000000000000000000000..29082151c0f8f791aadab0533d15c3e345d23835
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/SOURCES.txt
@@ -0,0 +1,571 @@
+LICENSE
+MANIFEST.in
+NEWS.rst
+README.rst
+conftest.py
+exercises.py
+launcher.c
+mypy.ini
+pyproject.toml
+pytest.ini
+setup.cfg
+setup.py
+tox.ini
+_distutils_hack/__init__.py
+_distutils_hack/override.py
+docs/artwork.rst
+docs/build_meta.rst
+docs/conf.py
+docs/history.rst
+docs/index.rst
+docs/pkg_resources.rst
+docs/python 2 sunset.rst
+docs/roadmap.rst
+docs/setuptools.rst
+docs/deprecated/changed_keywords.rst
+docs/deprecated/commands.rst
+docs/deprecated/dependency_links.rst
+docs/deprecated/distutils-legacy.rst
+docs/deprecated/easy_install.rst
+docs/deprecated/functionalities.rst
+docs/deprecated/index.rst
+docs/deprecated/python_eggs.rst
+docs/deprecated/resource_extraction.rst
+docs/deprecated/zip_safe.rst
+docs/deprecated/distutils/_setuptools_disclaimer.rst
+docs/deprecated/distutils/apiref.rst
+docs/deprecated/distutils/builtdist.rst
+docs/deprecated/distutils/commandref.rst
+docs/deprecated/distutils/configfile.rst
+docs/deprecated/distutils/examples.rst
+docs/deprecated/distutils/extending.rst
+docs/deprecated/distutils/index.rst
+docs/deprecated/distutils/introduction.rst
+docs/deprecated/distutils/packageindex.rst
+docs/deprecated/distutils/setupscript.rst
+docs/deprecated/distutils/sourcedist.rst
+docs/deprecated/distutils/uploading.rst
+docs/development/developer-guide.rst
+docs/development/index.rst
+docs/development/releases.rst
+docs/references/keywords.rst
+docs/userguide/datafiles.rst
+docs/userguide/declarative_config.rst
+docs/userguide/dependency_management.rst
+docs/userguide/development_mode.rst
+docs/userguide/distribution.rst
+docs/userguide/entry_point.rst
+docs/userguide/ext_modules.rst
+docs/userguide/extension.rst
+docs/userguide/index.rst
+docs/userguide/miscellaneous.rst
+docs/userguide/package_discovery.rst
+docs/userguide/pyproject_config.rst
+docs/userguide/quickstart.rst
+newsfragments/.gitignore
+newsfragments/README.rst
+pkg_resources/__init__.py
+pkg_resources/api_tests.txt
+pkg_resources/py.typed
+pkg_resources/tests/__init__.py
+pkg_resources/tests/test_find_distributions.py
+pkg_resources/tests/test_integration_zope_interface.py
+pkg_resources/tests/test_markers.py
+pkg_resources/tests/test_pkg_resources.py
+pkg_resources/tests/test_resources.py
+pkg_resources/tests/test_working_set.py
+pkg_resources/tests/data/my-test-package-source/setup.cfg
+pkg_resources/tests/data/my-test-package-source/setup.py
+pkg_resources/tests/data/my-test-package-zip/my-test-package.zip
+pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO
+pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt
+pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt
+pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt
+pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe
+pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg
+setuptools/__init__.py
+setuptools/_core_metadata.py
+setuptools/_entry_points.py
+setuptools/_imp.py
+setuptools/_importlib.py
+setuptools/_itertools.py
+setuptools/_normalization.py
+setuptools/_path.py
+setuptools/_reqs.py
+setuptools/_shutil.py
+setuptools/_static.py
+setuptools/archive_util.py
+setuptools/build_meta.py
+setuptools/cli-32.exe
+setuptools/cli-64.exe
+setuptools/cli-arm64.exe
+setuptools/cli.exe
+setuptools/depends.py
+setuptools/discovery.py
+setuptools/dist.py
+setuptools/errors.py
+setuptools/extension.py
+setuptools/glob.py
+setuptools/gui-32.exe
+setuptools/gui-64.exe
+setuptools/gui-arm64.exe
+setuptools/gui.exe
+setuptools/installer.py
+setuptools/launch.py
+setuptools/logging.py
+setuptools/modified.py
+setuptools/monkey.py
+setuptools/msvc.py
+setuptools/namespaces.py
+setuptools/package_index.py
+setuptools/sandbox.py
+setuptools/script (dev).tmpl
+setuptools/script.tmpl
+setuptools/unicode_utils.py
+setuptools/version.py
+setuptools/warnings.py
+setuptools/wheel.py
+setuptools/windows_support.py
+setuptools.egg-info/PKG-INFO
+setuptools.egg-info/SOURCES.txt
+setuptools.egg-info/dependency_links.txt
+setuptools.egg-info/entry_points.txt
+setuptools.egg-info/requires.txt
+setuptools.egg-info/top_level.txt
+setuptools/_distutils/__init__.py
+setuptools/_distutils/_log.py
+setuptools/_distutils/_macos_compat.py
+setuptools/_distutils/_modified.py
+setuptools/_distutils/_msvccompiler.py
+setuptools/_distutils/archive_util.py
+setuptools/_distutils/ccompiler.py
+setuptools/_distutils/cmd.py
+setuptools/_distutils/core.py
+setuptools/_distutils/cygwinccompiler.py
+setuptools/_distutils/debug.py
+setuptools/_distutils/dep_util.py
+setuptools/_distutils/dir_util.py
+setuptools/_distutils/dist.py
+setuptools/_distutils/errors.py
+setuptools/_distutils/extension.py
+setuptools/_distutils/fancy_getopt.py
+setuptools/_distutils/file_util.py
+setuptools/_distutils/filelist.py
+setuptools/_distutils/log.py
+setuptools/_distutils/spawn.py
+setuptools/_distutils/sysconfig.py
+setuptools/_distutils/text_file.py
+setuptools/_distutils/unixccompiler.py
+setuptools/_distutils/util.py
+setuptools/_distutils/version.py
+setuptools/_distutils/versionpredicate.py
+setuptools/_distutils/zosccompiler.py
+setuptools/_distutils/command/__init__.py
+setuptools/_distutils/command/_framework_compat.py
+setuptools/_distutils/command/bdist.py
+setuptools/_distutils/command/bdist_dumb.py
+setuptools/_distutils/command/bdist_rpm.py
+setuptools/_distutils/command/build.py
+setuptools/_distutils/command/build_clib.py
+setuptools/_distutils/command/build_ext.py
+setuptools/_distutils/command/build_py.py
+setuptools/_distutils/command/build_scripts.py
+setuptools/_distutils/command/check.py
+setuptools/_distutils/command/clean.py
+setuptools/_distutils/command/config.py
+setuptools/_distutils/command/install.py
+setuptools/_distutils/command/install_data.py
+setuptools/_distutils/command/install_egg_info.py
+setuptools/_distutils/command/install_headers.py
+setuptools/_distutils/command/install_lib.py
+setuptools/_distutils/command/install_scripts.py
+setuptools/_distutils/command/sdist.py
+setuptools/_distutils/compat/__init__.py
+setuptools/_distutils/compat/py39.py
+setuptools/_distutils/tests/__init__.py
+setuptools/_distutils/tests/support.py
+setuptools/_distutils/tests/test_archive_util.py
+setuptools/_distutils/tests/test_bdist.py
+setuptools/_distutils/tests/test_bdist_dumb.py
+setuptools/_distutils/tests/test_bdist_rpm.py
+setuptools/_distutils/tests/test_build.py
+setuptools/_distutils/tests/test_build_clib.py
+setuptools/_distutils/tests/test_build_ext.py
+setuptools/_distutils/tests/test_build_py.py
+setuptools/_distutils/tests/test_build_scripts.py
+setuptools/_distutils/tests/test_ccompiler.py
+setuptools/_distutils/tests/test_check.py
+setuptools/_distutils/tests/test_clean.py
+setuptools/_distutils/tests/test_cmd.py
+setuptools/_distutils/tests/test_config_cmd.py
+setuptools/_distutils/tests/test_core.py
+setuptools/_distutils/tests/test_cygwinccompiler.py
+setuptools/_distutils/tests/test_dir_util.py
+setuptools/_distutils/tests/test_dist.py
+setuptools/_distutils/tests/test_extension.py
+setuptools/_distutils/tests/test_file_util.py
+setuptools/_distutils/tests/test_filelist.py
+setuptools/_distutils/tests/test_install.py
+setuptools/_distutils/tests/test_install_data.py
+setuptools/_distutils/tests/test_install_headers.py
+setuptools/_distutils/tests/test_install_lib.py
+setuptools/_distutils/tests/test_install_scripts.py
+setuptools/_distutils/tests/test_log.py
+setuptools/_distutils/tests/test_mingwccompiler.py
+setuptools/_distutils/tests/test_modified.py
+setuptools/_distutils/tests/test_msvccompiler.py
+setuptools/_distutils/tests/test_sdist.py
+setuptools/_distutils/tests/test_spawn.py
+setuptools/_distutils/tests/test_sysconfig.py
+setuptools/_distutils/tests/test_text_file.py
+setuptools/_distutils/tests/test_unixccompiler.py
+setuptools/_distutils/tests/test_util.py
+setuptools/_distutils/tests/test_version.py
+setuptools/_distutils/tests/test_versionpredicate.py
+setuptools/_distutils/tests/unix_compat.py
+setuptools/_distutils/tests/compat/__init__.py
+setuptools/_distutils/tests/compat/py39.py
+setuptools/_vendor/ruff.toml
+setuptools/_vendor/typing_extensions.py
+setuptools/_vendor/autocommand/__init__.py
+setuptools/_vendor/autocommand/autoasync.py
+setuptools/_vendor/autocommand/autocommand.py
+setuptools/_vendor/autocommand/automain.py
+setuptools/_vendor/autocommand/autoparse.py
+setuptools/_vendor/autocommand/errors.py
+setuptools/_vendor/autocommand-2.2.2.dist-info/INSTALLER
+setuptools/_vendor/autocommand-2.2.2.dist-info/LICENSE
+setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA
+setuptools/_vendor/autocommand-2.2.2.dist-info/RECORD
+setuptools/_vendor/autocommand-2.2.2.dist-info/WHEEL
+setuptools/_vendor/autocommand-2.2.2.dist-info/top_level.txt
+setuptools/_vendor/backports/__init__.py
+setuptools/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER
+setuptools/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE
+setuptools/_vendor/backports.tarfile-1.2.0.dist-info/METADATA
+setuptools/_vendor/backports.tarfile-1.2.0.dist-info/RECORD
+setuptools/_vendor/backports.tarfile-1.2.0.dist-info/REQUESTED
+setuptools/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL
+setuptools/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt
+setuptools/_vendor/backports/tarfile/__init__.py
+setuptools/_vendor/backports/tarfile/__main__.py
+setuptools/_vendor/backports/tarfile/compat/__init__.py
+setuptools/_vendor/backports/tarfile/compat/py38.py
+setuptools/_vendor/importlib_metadata/__init__.py
+setuptools/_vendor/importlib_metadata/_adapters.py
+setuptools/_vendor/importlib_metadata/_collections.py
+setuptools/_vendor/importlib_metadata/_compat.py
+setuptools/_vendor/importlib_metadata/_functools.py
+setuptools/_vendor/importlib_metadata/_itertools.py
+setuptools/_vendor/importlib_metadata/_meta.py
+setuptools/_vendor/importlib_metadata/_text.py
+setuptools/_vendor/importlib_metadata/diagnose.py
+setuptools/_vendor/importlib_metadata/py.typed
+setuptools/_vendor/importlib_metadata-8.0.0.dist-info/INSTALLER
+setuptools/_vendor/importlib_metadata-8.0.0.dist-info/LICENSE
+setuptools/_vendor/importlib_metadata-8.0.0.dist-info/METADATA
+setuptools/_vendor/importlib_metadata-8.0.0.dist-info/RECORD
+setuptools/_vendor/importlib_metadata-8.0.0.dist-info/REQUESTED
+setuptools/_vendor/importlib_metadata-8.0.0.dist-info/WHEEL
+setuptools/_vendor/importlib_metadata-8.0.0.dist-info/top_level.txt
+setuptools/_vendor/importlib_metadata/compat/__init__.py
+setuptools/_vendor/importlib_metadata/compat/py311.py
+setuptools/_vendor/importlib_metadata/compat/py39.py
+setuptools/_vendor/inflect/__init__.py
+setuptools/_vendor/inflect/py.typed
+setuptools/_vendor/inflect-7.3.1.dist-info/INSTALLER
+setuptools/_vendor/inflect-7.3.1.dist-info/LICENSE
+setuptools/_vendor/inflect-7.3.1.dist-info/METADATA
+setuptools/_vendor/inflect-7.3.1.dist-info/RECORD
+setuptools/_vendor/inflect-7.3.1.dist-info/WHEEL
+setuptools/_vendor/inflect-7.3.1.dist-info/top_level.txt
+setuptools/_vendor/inflect/compat/__init__.py
+setuptools/_vendor/inflect/compat/py38.py
+setuptools/_vendor/jaraco/context.py
+setuptools/_vendor/jaraco.collections-5.1.0.dist-info/INSTALLER
+setuptools/_vendor/jaraco.collections-5.1.0.dist-info/LICENSE
+setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA
+setuptools/_vendor/jaraco.collections-5.1.0.dist-info/RECORD
+setuptools/_vendor/jaraco.collections-5.1.0.dist-info/REQUESTED
+setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL
+setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt
+setuptools/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER
+setuptools/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
+setuptools/_vendor/jaraco.context-5.3.0.dist-info/METADATA
+setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD
+setuptools/_vendor/jaraco.context-5.3.0.dist-info/WHEEL
+setuptools/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt
+setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
+setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
+setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
+setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
+setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
+setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
+setuptools/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER
+setuptools/_vendor/jaraco.text-3.12.1.dist-info/LICENSE
+setuptools/_vendor/jaraco.text-3.12.1.dist-info/METADATA
+setuptools/_vendor/jaraco.text-3.12.1.dist-info/RECORD
+setuptools/_vendor/jaraco.text-3.12.1.dist-info/REQUESTED
+setuptools/_vendor/jaraco.text-3.12.1.dist-info/WHEEL
+setuptools/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt
+setuptools/_vendor/jaraco/collections/__init__.py
+setuptools/_vendor/jaraco/collections/py.typed
+setuptools/_vendor/jaraco/functools/__init__.py
+setuptools/_vendor/jaraco/functools/__init__.pyi
+setuptools/_vendor/jaraco/functools/py.typed
+setuptools/_vendor/jaraco/text/Lorem ipsum.txt
+setuptools/_vendor/jaraco/text/__init__.py
+setuptools/_vendor/jaraco/text/layouts.py
+setuptools/_vendor/jaraco/text/show-newlines.py
+setuptools/_vendor/jaraco/text/strip-prefix.py
+setuptools/_vendor/jaraco/text/to-dvorak.py
+setuptools/_vendor/jaraco/text/to-qwerty.py
+setuptools/_vendor/more_itertools/__init__.py
+setuptools/_vendor/more_itertools/__init__.pyi
+setuptools/_vendor/more_itertools/more.py
+setuptools/_vendor/more_itertools/more.pyi
+setuptools/_vendor/more_itertools/py.typed
+setuptools/_vendor/more_itertools/recipes.py
+setuptools/_vendor/more_itertools/recipes.pyi
+setuptools/_vendor/more_itertools-10.3.0.dist-info/INSTALLER
+setuptools/_vendor/more_itertools-10.3.0.dist-info/LICENSE
+setuptools/_vendor/more_itertools-10.3.0.dist-info/METADATA
+setuptools/_vendor/more_itertools-10.3.0.dist-info/RECORD
+setuptools/_vendor/more_itertools-10.3.0.dist-info/REQUESTED
+setuptools/_vendor/more_itertools-10.3.0.dist-info/WHEEL
+setuptools/_vendor/packaging/__init__.py
+setuptools/_vendor/packaging/_elffile.py
+setuptools/_vendor/packaging/_manylinux.py
+setuptools/_vendor/packaging/_musllinux.py
+setuptools/_vendor/packaging/_parser.py
+setuptools/_vendor/packaging/_structures.py
+setuptools/_vendor/packaging/_tokenizer.py
+setuptools/_vendor/packaging/markers.py
+setuptools/_vendor/packaging/metadata.py
+setuptools/_vendor/packaging/py.typed
+setuptools/_vendor/packaging/requirements.py
+setuptools/_vendor/packaging/specifiers.py
+setuptools/_vendor/packaging/tags.py
+setuptools/_vendor/packaging/utils.py
+setuptools/_vendor/packaging/version.py
+setuptools/_vendor/packaging-24.2.dist-info/INSTALLER
+setuptools/_vendor/packaging-24.2.dist-info/LICENSE
+setuptools/_vendor/packaging-24.2.dist-info/LICENSE.APACHE
+setuptools/_vendor/packaging-24.2.dist-info/LICENSE.BSD
+setuptools/_vendor/packaging-24.2.dist-info/METADATA
+setuptools/_vendor/packaging-24.2.dist-info/RECORD
+setuptools/_vendor/packaging-24.2.dist-info/REQUESTED
+setuptools/_vendor/packaging-24.2.dist-info/WHEEL
+setuptools/_vendor/packaging/licenses/__init__.py
+setuptools/_vendor/packaging/licenses/_spdx.py
+setuptools/_vendor/platformdirs/__init__.py
+setuptools/_vendor/platformdirs/__main__.py
+setuptools/_vendor/platformdirs/android.py
+setuptools/_vendor/platformdirs/api.py
+setuptools/_vendor/platformdirs/macos.py
+setuptools/_vendor/platformdirs/py.typed
+setuptools/_vendor/platformdirs/unix.py
+setuptools/_vendor/platformdirs/version.py
+setuptools/_vendor/platformdirs/windows.py
+setuptools/_vendor/platformdirs-4.2.2.dist-info/INSTALLER
+setuptools/_vendor/platformdirs-4.2.2.dist-info/METADATA
+setuptools/_vendor/platformdirs-4.2.2.dist-info/RECORD
+setuptools/_vendor/platformdirs-4.2.2.dist-info/REQUESTED
+setuptools/_vendor/platformdirs-4.2.2.dist-info/WHEEL
+setuptools/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE
+setuptools/_vendor/tomli/__init__.py
+setuptools/_vendor/tomli/_parser.py
+setuptools/_vendor/tomli/_re.py
+setuptools/_vendor/tomli/_types.py
+setuptools/_vendor/tomli/py.typed
+setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER
+setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE
+setuptools/_vendor/tomli-2.0.1.dist-info/METADATA
+setuptools/_vendor/tomli-2.0.1.dist-info/RECORD
+setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED
+setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL
+setuptools/_vendor/typeguard/__init__.py
+setuptools/_vendor/typeguard/_checkers.py
+setuptools/_vendor/typeguard/_config.py
+setuptools/_vendor/typeguard/_decorators.py
+setuptools/_vendor/typeguard/_exceptions.py
+setuptools/_vendor/typeguard/_functions.py
+setuptools/_vendor/typeguard/_importhook.py
+setuptools/_vendor/typeguard/_memo.py
+setuptools/_vendor/typeguard/_pytest_plugin.py
+setuptools/_vendor/typeguard/_suppression.py
+setuptools/_vendor/typeguard/_transformer.py
+setuptools/_vendor/typeguard/_union_transformer.py
+setuptools/_vendor/typeguard/_utils.py
+setuptools/_vendor/typeguard/py.typed
+setuptools/_vendor/typeguard-4.3.0.dist-info/INSTALLER
+setuptools/_vendor/typeguard-4.3.0.dist-info/LICENSE
+setuptools/_vendor/typeguard-4.3.0.dist-info/METADATA
+setuptools/_vendor/typeguard-4.3.0.dist-info/RECORD
+setuptools/_vendor/typeguard-4.3.0.dist-info/WHEEL
+setuptools/_vendor/typeguard-4.3.0.dist-info/entry_points.txt
+setuptools/_vendor/typeguard-4.3.0.dist-info/top_level.txt
+setuptools/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER
+setuptools/_vendor/typing_extensions-4.12.2.dist-info/LICENSE
+setuptools/_vendor/typing_extensions-4.12.2.dist-info/METADATA
+setuptools/_vendor/typing_extensions-4.12.2.dist-info/RECORD
+setuptools/_vendor/typing_extensions-4.12.2.dist-info/WHEEL
+setuptools/_vendor/wheel/__init__.py
+setuptools/_vendor/wheel/__main__.py
+setuptools/_vendor/wheel/_setuptools_logging.py
+setuptools/_vendor/wheel/bdist_wheel.py
+setuptools/_vendor/wheel/macosx_libfile.py
+setuptools/_vendor/wheel/metadata.py
+setuptools/_vendor/wheel/util.py
+setuptools/_vendor/wheel/wheelfile.py
+setuptools/_vendor/wheel-0.43.0.dist-info/INSTALLER
+setuptools/_vendor/wheel-0.43.0.dist-info/LICENSE.txt
+setuptools/_vendor/wheel-0.43.0.dist-info/METADATA
+setuptools/_vendor/wheel-0.43.0.dist-info/RECORD
+setuptools/_vendor/wheel-0.43.0.dist-info/REQUESTED
+setuptools/_vendor/wheel-0.43.0.dist-info/WHEEL
+setuptools/_vendor/wheel-0.43.0.dist-info/entry_points.txt
+setuptools/_vendor/wheel/cli/__init__.py
+setuptools/_vendor/wheel/cli/convert.py
+setuptools/_vendor/wheel/cli/pack.py
+setuptools/_vendor/wheel/cli/tags.py
+setuptools/_vendor/wheel/cli/unpack.py
+setuptools/_vendor/wheel/vendored/__init__.py
+setuptools/_vendor/wheel/vendored/vendor.txt
+setuptools/_vendor/wheel/vendored/packaging/__init__.py
+setuptools/_vendor/wheel/vendored/packaging/_elffile.py
+setuptools/_vendor/wheel/vendored/packaging/_manylinux.py
+setuptools/_vendor/wheel/vendored/packaging/_musllinux.py
+setuptools/_vendor/wheel/vendored/packaging/_parser.py
+setuptools/_vendor/wheel/vendored/packaging/_structures.py
+setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py
+setuptools/_vendor/wheel/vendored/packaging/markers.py
+setuptools/_vendor/wheel/vendored/packaging/requirements.py
+setuptools/_vendor/wheel/vendored/packaging/specifiers.py
+setuptools/_vendor/wheel/vendored/packaging/tags.py
+setuptools/_vendor/wheel/vendored/packaging/utils.py
+setuptools/_vendor/wheel/vendored/packaging/version.py
+setuptools/_vendor/zipp/__init__.py
+setuptools/_vendor/zipp/glob.py
+setuptools/_vendor/zipp-3.19.2.dist-info/INSTALLER
+setuptools/_vendor/zipp-3.19.2.dist-info/LICENSE
+setuptools/_vendor/zipp-3.19.2.dist-info/METADATA
+setuptools/_vendor/zipp-3.19.2.dist-info/RECORD
+setuptools/_vendor/zipp-3.19.2.dist-info/REQUESTED
+setuptools/_vendor/zipp-3.19.2.dist-info/WHEEL
+setuptools/_vendor/zipp-3.19.2.dist-info/top_level.txt
+setuptools/_vendor/zipp/compat/__init__.py
+setuptools/_vendor/zipp/compat/py310.py
+setuptools/command/__init__.py
+setuptools/command/_requirestxt.py
+setuptools/command/alias.py
+setuptools/command/bdist_egg.py
+setuptools/command/bdist_rpm.py
+setuptools/command/bdist_wheel.py
+setuptools/command/build.py
+setuptools/command/build_clib.py
+setuptools/command/build_ext.py
+setuptools/command/build_py.py
+setuptools/command/develop.py
+setuptools/command/dist_info.py
+setuptools/command/easy_install.py
+setuptools/command/editable_wheel.py
+setuptools/command/egg_info.py
+setuptools/command/install.py
+setuptools/command/install_egg_info.py
+setuptools/command/install_lib.py
+setuptools/command/install_scripts.py
+setuptools/command/launcher manifest.xml
+setuptools/command/rotate.py
+setuptools/command/saveopts.py
+setuptools/command/sdist.py
+setuptools/command/setopt.py
+setuptools/command/test.py
+setuptools/compat/__init__.py
+setuptools/compat/py310.py
+setuptools/compat/py311.py
+setuptools/compat/py312.py
+setuptools/compat/py39.py
+setuptools/config/NOTICE
+setuptools/config/__init__.py
+setuptools/config/_apply_pyprojecttoml.py
+setuptools/config/distutils.schema.json
+setuptools/config/expand.py
+setuptools/config/pyprojecttoml.py
+setuptools/config/setupcfg.py
+setuptools/config/setuptools.schema.json
+setuptools/config/_validate_pyproject/NOTICE
+setuptools/config/_validate_pyproject/__init__.py
+setuptools/config/_validate_pyproject/error_reporting.py
+setuptools/config/_validate_pyproject/extra_validations.py
+setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py
+setuptools/config/_validate_pyproject/fastjsonschema_validations.py
+setuptools/config/_validate_pyproject/formats.py
+setuptools/tests/__init__.py
+setuptools/tests/contexts.py
+setuptools/tests/environment.py
+setuptools/tests/fixtures.py
+setuptools/tests/mod_with_constant.py
+setuptools/tests/namespaces.py
+setuptools/tests/script-with-bom.py
+setuptools/tests/server.py
+setuptools/tests/test_archive_util.py
+setuptools/tests/test_bdist_deprecations.py
+setuptools/tests/test_bdist_egg.py
+setuptools/tests/test_bdist_wheel.py
+setuptools/tests/test_build.py
+setuptools/tests/test_build_clib.py
+setuptools/tests/test_build_ext.py
+setuptools/tests/test_build_meta.py
+setuptools/tests/test_build_py.py
+setuptools/tests/test_config_discovery.py
+setuptools/tests/test_core_metadata.py
+setuptools/tests/test_depends.py
+setuptools/tests/test_develop.py
+setuptools/tests/test_dist.py
+setuptools/tests/test_dist_info.py
+setuptools/tests/test_distutils_adoption.py
+setuptools/tests/test_easy_install.py
+setuptools/tests/test_editable_install.py
+setuptools/tests/test_egg_info.py
+setuptools/tests/test_extern.py
+setuptools/tests/test_find_packages.py
+setuptools/tests/test_find_py_modules.py
+setuptools/tests/test_glob.py
+setuptools/tests/test_install_scripts.py
+setuptools/tests/test_logging.py
+setuptools/tests/test_manifest.py
+setuptools/tests/test_namespaces.py
+setuptools/tests/test_packageindex.py
+setuptools/tests/test_sandbox.py
+setuptools/tests/test_sdist.py
+setuptools/tests/test_setopt.py
+setuptools/tests/test_setuptools.py
+setuptools/tests/test_shutil_wrapper.py
+setuptools/tests/test_unicode_utils.py
+setuptools/tests/test_virtualenv.py
+setuptools/tests/test_warnings.py
+setuptools/tests/test_wheel.py
+setuptools/tests/test_windows_wrappers.py
+setuptools/tests/text.py
+setuptools/tests/textwrap.py
+setuptools/tests/compat/__init__.py
+setuptools/tests/compat/py39.py
+setuptools/tests/config/__init__.py
+setuptools/tests/config/setupcfg_examples.txt
+setuptools/tests/config/test_apply_pyprojecttoml.py
+setuptools/tests/config/test_expand.py
+setuptools/tests/config/test_pyprojecttoml.py
+setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py
+setuptools/tests/config/test_setupcfg.py
+setuptools/tests/config/downloads/__init__.py
+setuptools/tests/config/downloads/preload.py
+setuptools/tests/indexes/test_links_priority/external.html
+setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
+setuptools/tests/integration/__init__.py
+setuptools/tests/integration/helpers.py
+setuptools/tests/integration/test_pip_install_sdist.py
+tools/build_launchers.py
+tools/finalize.py
+tools/generate_validation_code.py
+tools/vendored.py
\ No newline at end of file
diff --git a/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/dependency_links.txt b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/dependency_links.txt
new file mode 100644
index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/entry_points.txt b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..0db0a6c8f1b8d9c0ad4a25db6892e29f8988fcf2
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/entry_points.txt
@@ -0,0 +1,51 @@
+[distutils.commands]
+alias = setuptools.command.alias:alias
+bdist_egg = setuptools.command.bdist_egg:bdist_egg
+bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
+bdist_wheel = setuptools.command.bdist_wheel:bdist_wheel
+build = setuptools.command.build:build
+build_clib = setuptools.command.build_clib:build_clib
+build_ext = setuptools.command.build_ext:build_ext
+build_py = setuptools.command.build_py:build_py
+develop = setuptools.command.develop:develop
+dist_info = setuptools.command.dist_info:dist_info
+easy_install = setuptools.command.easy_install:easy_install
+editable_wheel = setuptools.command.editable_wheel:editable_wheel
+egg_info = setuptools.command.egg_info:egg_info
+install = setuptools.command.install:install
+install_egg_info = setuptools.command.install_egg_info:install_egg_info
+install_lib = setuptools.command.install_lib:install_lib
+install_scripts = setuptools.command.install_scripts:install_scripts
+rotate = setuptools.command.rotate:rotate
+saveopts = setuptools.command.saveopts:saveopts
+sdist = setuptools.command.sdist:sdist
+setopt = setuptools.command.setopt:setopt
+
+[distutils.setup_keywords]
+dependency_links = setuptools.dist:assert_string_list
+eager_resources = setuptools.dist:assert_string_list
+entry_points = setuptools.dist:check_entry_points
+exclude_package_data = setuptools.dist:check_package_data
+extras_require = setuptools.dist:check_extras
+include_package_data = setuptools.dist:assert_bool
+install_requires = setuptools.dist:check_requirements
+namespace_packages = setuptools.dist:check_nsp
+package_data = setuptools.dist:check_package_data
+packages = setuptools.dist:check_packages
+python_requires = setuptools.dist:check_specifier
+setup_requires = setuptools.dist:check_requirements
+use_2to3 = setuptools.dist:invalid_unless_false
+zip_safe = setuptools.dist:assert_bool
+
+[egg_info.writers]
+PKG-INFO = setuptools.command.egg_info:write_pkg_info
+dependency_links.txt = setuptools.command.egg_info:overwrite_arg
+eager_resources.txt = setuptools.command.egg_info:overwrite_arg
+entry_points.txt = setuptools.command.egg_info:write_entries
+namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
+requires.txt = setuptools.command.egg_info:write_requirements
+top_level.txt = setuptools.command.egg_info:write_toplevel_names
+
+[setuptools.finalize_distribution_options]
+keywords = setuptools.dist:Distribution._finalize_setup_keywords
+parent_finalize = setuptools.dist:_Distribution.finalize_options
diff --git a/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/requires.txt b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/requires.txt
new file mode 100644
index 0000000000000000000000000000000000000000..4d40327a3cb30ab8ec29d99bb3f8a785174ae689
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/requires.txt
@@ -0,0 +1,85 @@
+
+[certs]
+
+[check]
+pytest-checkdocs>=2.4
+
+[check:sys_platform != "cygwin"]
+pytest-ruff>=0.2.1
+ruff>=0.8.0
+
+[core]
+packaging>=24.2
+more_itertools>=8.8
+jaraco.text>=3.7
+wheel>=0.43.0
+platformdirs>=4.2.2
+jaraco.collections
+jaraco.functools>=4
+packaging
+more_itertools
+
+[core:python_version < "3.10"]
+importlib_metadata>=6
+
+[core:python_version < "3.11"]
+tomli>=2.0.1
+
+[cover]
+pytest-cov
+
+[doc]
+sphinx>=3.5
+jaraco.packaging>=9.3
+rst.linker>=1.9
+furo
+sphinx-lint
+jaraco.tidelift>=1.4
+pygments-github-lexers==0.0.5
+sphinx-favicon
+sphinx-inline-tabs
+sphinx-reredirects
+sphinxcontrib-towncrier
+sphinx-notfound-page<2,>=1
+pyproject-hooks!=1.1
+towncrier<24.7
+
+[enabler]
+pytest-enabler>=2.2
+
+[ssl]
+
+[test]
+pytest!=8.1.*,>=6
+virtualenv>=13.0.0
+wheel>=0.44.0
+pip>=19.1
+packaging>=24.2
+jaraco.envs>=2.2
+pytest-xdist>=3
+jaraco.path>=3.7.2
+build[virtualenv]>=1.0.3
+filelock>=3.4.0
+ini2toml[lite]>=0.14
+tomli-w>=1.0.0
+pytest-timeout
+pytest-home>=0.5
+pytest-subprocess
+pyproject-hooks!=1.1
+jaraco.test>=5.5
+
+[test:python_version >= "3.9" and sys_platform != "cygwin"]
+jaraco.develop>=7.21
+
+[test:sys_platform != "cygwin"]
+pytest-perf
+
+[type]
+pytest-mypy
+mypy==1.14.*
+
+[type:python_version < "3.10"]
+importlib_metadata>=7.0.2
+
+[type:sys_platform != "cygwin"]
+jaraco.develop>=7.21
diff --git a/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/top_level.txt b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b5ac1070294b478b7cc2ce677207ee08813bfa37
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/top_level.txt
@@ -0,0 +1,3 @@
+_distutils_hack
+pkg_resources
+setuptools
diff --git a/llava_next/lib/python3.10/site-packages/shellingham/__init__.py b/llava_next/lib/python3.10/site-packages/shellingham/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..15f7a90cbd02e5c2cc933cf6aa0374cca68035f1
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/shellingham/__init__.py
@@ -0,0 +1,23 @@
+import importlib
+import os
+
+from ._core import ShellDetectionFailure
+
+__version__ = "1.5.4"
+
+
+def detect_shell(pid=None, max_depth=10):
+ name = os.name
+ try:
+ impl = importlib.import_module(".{}".format(name), __name__)
+ except ImportError:
+ message = "Shell detection not implemented for {0!r}".format(name)
+ raise RuntimeError(message)
+ try:
+ get_shell = impl.get_shell
+ except AttributeError:
+ raise RuntimeError("get_shell not implemented for {0!r}".format(name))
+ shell = get_shell(pid, max_depth=max_depth)
+ if shell:
+ return shell
+ raise ShellDetectionFailure()
diff --git a/llava_next/lib/python3.10/site-packages/shellingham/__pycache__/__init__.cpython-310.pyc b/llava_next/lib/python3.10/site-packages/shellingham/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d3f3d1a02c1fe2623989e97dd71c870e1ac9089b
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/shellingham/__pycache__/__init__.cpython-310.pyc differ
diff --git a/llava_next/lib/python3.10/site-packages/shellingham/posix/__init__.py b/llava_next/lib/python3.10/site-packages/shellingham/posix/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5bd2070db27189e62a1867e4de49f16f8c8841ff
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/shellingham/posix/__init__.py
@@ -0,0 +1,112 @@
+import os
+import re
+
+from .._core import SHELL_NAMES, ShellDetectionFailure
+from . import proc, ps
+
+# Based on QEMU docs: https://www.qemu.org/docs/master/user/main.html
+QEMU_BIN_REGEX = re.compile(
+ r"""qemu-
+ (alpha
+ |armeb
+ |arm
+ |m68k
+ |cris
+ |i386
+ |x86_64
+ |microblaze
+ |mips
+ |mipsel
+ |mips64
+ |mips64el
+ |mipsn32
+ |mipsn32el
+ |nios2
+ |ppc64
+ |ppc
+ |sh4eb
+ |sh4
+ |sparc
+ |sparc32plus
+ |sparc64
+ )""",
+ re.VERBOSE,
+)
+
+
+def _iter_process_parents(pid, max_depth=10):
+ """Select a way to obtain process information from the system.
+
+ * `/proc` is used if supported.
+ * The system `ps` utility is used as a fallback option.
+ """
+ for impl in (proc, ps):
+ try:
+ iterator = impl.iter_process_parents(pid, max_depth)
+ except EnvironmentError:
+ continue
+ return iterator
+ raise ShellDetectionFailure("compatible proc fs or ps utility is required")
+
+
+def _get_login_shell(proc_cmd):
+ """Form shell information from SHELL environ if possible."""
+ login_shell = os.environ.get("SHELL", "")
+ if login_shell:
+ proc_cmd = login_shell
+ else:
+ proc_cmd = proc_cmd[1:]
+ return (os.path.basename(proc_cmd).lower(), proc_cmd)
+
+
+_INTERPRETER_SHELL_NAMES = [
+ (re.compile(r"^python(\d+(\.\d+)?)?$"), {"xonsh"}),
+]
+
+
+def _get_interpreter_shell(proc_name, proc_args):
+ """Get shell invoked via an interpreter.
+
+ Some shells are implemented on, and invoked with an interpreter, e.g. xonsh
+ is commonly executed with an executable Python script. This detects what
+ script the interpreter is actually running, and check whether that looks
+ like a shell.
+
+ See sarugaku/shellingham#26 for rational.
+ """
+ for pattern, shell_names in _INTERPRETER_SHELL_NAMES:
+ if not pattern.match(proc_name):
+ continue
+ for arg in proc_args:
+ name = os.path.basename(arg).lower()
+ if os.path.isfile(arg) and name in shell_names:
+ return (name, arg)
+ return None
+
+
+def _get_shell(cmd, *args):
+ if cmd.startswith("-"): # Login shell! Let's use this.
+ return _get_login_shell(cmd)
+ name = os.path.basename(cmd).lower()
+ if name == "rosetta" or QEMU_BIN_REGEX.fullmatch(name):
+ # If the current process is Rosetta or QEMU, this likely is a
+ # containerized process. Parse out the actual command instead.
+ cmd = args[0]
+ args = args[1:]
+ name = os.path.basename(cmd).lower()
+ if name in SHELL_NAMES: # Command looks like a shell.
+ return (name, cmd)
+ shell = _get_interpreter_shell(name, args)
+ if shell:
+ return shell
+ return None
+
+
+def get_shell(pid=None, max_depth=10):
+ """Get the shell that the supplied pid or os.getpid() is running in."""
+ pid = str(pid or os.getpid())
+ for proc_args, _, _ in _iter_process_parents(pid, max_depth):
+ shell = _get_shell(*proc_args)
+ if shell:
+ return shell
+ return None
diff --git a/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/__init__.cpython-310.pyc b/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1f89ac2c954567d8b84426d0f20926c7f9b8fe53
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/__init__.cpython-310.pyc differ
diff --git a/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/_core.cpython-310.pyc b/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/_core.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cc474bc5e7bc2385659147a0737d40e9fec2fb70
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/_core.cpython-310.pyc differ
diff --git a/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/proc.cpython-310.pyc b/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/proc.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..47af4a4f55d90ca09190fffe2230942328235f05
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/proc.cpython-310.pyc differ
diff --git a/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/ps.cpython-310.pyc b/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/ps.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1e6d01adf82bbbe7d3615540cfb8ca7e923e83a2
Binary files /dev/null and b/llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/ps.cpython-310.pyc differ
diff --git a/llava_next/lib/python3.10/site-packages/shellingham/posix/_core.py b/llava_next/lib/python3.10/site-packages/shellingham/posix/_core.py
new file mode 100644
index 0000000000000000000000000000000000000000..adc49e6e7a9d3edf062c55e0078136899f78d30d
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/shellingham/posix/_core.py
@@ -0,0 +1,3 @@
+import collections
+
+Process = collections.namedtuple("Process", "args pid ppid")
diff --git a/llava_next/lib/python3.10/site-packages/shellingham/posix/proc.py b/llava_next/lib/python3.10/site-packages/shellingham/posix/proc.py
new file mode 100644
index 0000000000000000000000000000000000000000..950f63228e5b328f82b70da8851ec60c6a2ff029
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/shellingham/posix/proc.py
@@ -0,0 +1,83 @@
+import io
+import os
+import re
+import sys
+
+from ._core import Process
+
+# FreeBSD: https://www.freebsd.org/cgi/man.cgi?query=procfs
+# NetBSD: https://man.netbsd.org/NetBSD-9.3-STABLE/mount_procfs.8
+# DragonFlyBSD: https://www.dragonflybsd.org/cgi/web-man?command=procfs
+BSD_STAT_PPID = 2
+
+# See https://docs.kernel.org/filesystems/proc.html
+LINUX_STAT_PPID = 3
+
+STAT_PATTERN = re.compile(r"\(.+\)|\S+")
+
+
+def detect_proc():
+ """Detect /proc filesystem style.
+
+ This checks the /proc/{pid} directory for possible formats. Returns one of
+ the following as str:
+
+ * `stat`: Linux-style, i.e. ``/proc/{pid}/stat``.
+ * `status`: BSD-style, i.e. ``/proc/{pid}/status``.
+ """
+ pid = os.getpid()
+ for name in ("stat", "status"):
+ if os.path.exists(os.path.join("/proc", str(pid), name)):
+ return name
+ raise ProcFormatError("unsupported proc format")
+
+
+def _use_bsd_stat_format():
+ try:
+ return os.uname().sysname.lower() in ("freebsd", "netbsd", "dragonfly")
+ except Exception:
+ return False
+
+
+def _get_ppid(pid, name):
+ path = os.path.join("/proc", str(pid), name)
+ with io.open(path, encoding="ascii", errors="replace") as f:
+ parts = STAT_PATTERN.findall(f.read())
+ # We only care about TTY and PPID -- both are numbers.
+ if _use_bsd_stat_format():
+ return parts[BSD_STAT_PPID]
+ return parts[LINUX_STAT_PPID]
+
+
+def _get_cmdline(pid):
+ path = os.path.join("/proc", str(pid), "cmdline")
+ encoding = sys.getfilesystemencoding() or "utf-8"
+ with io.open(path, encoding=encoding, errors="replace") as f:
+ # XXX: Command line arguments can be arbitrary byte sequences, not
+ # necessarily decodable. For Shellingham's purpose, however, we don't
+ # care. (pypa/pipenv#2820)
+ # cmdline appends an extra NULL at the end, hence the [:-1].
+ return tuple(f.read().split("\0")[:-1])
+
+
+class ProcFormatError(EnvironmentError):
+ pass
+
+
+def iter_process_parents(pid, max_depth=10):
+ """Try to look up the process tree via the /proc interface."""
+ stat_name = detect_proc()
+
+ # Inner generator function so we correctly throw an error eagerly if proc
+ # is not supported, rather than on the first call to the iterator. This
+ # allows the call site detects the correct implementation.
+ def _iter_process_parents(pid, max_depth):
+ for _ in range(max_depth):
+ ppid = _get_ppid(pid, stat_name)
+ args = _get_cmdline(pid)
+ yield Process(args=args, pid=pid, ppid=ppid)
+ if ppid == "0":
+ break
+ pid = ppid
+
+ return _iter_process_parents(pid, max_depth)
diff --git a/llava_next/lib/python3.10/site-packages/shellingham/posix/ps.py b/llava_next/lib/python3.10/site-packages/shellingham/posix/ps.py
new file mode 100644
index 0000000000000000000000000000000000000000..3bc39a74a56390c263e63bfead028f6bce4df3cb
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/shellingham/posix/ps.py
@@ -0,0 +1,51 @@
+import errno
+import subprocess
+import sys
+
+from ._core import Process
+
+
+class PsNotAvailable(EnvironmentError):
+ pass
+
+
+def iter_process_parents(pid, max_depth=10):
+ """Try to look up the process tree via the output of `ps`."""
+ try:
+ cmd = ["ps", "-ww", "-o", "pid=", "-o", "ppid=", "-o", "args="]
+ output = subprocess.check_output(cmd)
+ except OSError as e: # Python 2-compatible FileNotFoundError.
+ if e.errno != errno.ENOENT:
+ raise
+ raise PsNotAvailable("ps not found")
+ except subprocess.CalledProcessError as e:
+ # `ps` can return 1 if the process list is completely empty.
+ # (sarugaku/shellingham#15)
+ if not e.output.strip():
+ return
+ raise
+ if not isinstance(output, str):
+ encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
+ output = output.decode(encoding)
+
+ processes_mapping = {}
+ for line in output.split("\n"):
+ try:
+ _pid, ppid, args = line.strip().split(None, 2)
+ # XXX: This is not right, but we are really out of options.
+ # ps does not offer a sane way to decode the argument display,
+ # and this is "Good Enough" for obtaining shell names. Hopefully
+ # people don't name their shell with a space, or have something
+ # like "/usr/bin/xonsh is uber". (sarugaku/shellingham#14)
+ args = tuple(a.strip() for a in args.split(" "))
+ except ValueError:
+ continue
+ processes_mapping[_pid] = Process(args=args, pid=_pid, ppid=ppid)
+
+ for _ in range(max_depth):
+ try:
+ process = processes_mapping[pid]
+ except KeyError:
+ return
+ yield process
+ pid = process.ppid
diff --git a/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/__init__.py b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..dbc0474d307a9fc87ab35067c08771adffe9d3b6
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/__init__.py
@@ -0,0 +1,57 @@
+from torchvision.transforms import AutoAugmentPolicy, InterpolationMode # usort: skip
+
+from . import functional # usort: skip
+
+from ._transform import Transform # usort: skip
+
+from ._augment import CutMix, MixUp, RandomErasing
+from ._auto_augment import AugMix, AutoAugment, RandAugment, TrivialAugmentWide
+from ._color import (
+ ColorJitter,
+ Grayscale,
+ RandomAdjustSharpness,
+ RandomAutocontrast,
+ RandomChannelPermutation,
+ RandomEqualize,
+ RandomGrayscale,
+ RandomInvert,
+ RandomPhotometricDistort,
+ RandomPosterize,
+ RandomSolarize,
+)
+from ._container import Compose, RandomApply, RandomChoice, RandomOrder
+from ._geometry import (
+ CenterCrop,
+ ElasticTransform,
+ FiveCrop,
+ Pad,
+ RandomAffine,
+ RandomCrop,
+ RandomHorizontalFlip,
+ RandomIoUCrop,
+ RandomPerspective,
+ RandomResize,
+ RandomResizedCrop,
+ RandomRotation,
+ RandomShortestSize,
+ RandomVerticalFlip,
+ RandomZoomOut,
+ Resize,
+ ScaleJitter,
+ TenCrop,
+)
+from ._meta import ClampBoundingBoxes, ConvertBoundingBoxFormat
+from ._misc import (
+ ConvertImageDtype,
+ GaussianBlur,
+ Identity,
+ Lambda,
+ LinearTransformation,
+ Normalize,
+ SanitizeBoundingBoxes,
+ ToDtype,
+)
+from ._temporal import UniformTemporalSubsample
+from ._type_conversion import PILToTensor, ToImage, ToPILImage, ToPureTensor
+
+from ._deprecated import ToTensor # usort: skip
diff --git a/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_augment.py b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_augment.py
new file mode 100644
index 0000000000000000000000000000000000000000..ad7fb861be268ee7c6f205a608be90ee21b91639
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_augment.py
@@ -0,0 +1,325 @@
+import math
+import numbers
+import warnings
+from typing import Any, Callable, Dict, List, Tuple
+
+import PIL.Image
+import torch
+from torch.nn.functional import one_hot
+from torch.utils._pytree import tree_flatten, tree_unflatten
+from torchvision import transforms as _transforms, tv_tensors
+from torchvision.transforms.v2 import functional as F
+
+from ._transform import _RandomApplyTransform, Transform
+from ._utils import _parse_labels_getter, has_any, is_pure_tensor, query_chw, query_size
+
+
+class RandomErasing(_RandomApplyTransform):
+ """[BETA] Randomly select a rectangle region in the input image or video and erase its pixels.
+
+ .. v2betastatus:: RandomErasing transform
+
+ This transform does not support PIL Image.
+ 'Random Erasing Data Augmentation' by Zhong et al. See https://arxiv.org/abs/1708.04896
+
+ Args:
+ p (float, optional): probability that the random erasing operation will be performed.
+ scale (tuple of float, optional): range of proportion of erased area against input image.
+ ratio (tuple of float, optional): range of aspect ratio of erased area.
+ value (number or tuple of numbers): erasing value. Default is 0. If a single int, it is used to
+ erase all pixels. If a tuple of length 3, it is used to erase
+ R, G, B channels respectively.
+ If a str of 'random', erasing each pixel with random values.
+ inplace (bool, optional): boolean to make this transform inplace. Default set to False.
+
+ Returns:
+ Erased input.
+
+ Example:
+ >>> from torchvision.transforms import v2 as transforms
+ >>>
+ >>> transform = transforms.Compose([
+ >>> transforms.RandomHorizontalFlip(),
+ >>> transforms.PILToTensor(),
+ >>> transforms.ConvertImageDtype(torch.float),
+ >>> transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
+ >>> transforms.RandomErasing(),
+ >>> ])
+ """
+
+ _v1_transform_cls = _transforms.RandomErasing
+
+ def _extract_params_for_v1_transform(self) -> Dict[str, Any]:
+ return dict(
+ super()._extract_params_for_v1_transform(),
+ value="random" if self.value is None else self.value,
+ )
+
+ def __init__(
+ self,
+ p: float = 0.5,
+ scale: Tuple[float, float] = (0.02, 0.33),
+ ratio: Tuple[float, float] = (0.3, 3.3),
+ value: float = 0.0,
+ inplace: bool = False,
+ ):
+ super().__init__(p=p)
+ if not isinstance(value, (numbers.Number, str, tuple, list)):
+ raise TypeError("Argument value should be either a number or str or a sequence")
+ if isinstance(value, str) and value != "random":
+ raise ValueError("If value is str, it should be 'random'")
+ if not isinstance(scale, (tuple, list)):
+ raise TypeError("Scale should be a sequence")
+ if not isinstance(ratio, (tuple, list)):
+ raise TypeError("Ratio should be a sequence")
+ if (scale[0] > scale[1]) or (ratio[0] > ratio[1]):
+ warnings.warn("Scale and ratio should be of kind (min, max)")
+ if scale[0] < 0 or scale[1] > 1:
+ raise ValueError("Scale should be between 0 and 1")
+ self.scale = scale
+ self.ratio = ratio
+ if isinstance(value, (int, float)):
+ self.value = [float(value)]
+ elif isinstance(value, str):
+ self.value = None
+ elif isinstance(value, (list, tuple)):
+ self.value = [float(v) for v in value]
+ else:
+ self.value = value
+ self.inplace = inplace
+
+ self._log_ratio = torch.log(torch.tensor(self.ratio))
+
+ def _call_kernel(self, functional: Callable, inpt: Any, *args: Any, **kwargs: Any) -> Any:
+ if isinstance(inpt, (tv_tensors.BoundingBoxes, tv_tensors.Mask)):
+ warnings.warn(
+ f"{type(self).__name__}() is currently passing through inputs of type "
+ f"tv_tensors.{type(inpt).__name__}. This will likely change in the future."
+ )
+ return super()._call_kernel(functional, inpt, *args, **kwargs)
+
+ def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+ img_c, img_h, img_w = query_chw(flat_inputs)
+
+ if self.value is not None and not (len(self.value) in (1, img_c)):
+ raise ValueError(
+ f"If value is a sequence, it should have either a single value or {img_c} (number of inpt channels)"
+ )
+
+ area = img_h * img_w
+
+ log_ratio = self._log_ratio
+ for _ in range(10):
+ erase_area = area * torch.empty(1).uniform_(self.scale[0], self.scale[1]).item()
+ aspect_ratio = torch.exp(
+ torch.empty(1).uniform_(
+ log_ratio[0], # type: ignore[arg-type]
+ log_ratio[1], # type: ignore[arg-type]
+ )
+ ).item()
+
+ h = int(round(math.sqrt(erase_area * aspect_ratio)))
+ w = int(round(math.sqrt(erase_area / aspect_ratio)))
+ if not (h < img_h and w < img_w):
+ continue
+
+ if self.value is None:
+ v = torch.empty([img_c, h, w], dtype=torch.float32).normal_()
+ else:
+ v = torch.tensor(self.value)[:, None, None]
+
+ i = torch.randint(0, img_h - h + 1, size=(1,)).item()
+ j = torch.randint(0, img_w - w + 1, size=(1,)).item()
+ break
+ else:
+ i, j, h, w, v = 0, 0, img_h, img_w, None
+
+ return dict(i=i, j=j, h=h, w=w, v=v)
+
+ def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
+ if params["v"] is not None:
+ inpt = self._call_kernel(F.erase, inpt, **params, inplace=self.inplace)
+
+ return inpt
+
+
+class _BaseMixUpCutMix(Transform):
+ def __init__(self, *, alpha: float = 1.0, num_classes: int, labels_getter="default") -> None:
+ super().__init__()
+ self.alpha = float(alpha)
+ self._dist = torch.distributions.Beta(torch.tensor([alpha]), torch.tensor([alpha]))
+
+ self.num_classes = num_classes
+
+ self._labels_getter = _parse_labels_getter(labels_getter)
+
+ def forward(self, *inputs):
+ inputs = inputs if len(inputs) > 1 else inputs[0]
+ flat_inputs, spec = tree_flatten(inputs)
+ needs_transform_list = self._needs_transform_list(flat_inputs)
+
+ if has_any(flat_inputs, PIL.Image.Image, tv_tensors.BoundingBoxes, tv_tensors.Mask):
+ raise ValueError(f"{type(self).__name__}() does not support PIL images, bounding boxes and masks.")
+
+ labels = self._labels_getter(inputs)
+ if not isinstance(labels, torch.Tensor):
+ raise ValueError(f"The labels must be a tensor, but got {type(labels)} instead.")
+ elif labels.ndim != 1:
+ raise ValueError(
+ f"labels tensor should be of shape (batch_size,) " f"but got shape {labels.shape} instead."
+ )
+
+ params = {
+ "labels": labels,
+ "batch_size": labels.shape[0],
+ **self._get_params(
+ [inpt for (inpt, needs_transform) in zip(flat_inputs, needs_transform_list) if needs_transform]
+ ),
+ }
+
+ # By default, the labels will be False inside needs_transform_list, since they are a torch.Tensor coming
+ # after an image or video. However, we need to handle them in _transform, so we make sure to set them to True
+ needs_transform_list[next(idx for idx, inpt in enumerate(flat_inputs) if inpt is labels)] = True
+ flat_outputs = [
+ self._transform(inpt, params) if needs_transform else inpt
+ for (inpt, needs_transform) in zip(flat_inputs, needs_transform_list)
+ ]
+
+ return tree_unflatten(flat_outputs, spec)
+
+ def _check_image_or_video(self, inpt: torch.Tensor, *, batch_size: int):
+ expected_num_dims = 5 if isinstance(inpt, tv_tensors.Video) else 4
+ if inpt.ndim != expected_num_dims:
+ raise ValueError(
+ f"Expected a batched input with {expected_num_dims} dims, but got {inpt.ndim} dimensions instead."
+ )
+ if inpt.shape[0] != batch_size:
+ raise ValueError(
+ f"The batch size of the image or video does not match the batch size of the labels: "
+ f"{inpt.shape[0]} != {batch_size}."
+ )
+
+ def _mixup_label(self, label: torch.Tensor, *, lam: float) -> torch.Tensor:
+ label = one_hot(label, num_classes=self.num_classes)
+ if not label.dtype.is_floating_point:
+ label = label.float()
+ return label.roll(1, 0).mul_(1.0 - lam).add_(label.mul(lam))
+
+
+class MixUp(_BaseMixUpCutMix):
+ """[BETA] Apply MixUp to the provided batch of images and labels.
+
+ .. v2betastatus:: MixUp transform
+
+ Paper: `mixup: Beyond Empirical Risk Minimization `_.
+
+ .. note::
+ This transform is meant to be used on **batches** of samples, not
+ individual images. See
+ :ref:`sphx_glr_auto_examples_transforms_plot_cutmix_mixup.py` for detailed usage
+ examples.
+ The sample pairing is deterministic and done by matching consecutive
+ samples in the batch, so the batch needs to be shuffled (this is an
+ implementation detail, not a guaranteed convention.)
+
+ In the input, the labels are expected to be a tensor of shape ``(batch_size,)``. They will be transformed
+ into a tensor of shape ``(batch_size, num_classes)``.
+
+ Args:
+ alpha (float, optional): hyperparameter of the Beta distribution used for mixup. Default is 1.
+ num_classes (int): number of classes in the batch. Used for one-hot-encoding.
+ labels_getter (callable or "default", optional): indicates how to identify the labels in the input.
+ By default, this will pick the second parameter as the labels if it's a tensor. This covers the most
+ common scenario where this transform is called as ``MixUp()(imgs_batch, labels_batch)``.
+ It can also be a callable that takes the same input as the transform, and returns the labels.
+ """
+
+ def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+ return dict(lam=float(self._dist.sample(()))) # type: ignore[arg-type]
+
+ def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
+ lam = params["lam"]
+
+ if inpt is params["labels"]:
+ return self._mixup_label(inpt, lam=lam)
+ elif isinstance(inpt, (tv_tensors.Image, tv_tensors.Video)) or is_pure_tensor(inpt):
+ self._check_image_or_video(inpt, batch_size=params["batch_size"])
+
+ output = inpt.roll(1, 0).mul_(1.0 - lam).add_(inpt.mul(lam))
+
+ if isinstance(inpt, (tv_tensors.Image, tv_tensors.Video)):
+ output = tv_tensors.wrap(output, like=inpt)
+
+ return output
+ else:
+ return inpt
+
+
+class CutMix(_BaseMixUpCutMix):
+ """[BETA] Apply CutMix to the provided batch of images and labels.
+
+ .. v2betastatus:: CutMix transform
+
+ Paper: `CutMix: Regularization Strategy to Train Strong Classifiers with Localizable Features
+ `_.
+
+ .. note::
+ This transform is meant to be used on **batches** of samples, not
+ individual images. See
+ :ref:`sphx_glr_auto_examples_transforms_plot_cutmix_mixup.py` for detailed usage
+ examples.
+ The sample pairing is deterministic and done by matching consecutive
+ samples in the batch, so the batch needs to be shuffled (this is an
+ implementation detail, not a guaranteed convention.)
+
+ In the input, the labels are expected to be a tensor of shape ``(batch_size,)``. They will be transformed
+ into a tensor of shape ``(batch_size, num_classes)``.
+
+ Args:
+ alpha (float, optional): hyperparameter of the Beta distribution used for mixup. Default is 1.
+ num_classes (int): number of classes in the batch. Used for one-hot-encoding.
+ labels_getter (callable or "default", optional): indicates how to identify the labels in the input.
+ By default, this will pick the second parameter as the labels if it's a tensor. This covers the most
+ common scenario where this transform is called as ``CutMix()(imgs_batch, labels_batch)``.
+ It can also be a callable that takes the same input as the transform, and returns the labels.
+ """
+
+ def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+ lam = float(self._dist.sample(())) # type: ignore[arg-type]
+
+ H, W = query_size(flat_inputs)
+
+ r_x = torch.randint(W, size=(1,))
+ r_y = torch.randint(H, size=(1,))
+
+ r = 0.5 * math.sqrt(1.0 - lam)
+ r_w_half = int(r * W)
+ r_h_half = int(r * H)
+
+ x1 = int(torch.clamp(r_x - r_w_half, min=0))
+ y1 = int(torch.clamp(r_y - r_h_half, min=0))
+ x2 = int(torch.clamp(r_x + r_w_half, max=W))
+ y2 = int(torch.clamp(r_y + r_h_half, max=H))
+ box = (x1, y1, x2, y2)
+
+ lam_adjusted = float(1.0 - (x2 - x1) * (y2 - y1) / (W * H))
+
+ return dict(box=box, lam_adjusted=lam_adjusted)
+
+ def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
+ if inpt is params["labels"]:
+ return self._mixup_label(inpt, lam=params["lam_adjusted"])
+ elif isinstance(inpt, (tv_tensors.Image, tv_tensors.Video)) or is_pure_tensor(inpt):
+ self._check_image_or_video(inpt, batch_size=params["batch_size"])
+
+ x1, y1, x2, y2 = params["box"]
+ rolled = inpt.roll(1, 0)
+ output = inpt.clone()
+ output[..., y1:y2, x1:x2] = rolled[..., y1:y2, x1:x2]
+
+ if isinstance(inpt, (tv_tensors.Image, tv_tensors.Video)):
+ output = tv_tensors.wrap(output, like=inpt)
+
+ return output
+ else:
+ return inpt
diff --git a/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_auto_augment.py b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_auto_augment.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ddd5aacdc396a357723380e0fd2493cb9ce5cb7
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_auto_augment.py
@@ -0,0 +1,635 @@
+import math
+from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
+
+import PIL.Image
+import torch
+
+from torch.utils._pytree import tree_flatten, tree_unflatten, TreeSpec
+from torchvision import transforms as _transforms, tv_tensors
+from torchvision.transforms import _functional_tensor as _FT
+from torchvision.transforms.v2 import AutoAugmentPolicy, functional as F, InterpolationMode, Transform
+from torchvision.transforms.v2.functional._geometry import _check_interpolation
+from torchvision.transforms.v2.functional._meta import get_size
+from torchvision.transforms.v2.functional._utils import _FillType, _FillTypeJIT
+
+from ._utils import _get_fill, _setup_fill_arg, check_type, is_pure_tensor
+
+
+ImageOrVideo = Union[torch.Tensor, PIL.Image.Image, tv_tensors.Image, tv_tensors.Video]
+
+
+class _AutoAugmentBase(Transform):
+ def __init__(
+ self,
+ *,
+ interpolation: Union[InterpolationMode, int] = InterpolationMode.NEAREST,
+ fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = None,
+ ) -> None:
+ super().__init__()
+ self.interpolation = _check_interpolation(interpolation)
+ self.fill = fill
+ self._fill = _setup_fill_arg(fill)
+
+ def _extract_params_for_v1_transform(self) -> Dict[str, Any]:
+ params = super()._extract_params_for_v1_transform()
+
+ if isinstance(params["fill"], dict):
+ raise ValueError(f"{type(self).__name__}() can not be scripted for when `fill` is a dictionary.")
+
+ return params
+
+ def _get_random_item(self, dct: Dict[str, Tuple[Callable, bool]]) -> Tuple[str, Tuple[Callable, bool]]:
+ keys = tuple(dct.keys())
+ key = keys[int(torch.randint(len(keys), ()))]
+ return key, dct[key]
+
+ def _flatten_and_extract_image_or_video(
+ self,
+ inputs: Any,
+ unsupported_types: Tuple[Type, ...] = (tv_tensors.BoundingBoxes, tv_tensors.Mask),
+ ) -> Tuple[Tuple[List[Any], TreeSpec, int], ImageOrVideo]:
+ flat_inputs, spec = tree_flatten(inputs if len(inputs) > 1 else inputs[0])
+ needs_transform_list = self._needs_transform_list(flat_inputs)
+
+ image_or_videos = []
+ for idx, (inpt, needs_transform) in enumerate(zip(flat_inputs, needs_transform_list)):
+ if needs_transform and check_type(
+ inpt,
+ (
+ tv_tensors.Image,
+ PIL.Image.Image,
+ is_pure_tensor,
+ tv_tensors.Video,
+ ),
+ ):
+ image_or_videos.append((idx, inpt))
+ elif isinstance(inpt, unsupported_types):
+ raise TypeError(f"Inputs of type {type(inpt).__name__} are not supported by {type(self).__name__}()")
+
+ if not image_or_videos:
+ raise TypeError("Found no image in the sample.")
+ if len(image_or_videos) > 1:
+ raise TypeError(
+ f"Auto augment transformations are only properly defined for a single image or video, "
+ f"but found {len(image_or_videos)}."
+ )
+
+ idx, image_or_video = image_or_videos[0]
+ return (flat_inputs, spec, idx), image_or_video
+
+ def _unflatten_and_insert_image_or_video(
+ self,
+ flat_inputs_with_spec: Tuple[List[Any], TreeSpec, int],
+ image_or_video: ImageOrVideo,
+ ) -> Any:
+ flat_inputs, spec, idx = flat_inputs_with_spec
+ flat_inputs[idx] = image_or_video
+ return tree_unflatten(flat_inputs, spec)
+
+ def _apply_image_or_video_transform(
+ self,
+ image: ImageOrVideo,
+ transform_id: str,
+ magnitude: float,
+ interpolation: Union[InterpolationMode, int],
+ fill: Dict[Union[Type, str], _FillTypeJIT],
+ ) -> ImageOrVideo:
+ fill_ = _get_fill(fill, type(image))
+
+ if transform_id == "Identity":
+ return image
+ elif transform_id == "ShearX":
+ # magnitude should be arctan(magnitude)
+ # official autoaug: (1, level, 0, 0, 1, 0)
+ # https://github.com/tensorflow/models/blob/dd02069717128186b88afa8d857ce57d17957f03/research/autoaugment/augmentation_transforms.py#L290
+ # compared to
+ # torchvision: (1, tan(level), 0, 0, 1, 0)
+ # https://github.com/pytorch/vision/blob/0c2373d0bba3499e95776e7936e207d8a1676e65/torchvision/transforms/functional.py#L976
+ return F.affine(
+ image,
+ angle=0.0,
+ translate=[0, 0],
+ scale=1.0,
+ shear=[math.degrees(math.atan(magnitude)), 0.0],
+ interpolation=interpolation,
+ fill=fill_,
+ center=[0, 0],
+ )
+ elif transform_id == "ShearY":
+ # magnitude should be arctan(magnitude)
+ # See above
+ return F.affine(
+ image,
+ angle=0.0,
+ translate=[0, 0],
+ scale=1.0,
+ shear=[0.0, math.degrees(math.atan(magnitude))],
+ interpolation=interpolation,
+ fill=fill_,
+ center=[0, 0],
+ )
+ elif transform_id == "TranslateX":
+ return F.affine(
+ image,
+ angle=0.0,
+ translate=[int(magnitude), 0],
+ scale=1.0,
+ interpolation=interpolation,
+ shear=[0.0, 0.0],
+ fill=fill_,
+ )
+ elif transform_id == "TranslateY":
+ return F.affine(
+ image,
+ angle=0.0,
+ translate=[0, int(magnitude)],
+ scale=1.0,
+ interpolation=interpolation,
+ shear=[0.0, 0.0],
+ fill=fill_,
+ )
+ elif transform_id == "Rotate":
+ return F.rotate(image, angle=magnitude, interpolation=interpolation, fill=fill_)
+ elif transform_id == "Brightness":
+ return F.adjust_brightness(image, brightness_factor=1.0 + magnitude)
+ elif transform_id == "Color":
+ return F.adjust_saturation(image, saturation_factor=1.0 + magnitude)
+ elif transform_id == "Contrast":
+ return F.adjust_contrast(image, contrast_factor=1.0 + magnitude)
+ elif transform_id == "Sharpness":
+ return F.adjust_sharpness(image, sharpness_factor=1.0 + magnitude)
+ elif transform_id == "Posterize":
+ return F.posterize(image, bits=int(magnitude))
+ elif transform_id == "Solarize":
+ bound = _FT._max_value(image.dtype) if isinstance(image, torch.Tensor) else 255.0
+ return F.solarize(image, threshold=bound * magnitude)
+ elif transform_id == "AutoContrast":
+ return F.autocontrast(image)
+ elif transform_id == "Equalize":
+ return F.equalize(image)
+ elif transform_id == "Invert":
+ return F.invert(image)
+ else:
+ raise ValueError(f"No transform available for {transform_id}")
+
+
+class AutoAugment(_AutoAugmentBase):
+ r"""[BETA] AutoAugment data augmentation method based on
+ `"AutoAugment: Learning Augmentation Strategies from Data" `_.
+
+ .. v2betastatus:: AutoAugment transform
+
+ This transformation works on images and videos only.
+
+ If the input is :class:`torch.Tensor`, it should be of type ``torch.uint8``, and it is expected
+ to have [..., 1 or 3, H, W] shape, where ... means an arbitrary number of leading dimensions.
+ If img is PIL Image, it is expected to be in mode "L" or "RGB".
+
+ Args:
+ policy (AutoAugmentPolicy, optional): Desired policy enum defined by
+ :class:`torchvision.transforms.autoaugment.AutoAugmentPolicy`. Default is ``AutoAugmentPolicy.IMAGENET``.
+ interpolation (InterpolationMode, optional): Desired interpolation enum defined by
+ :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.NEAREST``.
+ If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
+ fill (sequence or number, optional): Pixel fill value for the area outside the transformed
+ image. If given a number, the value is used for all bands respectively.
+ """
+ _v1_transform_cls = _transforms.AutoAugment
+
+ _AUGMENTATION_SPACE = {
+ "ShearX": (lambda num_bins, height, width: torch.linspace(0.0, 0.3, num_bins), True),
+ "ShearY": (lambda num_bins, height, width: torch.linspace(0.0, 0.3, num_bins), True),
+ "TranslateX": (
+ lambda num_bins, height, width: torch.linspace(0.0, 150.0 / 331.0 * width, num_bins),
+ True,
+ ),
+ "TranslateY": (
+ lambda num_bins, height, width: torch.linspace(0.0, 150.0 / 331.0 * height, num_bins),
+ True,
+ ),
+ "Rotate": (lambda num_bins, height, width: torch.linspace(0.0, 30.0, num_bins), True),
+ "Brightness": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ "Color": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ "Contrast": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ "Sharpness": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ "Posterize": (
+ lambda num_bins, height, width: (8 - (torch.arange(num_bins) / ((num_bins - 1) / 4))).round().int(),
+ False,
+ ),
+ "Solarize": (lambda num_bins, height, width: torch.linspace(1.0, 0.0, num_bins), False),
+ "AutoContrast": (lambda num_bins, height, width: None, False),
+ "Equalize": (lambda num_bins, height, width: None, False),
+ "Invert": (lambda num_bins, height, width: None, False),
+ }
+
+ def __init__(
+ self,
+ policy: AutoAugmentPolicy = AutoAugmentPolicy.IMAGENET,
+ interpolation: Union[InterpolationMode, int] = InterpolationMode.NEAREST,
+ fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = None,
+ ) -> None:
+ super().__init__(interpolation=interpolation, fill=fill)
+ self.policy = policy
+ self._policies = self._get_policies(policy)
+
+ def _get_policies(
+ self, policy: AutoAugmentPolicy
+ ) -> List[Tuple[Tuple[str, float, Optional[int]], Tuple[str, float, Optional[int]]]]:
+ if policy == AutoAugmentPolicy.IMAGENET:
+ return [
+ (("Posterize", 0.4, 8), ("Rotate", 0.6, 9)),
+ (("Solarize", 0.6, 5), ("AutoContrast", 0.6, None)),
+ (("Equalize", 0.8, None), ("Equalize", 0.6, None)),
+ (("Posterize", 0.6, 7), ("Posterize", 0.6, 6)),
+ (("Equalize", 0.4, None), ("Solarize", 0.2, 4)),
+ (("Equalize", 0.4, None), ("Rotate", 0.8, 8)),
+ (("Solarize", 0.6, 3), ("Equalize", 0.6, None)),
+ (("Posterize", 0.8, 5), ("Equalize", 1.0, None)),
+ (("Rotate", 0.2, 3), ("Solarize", 0.6, 8)),
+ (("Equalize", 0.6, None), ("Posterize", 0.4, 6)),
+ (("Rotate", 0.8, 8), ("Color", 0.4, 0)),
+ (("Rotate", 0.4, 9), ("Equalize", 0.6, None)),
+ (("Equalize", 0.0, None), ("Equalize", 0.8, None)),
+ (("Invert", 0.6, None), ("Equalize", 1.0, None)),
+ (("Color", 0.6, 4), ("Contrast", 1.0, 8)),
+ (("Rotate", 0.8, 8), ("Color", 1.0, 2)),
+ (("Color", 0.8, 8), ("Solarize", 0.8, 7)),
+ (("Sharpness", 0.4, 7), ("Invert", 0.6, None)),
+ (("ShearX", 0.6, 5), ("Equalize", 1.0, None)),
+ (("Color", 0.4, 0), ("Equalize", 0.6, None)),
+ (("Equalize", 0.4, None), ("Solarize", 0.2, 4)),
+ (("Solarize", 0.6, 5), ("AutoContrast", 0.6, None)),
+ (("Invert", 0.6, None), ("Equalize", 1.0, None)),
+ (("Color", 0.6, 4), ("Contrast", 1.0, 8)),
+ (("Equalize", 0.8, None), ("Equalize", 0.6, None)),
+ ]
+ elif policy == AutoAugmentPolicy.CIFAR10:
+ return [
+ (("Invert", 0.1, None), ("Contrast", 0.2, 6)),
+ (("Rotate", 0.7, 2), ("TranslateX", 0.3, 9)),
+ (("Sharpness", 0.8, 1), ("Sharpness", 0.9, 3)),
+ (("ShearY", 0.5, 8), ("TranslateY", 0.7, 9)),
+ (("AutoContrast", 0.5, None), ("Equalize", 0.9, None)),
+ (("ShearY", 0.2, 7), ("Posterize", 0.3, 7)),
+ (("Color", 0.4, 3), ("Brightness", 0.6, 7)),
+ (("Sharpness", 0.3, 9), ("Brightness", 0.7, 9)),
+ (("Equalize", 0.6, None), ("Equalize", 0.5, None)),
+ (("Contrast", 0.6, 7), ("Sharpness", 0.6, 5)),
+ (("Color", 0.7, 7), ("TranslateX", 0.5, 8)),
+ (("Equalize", 0.3, None), ("AutoContrast", 0.4, None)),
+ (("TranslateY", 0.4, 3), ("Sharpness", 0.2, 6)),
+ (("Brightness", 0.9, 6), ("Color", 0.2, 8)),
+ (("Solarize", 0.5, 2), ("Invert", 0.0, None)),
+ (("Equalize", 0.2, None), ("AutoContrast", 0.6, None)),
+ (("Equalize", 0.2, None), ("Equalize", 0.6, None)),
+ (("Color", 0.9, 9), ("Equalize", 0.6, None)),
+ (("AutoContrast", 0.8, None), ("Solarize", 0.2, 8)),
+ (("Brightness", 0.1, 3), ("Color", 0.7, 0)),
+ (("Solarize", 0.4, 5), ("AutoContrast", 0.9, None)),
+ (("TranslateY", 0.9, 9), ("TranslateY", 0.7, 9)),
+ (("AutoContrast", 0.9, None), ("Solarize", 0.8, 3)),
+ (("Equalize", 0.8, None), ("Invert", 0.1, None)),
+ (("TranslateY", 0.7, 9), ("AutoContrast", 0.9, None)),
+ ]
+ elif policy == AutoAugmentPolicy.SVHN:
+ return [
+ (("ShearX", 0.9, 4), ("Invert", 0.2, None)),
+ (("ShearY", 0.9, 8), ("Invert", 0.7, None)),
+ (("Equalize", 0.6, None), ("Solarize", 0.6, 6)),
+ (("Invert", 0.9, None), ("Equalize", 0.6, None)),
+ (("Equalize", 0.6, None), ("Rotate", 0.9, 3)),
+ (("ShearX", 0.9, 4), ("AutoContrast", 0.8, None)),
+ (("ShearY", 0.9, 8), ("Invert", 0.4, None)),
+ (("ShearY", 0.9, 5), ("Solarize", 0.2, 6)),
+ (("Invert", 0.9, None), ("AutoContrast", 0.8, None)),
+ (("Equalize", 0.6, None), ("Rotate", 0.9, 3)),
+ (("ShearX", 0.9, 4), ("Solarize", 0.3, 3)),
+ (("ShearY", 0.8, 8), ("Invert", 0.7, None)),
+ (("Equalize", 0.9, None), ("TranslateY", 0.6, 6)),
+ (("Invert", 0.9, None), ("Equalize", 0.6, None)),
+ (("Contrast", 0.3, 3), ("Rotate", 0.8, 4)),
+ (("Invert", 0.8, None), ("TranslateY", 0.0, 2)),
+ (("ShearY", 0.7, 6), ("Solarize", 0.4, 8)),
+ (("Invert", 0.6, None), ("Rotate", 0.8, 4)),
+ (("ShearY", 0.3, 7), ("TranslateX", 0.9, 3)),
+ (("ShearX", 0.1, 6), ("Invert", 0.6, None)),
+ (("Solarize", 0.7, 2), ("TranslateY", 0.6, 7)),
+ (("ShearY", 0.8, 4), ("Invert", 0.8, None)),
+ (("ShearX", 0.7, 9), ("TranslateY", 0.8, 3)),
+ (("ShearY", 0.8, 5), ("AutoContrast", 0.7, None)),
+ (("ShearX", 0.7, 2), ("Invert", 0.1, None)),
+ ]
+ else:
+ raise ValueError(f"The provided policy {policy} is not recognized.")
+
+ def forward(self, *inputs: Any) -> Any:
+ flat_inputs_with_spec, image_or_video = self._flatten_and_extract_image_or_video(inputs)
+ height, width = get_size(image_or_video)
+
+ policy = self._policies[int(torch.randint(len(self._policies), ()))]
+
+ for transform_id, probability, magnitude_idx in policy:
+ if not torch.rand(()) <= probability:
+ continue
+
+ magnitudes_fn, signed = self._AUGMENTATION_SPACE[transform_id]
+
+ magnitudes = magnitudes_fn(10, height, width)
+ if magnitudes is not None:
+ magnitude = float(magnitudes[magnitude_idx])
+ if signed and torch.rand(()) <= 0.5:
+ magnitude *= -1
+ else:
+ magnitude = 0.0
+
+ image_or_video = self._apply_image_or_video_transform(
+ image_or_video, transform_id, magnitude, interpolation=self.interpolation, fill=self._fill
+ )
+
+ return self._unflatten_and_insert_image_or_video(flat_inputs_with_spec, image_or_video)
+
+
+class RandAugment(_AutoAugmentBase):
+ r"""[BETA] RandAugment data augmentation method based on
+ `"RandAugment: Practical automated data augmentation with a reduced search space"
+ `_.
+
+ .. v2betastatus:: RandAugment transform
+
+ This transformation works on images and videos only.
+
+ If the input is :class:`torch.Tensor`, it should be of type ``torch.uint8``, and it is expected
+ to have [..., 1 or 3, H, W] shape, where ... means an arbitrary number of leading dimensions.
+ If img is PIL Image, it is expected to be in mode "L" or "RGB".
+
+ Args:
+ num_ops (int, optional): Number of augmentation transformations to apply sequentially.
+ magnitude (int, optional): Magnitude for all the transformations.
+ num_magnitude_bins (int, optional): The number of different magnitude values.
+ interpolation (InterpolationMode, optional): Desired interpolation enum defined by
+ :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.NEAREST``.
+ If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
+ fill (sequence or number, optional): Pixel fill value for the area outside the transformed
+ image. If given a number, the value is used for all bands respectively.
+ """
+
+ _v1_transform_cls = _transforms.RandAugment
+ _AUGMENTATION_SPACE = {
+ "Identity": (lambda num_bins, height, width: None, False),
+ "ShearX": (lambda num_bins, height, width: torch.linspace(0.0, 0.3, num_bins), True),
+ "ShearY": (lambda num_bins, height, width: torch.linspace(0.0, 0.3, num_bins), True),
+ "TranslateX": (
+ lambda num_bins, height, width: torch.linspace(0.0, 150.0 / 331.0 * width, num_bins),
+ True,
+ ),
+ "TranslateY": (
+ lambda num_bins, height, width: torch.linspace(0.0, 150.0 / 331.0 * height, num_bins),
+ True,
+ ),
+ "Rotate": (lambda num_bins, height, width: torch.linspace(0.0, 30.0, num_bins), True),
+ "Brightness": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ "Color": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ "Contrast": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ "Sharpness": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ "Posterize": (
+ lambda num_bins, height, width: (8 - (torch.arange(num_bins) / ((num_bins - 1) / 4))).round().int(),
+ False,
+ ),
+ "Solarize": (lambda num_bins, height, width: torch.linspace(1.0, 0.0, num_bins), False),
+ "AutoContrast": (lambda num_bins, height, width: None, False),
+ "Equalize": (lambda num_bins, height, width: None, False),
+ }
+
+ def __init__(
+ self,
+ num_ops: int = 2,
+ magnitude: int = 9,
+ num_magnitude_bins: int = 31,
+ interpolation: Union[InterpolationMode, int] = InterpolationMode.NEAREST,
+ fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = None,
+ ) -> None:
+ super().__init__(interpolation=interpolation, fill=fill)
+ self.num_ops = num_ops
+ self.magnitude = magnitude
+ self.num_magnitude_bins = num_magnitude_bins
+
+ def forward(self, *inputs: Any) -> Any:
+ flat_inputs_with_spec, image_or_video = self._flatten_and_extract_image_or_video(inputs)
+ height, width = get_size(image_or_video)
+
+ for _ in range(self.num_ops):
+ transform_id, (magnitudes_fn, signed) = self._get_random_item(self._AUGMENTATION_SPACE)
+ magnitudes = magnitudes_fn(self.num_magnitude_bins, height, width)
+ if magnitudes is not None:
+ magnitude = float(magnitudes[self.magnitude])
+ if signed and torch.rand(()) <= 0.5:
+ magnitude *= -1
+ else:
+ magnitude = 0.0
+ image_or_video = self._apply_image_or_video_transform(
+ image_or_video, transform_id, magnitude, interpolation=self.interpolation, fill=self._fill
+ )
+
+ return self._unflatten_and_insert_image_or_video(flat_inputs_with_spec, image_or_video)
+
+
+class TrivialAugmentWide(_AutoAugmentBase):
+ r"""[BETA] Dataset-independent data-augmentation with TrivialAugment Wide, as described in
+ `"TrivialAugment: Tuning-free Yet State-of-the-Art Data Augmentation" `_.
+
+ .. v2betastatus:: TrivialAugmentWide transform
+
+ This transformation works on images and videos only.
+
+ If the input is :class:`torch.Tensor`, it should be of type ``torch.uint8``, and it is expected
+ to have [..., 1 or 3, H, W] shape, where ... means an arbitrary number of leading dimensions.
+ If img is PIL Image, it is expected to be in mode "L" or "RGB".
+
+ Args:
+ num_magnitude_bins (int, optional): The number of different magnitude values.
+ interpolation (InterpolationMode, optional): Desired interpolation enum defined by
+ :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.NEAREST``.
+ If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
+ fill (sequence or number, optional): Pixel fill value for the area outside the transformed
+ image. If given a number, the value is used for all bands respectively.
+ """
+
+ _v1_transform_cls = _transforms.TrivialAugmentWide
+ _AUGMENTATION_SPACE = {
+ "Identity": (lambda num_bins, height, width: None, False),
+ "ShearX": (lambda num_bins, height, width: torch.linspace(0.0, 0.99, num_bins), True),
+ "ShearY": (lambda num_bins, height, width: torch.linspace(0.0, 0.99, num_bins), True),
+ "TranslateX": (lambda num_bins, height, width: torch.linspace(0.0, 32.0, num_bins), True),
+ "TranslateY": (lambda num_bins, height, width: torch.linspace(0.0, 32.0, num_bins), True),
+ "Rotate": (lambda num_bins, height, width: torch.linspace(0.0, 135.0, num_bins), True),
+ "Brightness": (lambda num_bins, height, width: torch.linspace(0.0, 0.99, num_bins), True),
+ "Color": (lambda num_bins, height, width: torch.linspace(0.0, 0.99, num_bins), True),
+ "Contrast": (lambda num_bins, height, width: torch.linspace(0.0, 0.99, num_bins), True),
+ "Sharpness": (lambda num_bins, height, width: torch.linspace(0.0, 0.99, num_bins), True),
+ "Posterize": (
+ lambda num_bins, height, width: (8 - (torch.arange(num_bins) / ((num_bins - 1) / 6))).round().int(),
+ False,
+ ),
+ "Solarize": (lambda num_bins, height, width: torch.linspace(1.0, 0.0, num_bins), False),
+ "AutoContrast": (lambda num_bins, height, width: None, False),
+ "Equalize": (lambda num_bins, height, width: None, False),
+ }
+
+ def __init__(
+ self,
+ num_magnitude_bins: int = 31,
+ interpolation: Union[InterpolationMode, int] = InterpolationMode.NEAREST,
+ fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = None,
+ ):
+ super().__init__(interpolation=interpolation, fill=fill)
+ self.num_magnitude_bins = num_magnitude_bins
+
+ def forward(self, *inputs: Any) -> Any:
+ flat_inputs_with_spec, image_or_video = self._flatten_and_extract_image_or_video(inputs)
+ height, width = get_size(image_or_video)
+
+ transform_id, (magnitudes_fn, signed) = self._get_random_item(self._AUGMENTATION_SPACE)
+
+ magnitudes = magnitudes_fn(self.num_magnitude_bins, height, width)
+ if magnitudes is not None:
+ magnitude = float(magnitudes[int(torch.randint(self.num_magnitude_bins, ()))])
+ if signed and torch.rand(()) <= 0.5:
+ magnitude *= -1
+ else:
+ magnitude = 0.0
+
+ image_or_video = self._apply_image_or_video_transform(
+ image_or_video, transform_id, magnitude, interpolation=self.interpolation, fill=self._fill
+ )
+ return self._unflatten_and_insert_image_or_video(flat_inputs_with_spec, image_or_video)
+
+
+class AugMix(_AutoAugmentBase):
+ r"""[BETA] AugMix data augmentation method based on
+ `"AugMix: A Simple Data Processing Method to Improve Robustness and Uncertainty" `_.
+
+ .. v2betastatus:: AugMix transform
+
+ This transformation works on images and videos only.
+
+ If the input is :class:`torch.Tensor`, it should be of type ``torch.uint8``, and it is expected
+ to have [..., 1 or 3, H, W] shape, where ... means an arbitrary number of leading dimensions.
+ If img is PIL Image, it is expected to be in mode "L" or "RGB".
+
+ Args:
+ severity (int, optional): The severity of base augmentation operators. Default is ``3``.
+ mixture_width (int, optional): The number of augmentation chains. Default is ``3``.
+ chain_depth (int, optional): The depth of augmentation chains. A negative value denotes stochastic depth sampled from the interval [1, 3].
+ Default is ``-1``.
+ alpha (float, optional): The hyperparameter for the probability distributions. Default is ``1.0``.
+ all_ops (bool, optional): Use all operations (including brightness, contrast, color and sharpness). Default is ``True``.
+ interpolation (InterpolationMode, optional): Desired interpolation enum defined by
+ :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.NEAREST``.
+ If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
+ fill (sequence or number, optional): Pixel fill value for the area outside the transformed
+ image. If given a number, the value is used for all bands respectively.
+ """
+
+ _v1_transform_cls = _transforms.AugMix
+
+ _PARTIAL_AUGMENTATION_SPACE = {
+ "ShearX": (lambda num_bins, height, width: torch.linspace(0.0, 0.3, num_bins), True),
+ "ShearY": (lambda num_bins, height, width: torch.linspace(0.0, 0.3, num_bins), True),
+ "TranslateX": (lambda num_bins, height, width: torch.linspace(0.0, width / 3.0, num_bins), True),
+ "TranslateY": (lambda num_bins, height, width: torch.linspace(0.0, height / 3.0, num_bins), True),
+ "Rotate": (lambda num_bins, height, width: torch.linspace(0.0, 30.0, num_bins), True),
+ "Posterize": (
+ lambda num_bins, height, width: (4 - (torch.arange(num_bins) / ((num_bins - 1) / 4))).round().int(),
+ False,
+ ),
+ "Solarize": (lambda num_bins, height, width: torch.linspace(1.0, 0.0, num_bins), False),
+ "AutoContrast": (lambda num_bins, height, width: None, False),
+ "Equalize": (lambda num_bins, height, width: None, False),
+ }
+ _AUGMENTATION_SPACE: Dict[str, Tuple[Callable[[int, int, int], Optional[torch.Tensor]], bool]] = {
+ **_PARTIAL_AUGMENTATION_SPACE,
+ "Brightness": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ "Color": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ "Contrast": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ "Sharpness": (lambda num_bins, height, width: torch.linspace(0.0, 0.9, num_bins), True),
+ }
+
+ def __init__(
+ self,
+ severity: int = 3,
+ mixture_width: int = 3,
+ chain_depth: int = -1,
+ alpha: float = 1.0,
+ all_ops: bool = True,
+ interpolation: Union[InterpolationMode, int] = InterpolationMode.BILINEAR,
+ fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = None,
+ ) -> None:
+ super().__init__(interpolation=interpolation, fill=fill)
+ self._PARAMETER_MAX = 10
+ if not (1 <= severity <= self._PARAMETER_MAX):
+ raise ValueError(f"The severity must be between [1, {self._PARAMETER_MAX}]. Got {severity} instead.")
+ self.severity = severity
+ self.mixture_width = mixture_width
+ self.chain_depth = chain_depth
+ self.alpha = alpha
+ self.all_ops = all_ops
+
+ def _sample_dirichlet(self, params: torch.Tensor) -> torch.Tensor:
+ # Must be on a separate method so that we can overwrite it in tests.
+ return torch._sample_dirichlet(params)
+
+ def forward(self, *inputs: Any) -> Any:
+ flat_inputs_with_spec, orig_image_or_video = self._flatten_and_extract_image_or_video(inputs)
+ height, width = get_size(orig_image_or_video)
+
+ if isinstance(orig_image_or_video, torch.Tensor):
+ image_or_video = orig_image_or_video
+ else: # isinstance(inpt, PIL.Image.Image):
+ image_or_video = F.pil_to_tensor(orig_image_or_video)
+
+ augmentation_space = self._AUGMENTATION_SPACE if self.all_ops else self._PARTIAL_AUGMENTATION_SPACE
+
+ orig_dims = list(image_or_video.shape)
+ expected_ndim = 5 if isinstance(orig_image_or_video, tv_tensors.Video) else 4
+ batch = image_or_video.reshape([1] * max(expected_ndim - image_or_video.ndim, 0) + orig_dims)
+ batch_dims = [batch.size(0)] + [1] * (batch.ndim - 1)
+
+ # Sample the beta weights for combining the original and augmented image or video. To get Beta, we use a
+ # Dirichlet with 2 parameters. The 1st column stores the weights of the original and the 2nd the ones of
+ # augmented image or video.
+ m = self._sample_dirichlet(
+ torch.tensor([self.alpha, self.alpha], device=batch.device).expand(batch_dims[0], -1)
+ )
+
+ # Sample the mixing weights and combine them with the ones sampled from Beta for the augmented images or videos.
+ combined_weights = self._sample_dirichlet(
+ torch.tensor([self.alpha] * self.mixture_width, device=batch.device).expand(batch_dims[0], -1)
+ ) * m[:, 1].reshape([batch_dims[0], -1])
+
+ mix = m[:, 0].reshape(batch_dims) * batch
+ for i in range(self.mixture_width):
+ aug = batch
+ depth = self.chain_depth if self.chain_depth > 0 else int(torch.randint(low=1, high=4, size=(1,)).item())
+ for _ in range(depth):
+ transform_id, (magnitudes_fn, signed) = self._get_random_item(augmentation_space)
+
+ magnitudes = magnitudes_fn(self._PARAMETER_MAX, height, width)
+ if magnitudes is not None:
+ magnitude = float(magnitudes[int(torch.randint(self.severity, ()))])
+ if signed and torch.rand(()) <= 0.5:
+ magnitude *= -1
+ else:
+ magnitude = 0.0
+
+ aug = self._apply_image_or_video_transform(
+ aug, transform_id, magnitude, interpolation=self.interpolation, fill=self._fill
+ )
+ mix.add_(combined_weights[:, i].reshape(batch_dims) * aug)
+ mix = mix.reshape(orig_dims).to(dtype=image_or_video.dtype)
+
+ if isinstance(orig_image_or_video, (tv_tensors.Image, tv_tensors.Video)):
+ mix = tv_tensors.wrap(mix, like=orig_image_or_video)
+ elif isinstance(orig_image_or_video, PIL.Image.Image):
+ mix = F.to_pil_image(mix)
+
+ return self._unflatten_and_insert_image_or_video(flat_inputs_with_spec, mix)
diff --git a/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_container.py b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_container.py
new file mode 100644
index 0000000000000000000000000000000000000000..8f591c49707b00c0bb6256ae3d74ad1f662c0bbf
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_container.py
@@ -0,0 +1,180 @@
+from typing import Any, Callable, Dict, List, Optional, Sequence, Union
+
+import torch
+
+from torch import nn
+from torchvision import transforms as _transforms
+from torchvision.transforms.v2 import Transform
+
+
+class Compose(Transform):
+ """[BETA] Composes several transforms together.
+
+ .. v2betastatus:: Compose transform
+
+ This transform does not support torchscript.
+ Please, see the note below.
+
+ Args:
+ transforms (list of ``Transform`` objects): list of transforms to compose.
+
+ Example:
+ >>> transforms.Compose([
+ >>> transforms.CenterCrop(10),
+ >>> transforms.PILToTensor(),
+ >>> transforms.ConvertImageDtype(torch.float),
+ >>> ])
+
+ .. note::
+ In order to script the transformations, please use ``torch.nn.Sequential`` as below.
+
+ >>> transforms = torch.nn.Sequential(
+ >>> transforms.CenterCrop(10),
+ >>> transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
+ >>> )
+ >>> scripted_transforms = torch.jit.script(transforms)
+
+ Make sure to use only scriptable transformations, i.e. that work with ``torch.Tensor``, does not require
+ `lambda` functions or ``PIL.Image``.
+
+ """
+
+ def __init__(self, transforms: Sequence[Callable]) -> None:
+ super().__init__()
+ if not isinstance(transforms, Sequence):
+ raise TypeError("Argument transforms should be a sequence of callables")
+ elif not transforms:
+ raise ValueError("Pass at least one transform")
+ self.transforms = transforms
+
+ def forward(self, *inputs: Any) -> Any:
+ needs_unpacking = len(inputs) > 1
+ for transform in self.transforms:
+ outputs = transform(*inputs)
+ inputs = outputs if needs_unpacking else (outputs,)
+ return outputs
+
+ def extra_repr(self) -> str:
+ format_string = []
+ for t in self.transforms:
+ format_string.append(f" {t}")
+ return "\n".join(format_string)
+
+
+class RandomApply(Transform):
+ """[BETA] Apply randomly a list of transformations with a given probability.
+
+ .. v2betastatus:: RandomApply transform
+
+ .. note::
+ In order to script the transformation, please use ``torch.nn.ModuleList`` as input instead of list/tuple of
+ transforms as shown below:
+
+ >>> transforms = transforms.RandomApply(torch.nn.ModuleList([
+ >>> transforms.ColorJitter(),
+ >>> ]), p=0.3)
+ >>> scripted_transforms = torch.jit.script(transforms)
+
+ Make sure to use only scriptable transformations, i.e. that work with ``torch.Tensor``, does not require
+ `lambda` functions or ``PIL.Image``.
+
+ Args:
+ transforms (sequence or torch.nn.Module): list of transformations
+ p (float): probability of applying the list of transforms
+ """
+
+ _v1_transform_cls = _transforms.RandomApply
+
+ def __init__(self, transforms: Union[Sequence[Callable], nn.ModuleList], p: float = 0.5) -> None:
+ super().__init__()
+
+ if not isinstance(transforms, (Sequence, nn.ModuleList)):
+ raise TypeError("Argument transforms should be a sequence of callables or a `nn.ModuleList`")
+ self.transforms = transforms
+
+ if not (0.0 <= p <= 1.0):
+ raise ValueError("`p` should be a floating point value in the interval [0.0, 1.0].")
+ self.p = p
+
+ def _extract_params_for_v1_transform(self) -> Dict[str, Any]:
+ return {"transforms": self.transforms, "p": self.p}
+
+ def forward(self, *inputs: Any) -> Any:
+ sample = inputs if len(inputs) > 1 else inputs[0]
+
+ if torch.rand(1) >= self.p:
+ return sample
+
+ for transform in self.transforms:
+ sample = transform(sample)
+ return sample
+
+ def extra_repr(self) -> str:
+ format_string = []
+ for t in self.transforms:
+ format_string.append(f" {t}")
+ return "\n".join(format_string)
+
+
+class RandomChoice(Transform):
+ """[BETA] Apply single transformation randomly picked from a list.
+
+ .. v2betastatus:: RandomChoice transform
+
+ This transform does not support torchscript.
+
+ Args:
+ transforms (sequence or torch.nn.Module): list of transformations
+ p (list of floats or None, optional): probability of each transform being picked.
+ If ``p`` doesn't sum to 1, it is automatically normalized. If ``None``
+ (default), all transforms have the same probability.
+ """
+
+ def __init__(
+ self,
+ transforms: Sequence[Callable],
+ p: Optional[List[float]] = None,
+ ) -> None:
+ if not isinstance(transforms, Sequence):
+ raise TypeError("Argument transforms should be a sequence of callables")
+
+ if p is None:
+ p = [1] * len(transforms)
+ elif len(p) != len(transforms):
+ raise ValueError(f"Length of p doesn't match the number of transforms: {len(p)} != {len(transforms)}")
+
+ super().__init__()
+
+ self.transforms = transforms
+ total = sum(p)
+ self.p = [prob / total for prob in p]
+
+ def forward(self, *inputs: Any) -> Any:
+ idx = int(torch.multinomial(torch.tensor(self.p), 1))
+ transform = self.transforms[idx]
+ return transform(*inputs)
+
+
+class RandomOrder(Transform):
+ """[BETA] Apply a list of transformations in a random order.
+
+ .. v2betastatus:: RandomOrder transform
+
+ This transform does not support torchscript.
+
+ Args:
+ transforms (sequence or torch.nn.Module): list of transformations
+ """
+
+ def __init__(self, transforms: Sequence[Callable]) -> None:
+ if not isinstance(transforms, Sequence):
+ raise TypeError("Argument transforms should be a sequence of callables")
+ super().__init__()
+ self.transforms = transforms
+
+ def forward(self, *inputs: Any) -> Any:
+ sample = inputs if len(inputs) > 1 else inputs[0]
+ for idx in torch.randperm(len(self.transforms)):
+ transform = self.transforms[idx]
+ sample = transform(sample)
+ return sample
diff --git a/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_geometry.py b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_geometry.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d79fb6fd24b2c0a3d96c2b877d0f0299466e28b
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_geometry.py
@@ -0,0 +1,1447 @@
+import math
+import numbers
+import warnings
+from typing import Any, Callable, cast, Dict, List, Literal, Optional, Sequence, Tuple, Type, Union
+
+import PIL.Image
+import torch
+
+from torchvision import transforms as _transforms, tv_tensors
+from torchvision.ops.boxes import box_iou
+from torchvision.transforms.functional import _get_perspective_coeffs
+from torchvision.transforms.v2 import functional as F, InterpolationMode, Transform
+from torchvision.transforms.v2.functional._geometry import _check_interpolation
+from torchvision.transforms.v2.functional._utils import _FillType
+
+from ._transform import _RandomApplyTransform
+from ._utils import (
+ _check_padding_arg,
+ _check_padding_mode_arg,
+ _check_sequence_input,
+ _get_fill,
+ _setup_angle,
+ _setup_fill_arg,
+ _setup_number_or_seq,
+ _setup_size,
+ get_bounding_boxes,
+ has_all,
+ has_any,
+ is_pure_tensor,
+ query_size,
+)
+
+
class RandomHorizontalFlip(_RandomApplyTransform):
    """[BETA] Horizontally flip the input with a given probability.

    .. v2betastatus:: RandomHorizontalFlip transform

    If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
    :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
    it can have arbitrary number of leading batch dimensions. For example,
    the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.

    Args:
        p (float, optional): probability of the input being flipped. Default value is 0.5
    """

    # v1 counterpart used for torchscript / legacy interop.
    _v1_transform_cls = _transforms.RandomHorizontalFlip

    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
        # The p-biased coin flip lives in _RandomApplyTransform; when this
        # method runs, the decision to flip has already been made.
        return self._call_kernel(F.horizontal_flip, inpt)
+
+
class RandomVerticalFlip(_RandomApplyTransform):
    """[BETA] Vertically flip the input with a given probability.

    .. v2betastatus:: RandomVerticalFlip transform

    If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
    :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
    it can have arbitrary number of leading batch dimensions. For example,
    the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.

    Args:
        p (float, optional): probability of the input being flipped. Default value is 0.5
    """

    # v1 counterpart used for torchscript / legacy interop.
    _v1_transform_cls = _transforms.RandomVerticalFlip

    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
        # The p-biased coin flip lives in _RandomApplyTransform; when this
        # method runs, the decision to flip has already been made.
        return self._call_kernel(F.vertical_flip, inpt)
+
+
class Resize(Transform):
    """[BETA] Resize the input to the given size.

    .. v2betastatus:: Resize transform

    If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
    :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
    it can have arbitrary number of leading batch dimensions. For example,
    the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.

    .. warning::
        The output image might be different depending on its type: when downsampling, the interpolation of PIL images
        and tensors is slightly different, because PIL applies antialiasing. This may lead to significant differences
        in the performance of a network. Therefore, it is preferable to train and serve a model with the same input
        types. See also below the ``antialias`` parameter, which can help making the output of PIL images and tensors
        closer.

    Args:
        size (sequence or int): Desired output size. If size is a sequence like
            (h, w), output size will be matched to this. If size is an int,
            smaller edge of the image will be matched to this number.
            i.e, if height > width, then image will be rescaled to
            (size * height / width, size).

            .. note::
                In torchscript mode size as single int is not supported, use a sequence of length 1: ``[size, ]``.
        interpolation (InterpolationMode, optional): Desired interpolation enum defined by
            :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.BILINEAR``.
            If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.NEAREST_EXACT``,
            ``InterpolationMode.BILINEAR`` and ``InterpolationMode.BICUBIC`` are supported.
            The corresponding Pillow integer constants, e.g. ``PIL.Image.BILINEAR`` are accepted as well.
        max_size (int, optional): The maximum allowed for the longer edge of
            the resized image. If the longer edge of the image is greater
            than ``max_size`` after being resized according to ``size``,
            ``size`` will be overruled so that the longer edge is equal to
            ``max_size``.
            As a result, the smaller edge may be shorter than ``size``. This
            is only supported if ``size`` is an int (or a sequence of length
            1 in torchscript mode).
        antialias (bool, optional): Whether to apply antialiasing.
            It only affects **tensors** with bilinear or bicubic modes and it is
            ignored otherwise: on PIL images, antialiasing is always applied on
            bilinear or bicubic modes; on other modes (for PIL images and
            tensors), antialiasing makes no sense and this parameter is ignored.
            Possible values are:

            - ``True``: will apply antialiasing for bilinear or bicubic modes.
              Other mode aren't affected. This is probably what you want to use.
            - ``False``: will not apply antialiasing for tensors on any mode. PIL
              images are still antialiased on bilinear or bicubic modes, because
              PIL doesn't support no antialias.
            - ``None``: equivalent to ``False`` for tensors and ``True`` for
              PIL images. This value exists for legacy reasons and you probably
              don't want to use it unless you really know what you are doing.

            The current default is ``None`` **but will change to** ``True`` **in
            v0.17** for the PIL and Tensor backends to be consistent.
    """

    # v1 counterpart used for torchscript / legacy interop.
    _v1_transform_cls = _transforms.Resize

    def __init__(
        self,
        size: Union[int, Sequence[int]],
        interpolation: Union[InterpolationMode, int] = InterpolationMode.BILINEAR,
        max_size: Optional[int] = None,
        antialias: Optional[Union[str, bool]] = "warn",
    ) -> None:
        super().__init__()

        # Normalize `size` to a list of one or two ints; a single int means
        # "match the smaller edge" (see the class docs).
        if isinstance(size, int):
            size = [size]
        elif isinstance(size, (list, tuple)) and len(size) in {1, 2}:
            size = list(size)
        else:
            raise ValueError(
                f"size can either be an integer or a list or tuple of one or two integers, " f"but got {size} instead."
            )
        self.size = size

        # Accepts InterpolationMode members or their PIL integer equivalents.
        self.interpolation = _check_interpolation(interpolation)
        self.max_size = max_size
        # "warn" is a sentinel for the v0.17 antialias default migration.
        self.antialias = antialias

    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
        return self._call_kernel(
            F.resize,
            inpt,
            self.size,
            interpolation=self.interpolation,
            max_size=self.max_size,
            antialias=self.antialias,
        )
+
+
class CenterCrop(Transform):
    """[BETA] Crop the input at the center.

    .. v2betastatus:: CenterCrop transform

    If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
    :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
    it can have arbitrary number of leading batch dimensions. For example,
    the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.

    If image size is smaller than output size along any edge, image is padded with 0 and then center cropped.

    Args:
        size (sequence or int): Desired output size of the crop. If size is an
            int instead of sequence like (h, w), a square crop (size, size) is
            made. If provided a sequence of length 1, it will be interpreted as (size[0], size[0]).
    """

    # v1 counterpart used for torchscript / legacy interop.
    _v1_transform_cls = _transforms.CenterCrop

    def __init__(self, size: Union[int, Sequence[int]]):
        super().__init__()
        # _setup_size expands an int / 1-sequence into a canonical (h, w) pair.
        self.size = _setup_size(size, error_msg="Please provide only two dimensions (h, w) for size.")

    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
        return self._call_kernel(F.center_crop, inpt, output_size=self.size)
+
+
class RandomResizedCrop(Transform):
    """[BETA] Crop a random portion of the input and resize it to a given size.

    .. v2betastatus:: RandomResizedCrop transform

    If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
    :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
    it can have arbitrary number of leading batch dimensions. For example,
    the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.

    A crop of the original input is made: the crop has a random area (H * W)
    and a random aspect ratio. This crop is finally resized to the given
    size. This is popularly used to train the Inception networks.

    Args:
        size (int or sequence): expected output size of the crop, for each edge. If size is an
            int instead of sequence like (h, w), a square output size ``(size, size)`` is
            made. If provided a sequence of length 1, it will be interpreted as (size[0], size[0]).

            .. note::
                In torchscript mode size as single int is not supported, use a sequence of length 1: ``[size, ]``.
        scale (tuple of float, optional): Specifies the lower and upper bounds for the random area of the crop,
            before resizing. The scale is defined with respect to the area of the original image.
        ratio (tuple of float, optional): lower and upper bounds for the random aspect ratio of the crop, before
            resizing.
        interpolation (InterpolationMode, optional): Desired interpolation enum defined by
            :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.BILINEAR``.
            If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.NEAREST_EXACT``,
            ``InterpolationMode.BILINEAR`` and ``InterpolationMode.BICUBIC`` are supported.
            The corresponding Pillow integer constants, e.g. ``PIL.Image.BILINEAR`` are accepted as well.
        antialias (bool, optional): Whether to apply antialiasing.
            It only affects **tensors** with bilinear or bicubic modes and it is
            ignored otherwise: on PIL images, antialiasing is always applied on
            bilinear or bicubic modes; on other modes (for PIL images and
            tensors), antialiasing makes no sense and this parameter is ignored.
            Possible values are:

            - ``True``: will apply antialiasing for bilinear or bicubic modes.
              Other mode aren't affected. This is probably what you want to use.
            - ``False``: will not apply antialiasing for tensors on any mode. PIL
              images are still antialiased on bilinear or bicubic modes, because
              PIL doesn't support no antialias.
            - ``None``: equivalent to ``False`` for tensors and ``True`` for
              PIL images. This value exists for legacy reasons and you probably
              don't want to use it unless you really know what you are doing.

            The current default is ``None`` **but will change to** ``True`` **in
            v0.17** for the PIL and Tensor backends to be consistent.
    """

    # v1 counterpart used for torchscript / legacy interop.
    _v1_transform_cls = _transforms.RandomResizedCrop

    def __init__(
        self,
        size: Union[int, Sequence[int]],
        scale: Tuple[float, float] = (0.08, 1.0),
        ratio: Tuple[float, float] = (3.0 / 4.0, 4.0 / 3.0),
        interpolation: Union[InterpolationMode, int] = InterpolationMode.BILINEAR,
        antialias: Optional[Union[str, bool]] = "warn",
    ) -> None:
        super().__init__()
        self.size = _setup_size(size, error_msg="Please provide only two dimensions (h, w) for size.")

        if not isinstance(scale, Sequence):
            raise TypeError("Scale should be a sequence")
        scale = cast(Tuple[float, float], scale)
        if not isinstance(ratio, Sequence):
            raise TypeError("Ratio should be a sequence")
        ratio = cast(Tuple[float, float], ratio)
        if (scale[0] > scale[1]) or (ratio[0] > ratio[1]):
            # Misordered bounds are tolerated with a warning only, matching v1.
            warnings.warn("Scale and ratio should be of kind (min, max)")

        self.scale = scale
        self.ratio = ratio
        self.interpolation = _check_interpolation(interpolation)
        self.antialias = antialias

        # Pre-compute the log bounds so _get_params can sample the aspect
        # ratio log-uniformly.
        self._log_ratio = torch.log(torch.tensor(self.ratio))

    def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
        height, width = query_size(flat_inputs)
        area = height * width

        log_ratio = self._log_ratio
        # Rejection-sample up to 10 candidate crops; keep the first one that
        # fits entirely inside the input.
        for _ in range(10):
            target_area = area * torch.empty(1).uniform_(self.scale[0], self.scale[1]).item()
            aspect_ratio = torch.exp(
                torch.empty(1).uniform_(
                    log_ratio[0],  # type: ignore[arg-type]
                    log_ratio[1],  # type: ignore[arg-type]
                )
            ).item()

            w = int(round(math.sqrt(target_area * aspect_ratio)))
            h = int(round(math.sqrt(target_area / aspect_ratio)))

            if 0 < w <= width and 0 < h <= height:
                i = torch.randint(0, height - h + 1, size=(1,)).item()
                j = torch.randint(0, width - w + 1, size=(1,)).item()
                break
        else:
            # Fallback to central crop: clamp the aspect ratio into the
            # requested range and center the crop.
            in_ratio = float(width) / float(height)
            if in_ratio < min(self.ratio):
                w = width
                h = int(round(w / min(self.ratio)))
            elif in_ratio > max(self.ratio):
                h = height
                w = int(round(h * max(self.ratio)))
            else:  # whole image
                w = width
                h = height
            i = (height - h) // 2
            j = (width - w) // 2

        return dict(top=i, left=j, height=h, width=w)

    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
        return self._call_kernel(
            F.resized_crop, inpt, **params, size=self.size, interpolation=self.interpolation, antialias=self.antialias
        )
+
+
class FiveCrop(Transform):
    """[BETA] Crop the image or video into four corners and the central crop.

    .. v2betastatus:: FiveCrop transform

    If the input is a :class:`torch.Tensor` or a :class:`~torchvision.tv_tensors.Image` or a
    :class:`~torchvision.tv_tensors.Video` it can have arbitrary number of leading batch dimensions.
    For example, the image can have ``[..., C, H, W]`` shape.

    .. Note::
        This transform returns a tuple of images and there may be a mismatch in the number of
        inputs and targets your Dataset returns. See below for an example of how to deal with
        this.

    Args:
        size (sequence or int): Desired output size of the crop. If size is an ``int``
            instead of sequence like (h, w), a square crop of size (size, size) is made.
            If provided a sequence of length 1, it will be interpreted as (size[0], size[0]).

    Example:
        >>> class BatchMultiCrop(transforms.Transform):
        ...     def forward(self, sample: Tuple[Tuple[Union[tv_tensors.Image, tv_tensors.Video], ...], int]):
        ...         images_or_videos, label = sample
        ...         batch_size = len(images_or_videos)
        ...         image_or_video = images_or_videos[0]
        ...         images_or_videos = tv_tensors.wrap(torch.stack(images_or_videos), like=image_or_video)
        ...         labels = torch.full((batch_size,), label, device=images_or_videos.device)
        ...         return images_or_videos, labels
        ...
        >>> image = tv_tensors.Image(torch.rand(3, 256, 256))
        >>> label = 3
        >>> transform = transforms.Compose([transforms.FiveCrop(224), BatchMultiCrop()])
        >>> images, labels = transform(image, label)
        >>> images.shape
        torch.Size([5, 3, 224, 224])
        >>> labels
        tensor([3, 3, 3, 3, 3])
    """

    # v1 counterpart used for torchscript / legacy interop.
    _v1_transform_cls = _transforms.FiveCrop

    def __init__(self, size: Union[int, Sequence[int]]) -> None:
        super().__init__()
        self.size = _setup_size(size, error_msg="Please provide only two dimensions (h, w) for size.")

    def _call_kernel(self, functional: Callable, inpt: Any, *args: Any, **kwargs: Any) -> Any:
        # BoundingBoxes / Mask inputs are passed through unchanged for now;
        # warn so users notice when this behavior eventually changes.
        if isinstance(inpt, (tv_tensors.BoundingBoxes, tv_tensors.Mask)):
            warnings.warn(
                f"{type(self).__name__}() is currently passing through inputs of type "
                f"tv_tensors.{type(inpt).__name__}. This will likely change in the future."
            )
        return super()._call_kernel(functional, inpt, *args, **kwargs)

    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
        return self._call_kernel(F.five_crop, inpt, self.size)

    def _check_inputs(self, flat_inputs: List[Any]) -> None:
        # Whole-sample check: reject samples containing boxes/masks outright.
        if has_any(flat_inputs, tv_tensors.BoundingBoxes, tv_tensors.Mask):
            raise TypeError(f"BoundingBoxes'es and Mask's are not supported by {type(self).__name__}()")
+
+
class TenCrop(Transform):
    """[BETA] Crop the image or video into four corners and the central crop plus the flipped version of
    these (horizontal flipping is used by default).

    .. v2betastatus:: TenCrop transform

    If the input is a :class:`torch.Tensor` or a :class:`~torchvision.tv_tensors.Image` or a
    :class:`~torchvision.tv_tensors.Video` it can have arbitrary number of leading batch dimensions.
    For example, the image can have ``[..., C, H, W]`` shape.

    See :class:`~torchvision.transforms.v2.FiveCrop` for an example.

    .. Note::
        This transform returns a tuple of images and there may be a mismatch in the number of
        inputs and targets your Dataset returns. See below for an example of how to deal with
        this.

    Args:
        size (sequence or int): Desired output size of the crop. If size is an
            int instead of sequence like (h, w), a square crop (size, size) is
            made. If provided a sequence of length 1, it will be interpreted as (size[0], size[0]).
        vertical_flip (bool, optional): Use vertical flipping instead of horizontal
    """

    # v1 counterpart used for torchscript / legacy interop.
    _v1_transform_cls = _transforms.TenCrop

    def __init__(self, size: Union[int, Sequence[int]], vertical_flip: bool = False) -> None:
        super().__init__()
        self.size = _setup_size(size, error_msg="Please provide only two dimensions (h, w) for size.")
        self.vertical_flip = vertical_flip

    def _call_kernel(self, functional: Callable, inpt: Any, *args: Any, **kwargs: Any) -> Any:
        # BoundingBoxes / Mask inputs are passed through unchanged for now;
        # warn so users notice when this behavior eventually changes.
        if isinstance(inpt, (tv_tensors.BoundingBoxes, tv_tensors.Mask)):
            warnings.warn(
                f"{type(self).__name__}() is currently passing through inputs of type "
                f"tv_tensors.{type(inpt).__name__}. This will likely change in the future."
            )
        return super()._call_kernel(functional, inpt, *args, **kwargs)

    def _check_inputs(self, flat_inputs: List[Any]) -> None:
        # Whole-sample check: reject samples containing boxes/masks outright.
        if has_any(flat_inputs, tv_tensors.BoundingBoxes, tv_tensors.Mask):
            raise TypeError(f"BoundingBoxes'es and Mask's are not supported by {type(self).__name__}()")

    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
        return self._call_kernel(F.ten_crop, inpt, self.size, vertical_flip=self.vertical_flip)
+
+
class Pad(Transform):
    """[BETA] Pad the input on all sides with the given "pad" value.

    .. v2betastatus:: Pad transform

    If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
    :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
    it can have arbitrary number of leading batch dimensions. For example,
    the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.

    Args:
        padding (int or sequence): Padding on each border. If a single int is provided this
            is used to pad all borders. If sequence of length 2 is provided this is the padding
            on left/right and top/bottom respectively. If a sequence of length 4 is provided
            this is the padding for the left, top, right and bottom borders respectively.

            .. note::
                In torchscript mode padding as single int is not supported, use a sequence of
                length 1: ``[padding, ]``.
        fill (number or tuple or dict, optional): Pixel fill value used when the ``padding_mode`` is constant.
            Default is 0. If a tuple of length 3, it is used to fill R, G, B channels respectively.
            Fill value can be also a dictionary mapping data type to the fill value, e.g.
            ``fill={tv_tensors.Image: 127, tv_tensors.Mask: 0}`` where ``Image`` will be filled with 127 and
            ``Mask`` will be filled with 0.
        padding_mode (str, optional): Type of padding. Should be: constant, edge, reflect or symmetric.
            Default is "constant".

            - constant: pads with a constant value, this value is specified with fill

            - edge: pads with the last value at the edge of the image.

            - reflect: pads with reflection of image without repeating the last value on the edge.
              For example, padding [1, 2, 3, 4] with 2 elements on both sides in reflect mode
              will result in [3, 2, 1, 2, 3, 4, 3, 2]

            - symmetric: pads with reflection of image repeating the last value on the edge.
              For example, padding [1, 2, 3, 4] with 2 elements on both sides in symmetric mode
              will result in [2, 1, 1, 2, 3, 4, 4, 3]
    """

    # v1 counterpart used for torchscript / legacy interop.
    _v1_transform_cls = _transforms.Pad

    def _extract_params_for_v1_transform(self) -> Dict[str, Any]:
        # The scriptable v1 Pad only supports scalar fills, so per-type dict
        # or sequence fills must be rejected before handing params over.
        params = super()._extract_params_for_v1_transform()

        if not (params["fill"] is None or isinstance(params["fill"], (int, float))):
            raise ValueError(f"{type(self).__name__}() can only be scripted for a scalar `fill`, but got {self.fill}.")

        return params

    def __init__(
        self,
        padding: Union[int, Sequence[int]],
        fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = 0,
        padding_mode: Literal["constant", "edge", "reflect", "symmetric"] = "constant",
    ) -> None:
        super().__init__()

        _check_padding_arg(padding)
        _check_padding_mode_arg(padding_mode)

        # This cast does Sequence[int] -> List[int] and is required to make mypy happy
        if not isinstance(padding, int):
            padding = list(padding)
        self.padding = padding
        self.fill = fill
        # Resolve the (possibly per-type dict) fill spec once, up front.
        self._fill = _setup_fill_arg(fill)
        self.padding_mode = padding_mode

    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
        # Pick the fill value registered for this particular input type.
        fill = _get_fill(self._fill, type(inpt))
        return self._call_kernel(F.pad, inpt, padding=self.padding, fill=fill, padding_mode=self.padding_mode)  # type: ignore[arg-type]
+
+
class RandomZoomOut(_RandomApplyTransform):
    """[BETA] "Zoom out" transformation from
    `"SSD: Single Shot MultiBox Detector" `_.

    .. v2betastatus:: RandomZoomOut transform

    This transformation randomly pads images, videos, bounding boxes and masks creating a zoom out effect.
    Output spatial size is randomly sampled from original size up to a maximum size configured
    with ``side_range`` parameter:

    .. code-block:: python

        r = uniform_sample(side_range[0], side_range[1])
        output_width = input_width * r
        output_height = input_height * r

    If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
    :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
    it can have arbitrary number of leading batch dimensions. For example,
    the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.

    Args:
        fill (number or tuple or dict, optional): Pixel fill value used when the ``padding_mode`` is constant.
            Default is 0. If a tuple of length 3, it is used to fill R, G, B channels respectively.
            Fill value can be also a dictionary mapping data type to the fill value, e.g.
            ``fill={tv_tensors.Image: 127, tv_tensors.Mask: 0}`` where ``Image`` will be filled with 127 and
            ``Mask`` will be filled with 0.
        side_range (sequence of floats, optional): tuple of two floats defines minimum and maximum factors to
            scale the input size.
        p (float, optional): probability that the zoom operation will be performed.
    """

    def __init__(
        self,
        fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = 0,
        side_range: Sequence[float] = (1.0, 4.0),
        p: float = 0.5,
    ) -> None:
        super().__init__(p=p)

        self.fill = fill
        # Resolve the (possibly per-type dict) fill spec once, up front.
        self._fill = _setup_fill_arg(fill)

        _check_sequence_input(side_range, "side_range", req_sizes=(2,))

        self.side_range = side_range
        # Factors below 1 would shrink the canvas, which "zoom out" never does.
        if side_range[0] < 1.0 or side_range[0] > side_range[1]:
            raise ValueError(f"Invalid canvas side range provided {side_range}.")

    def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
        orig_h, orig_w = query_size(flat_inputs)

        # A single scale factor r for both sides preserves the aspect ratio.
        r = self.side_range[0] + torch.rand(1) * (self.side_range[1] - self.side_range[0])
        canvas_width = int(orig_w * r)
        canvas_height = int(orig_h * r)

        # Place the original content uniformly at random inside the canvas;
        # the remainder becomes (left, top, right, bottom) padding.
        r = torch.rand(2)
        left = int((canvas_width - orig_w) * r[0])
        top = int((canvas_height - orig_h) * r[1])
        right = canvas_width - (left + orig_w)
        bottom = canvas_height - (top + orig_h)
        padding = [left, top, right, bottom]

        return dict(padding=padding)

    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
        fill = _get_fill(self._fill, type(inpt))
        return self._call_kernel(F.pad, inpt, **params, fill=fill)
+
+
class RandomRotation(Transform):
    """[BETA] Rotate the input by angle.

    .. v2betastatus:: RandomRotation transform

    If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
    :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
    it can have arbitrary number of leading batch dimensions. For example,
    the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.

    Args:
        degrees (sequence or number): Range of degrees to select from.
            If degrees is a number instead of sequence like (min, max), the range of degrees
            will be (-degrees, +degrees).
        interpolation (InterpolationMode, optional): Desired interpolation enum defined by
            :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.NEAREST``.
            If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
            The corresponding Pillow integer constants, e.g. ``PIL.Image.BILINEAR`` are accepted as well.
        expand (bool, optional): Optional expansion flag.
            If true, expands the output to make it large enough to hold the entire rotated image.
            If false or omitted, make the output image the same size as the input image.
            Note that the expand flag assumes rotation around the center (see note below) and no translation.
        center (sequence, optional): Optional center of rotation, (x, y). Origin is the upper left corner.
            Default is the center of the image.

            .. note::

                In theory, setting ``center`` has no effect if ``expand=True``, since the image center will become the
                center of rotation. In practice however, due to numerical precision, this can lead to off-by-one
                differences of the resulting image size compared to using the image center in the first place. Thus, when
                setting ``expand=True``, it's best to leave ``center=None`` (default).
        fill (number or tuple or dict, optional): Pixel fill value used when the ``padding_mode`` is constant.
            Default is 0. If a tuple of length 3, it is used to fill R, G, B channels respectively.
            Fill value can be also a dictionary mapping data type to the fill value, e.g.
            ``fill={tv_tensors.Image: 127, tv_tensors.Mask: 0}`` where ``Image`` will be filled with 127 and
            ``Mask`` will be filled with 0.

    .. _filters: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#filters

    """

    # v1 counterpart used for torchscript / legacy interop.
    _v1_transform_cls = _transforms.RandomRotation

    def __init__(
        self,
        degrees: Union[numbers.Number, Sequence],
        interpolation: Union[InterpolationMode, int] = InterpolationMode.NEAREST,
        expand: bool = False,
        center: Optional[List[float]] = None,
        fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = 0,
    ) -> None:
        super().__init__()
        # _setup_angle turns a scalar d into the symmetric range (-d, +d).
        self.degrees = _setup_angle(degrees, name="degrees", req_sizes=(2,))
        self.interpolation = _check_interpolation(interpolation)
        self.expand = expand

        self.fill = fill
        # Resolve the (possibly per-type dict) fill spec once, up front.
        self._fill = _setup_fill_arg(fill)

        if center is not None:
            _check_sequence_input(center, "center", req_sizes=(2,))

        self.center = center

    def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
        # One uniform draw from the configured degree range per call.
        angle = torch.empty(1).uniform_(self.degrees[0], self.degrees[1]).item()
        return dict(angle=angle)

    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
        fill = _get_fill(self._fill, type(inpt))
        return self._call_kernel(
            F.rotate,
            inpt,
            **params,
            interpolation=self.interpolation,
            expand=self.expand,
            center=self.center,
            fill=fill,
        )
+
+
class RandomAffine(Transform):
    """[BETA] Random affine transformation the input keeping center invariant.

    .. v2betastatus:: RandomAffine transform

    If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
    :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
    it can have arbitrary number of leading batch dimensions. For example,
    the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.

    Args:
        degrees (sequence or number): Range of degrees to select from.
            If degrees is a number instead of sequence like (min, max), the range of degrees
            will be (-degrees, +degrees). Set to 0 to deactivate rotations.
        translate (tuple, optional): tuple of maximum absolute fraction for horizontal
            and vertical translations. For example translate=(a, b), then horizontal shift
            is randomly sampled in the range -img_width * a < dx < img_width * a and vertical shift is
            randomly sampled in the range -img_height * b < dy < img_height * b. Will not translate by default.
        scale (tuple, optional): scaling factor interval, e.g (a, b), then scale is
            randomly sampled from the range a <= scale <= b. Will keep original scale by default.
        shear (sequence or number, optional): Range of degrees to select from.
            If shear is a number, a shear parallel to the x-axis in the range (-shear, +shear)
            will be applied. Else if shear is a sequence of 2 values a shear parallel to the x-axis in the
            range (shear[0], shear[1]) will be applied. Else if shear is a sequence of 4 values,
            an x-axis shear in (shear[0], shear[1]) and y-axis shear in (shear[2], shear[3]) will be applied.
            Will not apply shear by default.
        interpolation (InterpolationMode, optional): Desired interpolation enum defined by
            :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.NEAREST``.
            If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
            The corresponding Pillow integer constants, e.g. ``PIL.Image.BILINEAR`` are accepted as well.
        fill (number or tuple or dict, optional): Pixel fill value used when the ``padding_mode`` is constant.
            Default is 0. If a tuple of length 3, it is used to fill R, G, B channels respectively.
            Fill value can be also a dictionary mapping data type to the fill value, e.g.
            ``fill={tv_tensors.Image: 127, tv_tensors.Mask: 0}`` where ``Image`` will be filled with 127 and
            ``Mask`` will be filled with 0.
        center (sequence, optional): Optional center of rotation, (x, y). Origin is the upper left corner.
            Default is the center of the image.

    .. _filters: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#filters

    """

    # v1 counterpart used for torchscript / legacy interop.
    _v1_transform_cls = _transforms.RandomAffine

    def __init__(
        self,
        degrees: Union[numbers.Number, Sequence],
        translate: Optional[Sequence[float]] = None,
        scale: Optional[Sequence[float]] = None,
        shear: Optional[Union[int, float, Sequence[float]]] = None,
        interpolation: Union[InterpolationMode, int] = InterpolationMode.NEAREST,
        fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = 0,
        center: Optional[List[float]] = None,
    ) -> None:
        super().__init__()
        # _setup_angle turns a scalar d into the symmetric range (-d, +d).
        self.degrees = _setup_angle(degrees, name="degrees", req_sizes=(2,))
        if translate is not None:
            _check_sequence_input(translate, "translate", req_sizes=(2,))
            # Translations are fractions of the image size, hence [0, 1].
            for t in translate:
                if not (0.0 <= t <= 1.0):
                    raise ValueError("translation values should be between 0 and 1")
        self.translate = translate
        if scale is not None:
            _check_sequence_input(scale, "scale", req_sizes=(2,))
            for s in scale:
                if s <= 0:
                    raise ValueError("scale values should be positive")
        self.scale = scale

        if shear is not None:
            # A scalar or 2-sequence means x-shear only; 4 values add y-shear.
            self.shear = _setup_angle(shear, name="shear", req_sizes=(2, 4))
        else:
            self.shear = shear

        self.interpolation = _check_interpolation(interpolation)
        self.fill = fill
        # Resolve the (possibly per-type dict) fill spec once, up front.
        self._fill = _setup_fill_arg(fill)

        if center is not None:
            _check_sequence_input(center, "center", req_sizes=(2,))

        self.center = center

    def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
        height, width = query_size(flat_inputs)

        # NOTE: the sampling order (angle, translate, scale, shear) is fixed;
        # reordering these draws would change results under a fixed seed.
        angle = torch.empty(1).uniform_(self.degrees[0], self.degrees[1]).item()
        if self.translate is not None:
            max_dx = float(self.translate[0] * width)
            max_dy = float(self.translate[1] * height)
            tx = int(round(torch.empty(1).uniform_(-max_dx, max_dx).item()))
            ty = int(round(torch.empty(1).uniform_(-max_dy, max_dy).item()))
            translate = (tx, ty)
        else:
            translate = (0, 0)

        if self.scale is not None:
            scale = torch.empty(1).uniform_(self.scale[0], self.scale[1]).item()
        else:
            scale = 1.0

        shear_x = shear_y = 0.0
        if self.shear is not None:
            shear_x = torch.empty(1).uniform_(self.shear[0], self.shear[1]).item()
            if len(self.shear) == 4:
                shear_y = torch.empty(1).uniform_(self.shear[2], self.shear[3]).item()

        shear = (shear_x, shear_y)
        return dict(angle=angle, translate=translate, scale=scale, shear=shear)

    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
        fill = _get_fill(self._fill, type(inpt))
        return self._call_kernel(
            F.affine,
            inpt,
            **params,
            interpolation=self.interpolation,
            fill=fill,
            center=self.center,
        )
+
+
+class RandomCrop(Transform):
+ """[BETA] Crop the input at a random location.
+
+ .. v2betastatus:: RandomCrop transform
+
+ If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
+ :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
+ it can have arbitrary number of leading batch dimensions. For example,
+ the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.
+
+ Args:
+ size (sequence or int): Desired output size of the crop. If size is an
+ int instead of sequence like (h, w), a square crop (size, size) is
+ made. If provided a sequence of length 1, it will be interpreted as (size[0], size[0]).
+ padding (int or sequence, optional): Optional padding on each border
+ of the image. Default is None. If a single int is provided this
+ is used to pad all borders. If sequence of length 2 is provided this is the padding
+ on left/right and top/bottom respectively. If a sequence of length 4 is provided
+ this is the padding for the left, top, right and bottom borders respectively.
+
+ .. note::
+ In torchscript mode padding as single int is not supported, use a sequence of
+ length 1: ``[padding, ]``.
+ pad_if_needed (boolean, optional): It will pad the image if smaller than the
+ desired size to avoid raising an exception. Since cropping is done
+ after padding, the padding seems to be done at a random offset.
+ fill (number or tuple or dict, optional): Pixel fill value used when the ``padding_mode`` is constant.
+ Default is 0. If a tuple of length 3, it is used to fill R, G, B channels respectively.
+ Fill value can be also a dictionary mapping data type to the fill value, e.g.
+ ``fill={tv_tensors.Image: 127, tv_tensors.Mask: 0}`` where ``Image`` will be filled with 127 and
+ ``Mask`` will be filled with 0.
+ padding_mode (str, optional): Type of padding. Should be: constant, edge, reflect or symmetric.
+ Default is constant.
+
+ - constant: pads with a constant value, this value is specified with fill
+
+ - edge: pads with the last value at the edge of the image.
+
+ - reflect: pads with reflection of image without repeating the last value on the edge.
+ For example, padding [1, 2, 3, 4] with 2 elements on both sides in reflect mode
+ will result in [3, 2, 1, 2, 3, 4, 3, 2]
+
+ - symmetric: pads with reflection of image repeating the last value on the edge.
+ For example, padding [1, 2, 3, 4] with 2 elements on both sides in symmetric mode
+ will result in [2, 1, 1, 2, 3, 4, 4, 3]
+ """
+
+ _v1_transform_cls = _transforms.RandomCrop
+
+ def _extract_params_for_v1_transform(self) -> Dict[str, Any]:
+ params = super()._extract_params_for_v1_transform()
+
+ if not (params["fill"] is None or isinstance(params["fill"], (int, float))):
+ raise ValueError(f"{type(self).__name__}() can only be scripted for a scalar `fill`, but got {self.fill}.")
+
+ padding = self.padding
+ if padding is not None:
+ pad_left, pad_right, pad_top, pad_bottom = padding
+ padding = [pad_left, pad_top, pad_right, pad_bottom]
+ params["padding"] = padding
+
+ return params
+
+ def __init__(
+ self,
+ size: Union[int, Sequence[int]],
+ padding: Optional[Union[int, Sequence[int]]] = None,
+ pad_if_needed: bool = False,
+ fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = 0,
+ padding_mode: Literal["constant", "edge", "reflect", "symmetric"] = "constant",
+ ) -> None:
+ super().__init__()
+
+ self.size = _setup_size(size, error_msg="Please provide only two dimensions (h, w) for size.")
+
+ if pad_if_needed or padding is not None:
+ if padding is not None:
+ _check_padding_arg(padding)
+ _check_padding_mode_arg(padding_mode)
+
+ self.padding = F._geometry._parse_pad_padding(padding) if padding else None # type: ignore[arg-type]
+ self.pad_if_needed = pad_if_needed
+ self.fill = fill
+ self._fill = _setup_fill_arg(fill)
+ self.padding_mode = padding_mode
+
+ def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+ padded_height, padded_width = query_size(flat_inputs)
+
+ if self.padding is not None:
+ pad_left, pad_right, pad_top, pad_bottom = self.padding
+ padded_height += pad_top + pad_bottom
+ padded_width += pad_left + pad_right
+ else:
+ pad_left = pad_right = pad_top = pad_bottom = 0
+
+ cropped_height, cropped_width = self.size
+
+ if self.pad_if_needed:
+ if padded_height < cropped_height:
+ diff = cropped_height - padded_height
+
+ pad_top += diff
+ pad_bottom += diff
+ padded_height += 2 * diff
+
+ if padded_width < cropped_width:
+ diff = cropped_width - padded_width
+
+ pad_left += diff
+ pad_right += diff
+ padded_width += 2 * diff
+
+ if padded_height < cropped_height or padded_width < cropped_width:
+ raise ValueError(
+ f"Required crop size {(cropped_height, cropped_width)} is larger than "
+ f"{'padded ' if self.padding is not None else ''}input image size {(padded_height, padded_width)}."
+ )
+
+ # We need a different order here than we have in self.padding since this padding will be parsed again in `F.pad`
+ padding = [pad_left, pad_top, pad_right, pad_bottom]
+ needs_pad = any(padding)
+
+ needs_vert_crop, top = (
+ (True, int(torch.randint(0, padded_height - cropped_height + 1, size=())))
+ if padded_height > cropped_height
+ else (False, 0)
+ )
+ needs_horz_crop, left = (
+ (True, int(torch.randint(0, padded_width - cropped_width + 1, size=())))
+ if padded_width > cropped_width
+ else (False, 0)
+ )
+
+ return dict(
+ needs_crop=needs_vert_crop or needs_horz_crop,
+ top=top,
+ left=left,
+ height=cropped_height,
+ width=cropped_width,
+ needs_pad=needs_pad,
+ padding=padding,
+ )
+
+ def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
+ if params["needs_pad"]:
+ fill = _get_fill(self._fill, type(inpt))
+ inpt = self._call_kernel(F.pad, inpt, padding=params["padding"], fill=fill, padding_mode=self.padding_mode)
+
+ if params["needs_crop"]:
+ inpt = self._call_kernel(
+ F.crop, inpt, top=params["top"], left=params["left"], height=params["height"], width=params["width"]
+ )
+
+ return inpt
+
+
+class RandomPerspective(_RandomApplyTransform):
+ """[BETA] Perform a random perspective transformation of the input with a given probability.
+
+ .. v2betastatus:: RandomPerspective transform
+
+ If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
+ :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
+ it can have arbitrary number of leading batch dimensions. For example,
+ the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.
+
+ Args:
+ distortion_scale (float, optional): argument to control the degree of distortion and ranges from 0 to 1.
+ Default is 0.5.
+ p (float, optional): probability of the input being transformed. Default is 0.5.
+ interpolation (InterpolationMode, optional): Desired interpolation enum defined by
+ :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.BILINEAR``.
+ If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
+ The corresponding Pillow integer constants, e.g. ``PIL.Image.BILINEAR`` are accepted as well.
+ fill (number or tuple or dict, optional): Pixel fill value used when the ``padding_mode`` is constant.
+ Default is 0. If a tuple of length 3, it is used to fill R, G, B channels respectively.
+ Fill value can be also a dictionary mapping data type to the fill value, e.g.
+ ``fill={tv_tensors.Image: 127, tv_tensors.Mask: 0}`` where ``Image`` will be filled with 127 and
+ ``Mask`` will be filled with 0.
+ """
+
+ _v1_transform_cls = _transforms.RandomPerspective
+
+ def __init__(
+ self,
+ distortion_scale: float = 0.5,
+ p: float = 0.5,
+ interpolation: Union[InterpolationMode, int] = InterpolationMode.BILINEAR,
+ fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = 0,
+ ) -> None:
+ super().__init__(p=p)
+
+ if not (0 <= distortion_scale <= 1):
+ raise ValueError("Argument distortion_scale value should be between 0 and 1")
+
+ self.distortion_scale = distortion_scale
+ self.interpolation = _check_interpolation(interpolation)
+ self.fill = fill
+ self._fill = _setup_fill_arg(fill)
+
+ def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+ height, width = query_size(flat_inputs)
+
+ distortion_scale = self.distortion_scale
+
+ half_height = height // 2
+ half_width = width // 2
+ bound_height = int(distortion_scale * half_height) + 1
+ bound_width = int(distortion_scale * half_width) + 1
+ topleft = [
+ int(torch.randint(0, bound_width, size=(1,))),
+ int(torch.randint(0, bound_height, size=(1,))),
+ ]
+ topright = [
+ int(torch.randint(width - bound_width, width, size=(1,))),
+ int(torch.randint(0, bound_height, size=(1,))),
+ ]
+ botright = [
+ int(torch.randint(width - bound_width, width, size=(1,))),
+ int(torch.randint(height - bound_height, height, size=(1,))),
+ ]
+ botleft = [
+ int(torch.randint(0, bound_width, size=(1,))),
+ int(torch.randint(height - bound_height, height, size=(1,))),
+ ]
+ startpoints = [[0, 0], [width - 1, 0], [width - 1, height - 1], [0, height - 1]]
+ endpoints = [topleft, topright, botright, botleft]
+ perspective_coeffs = _get_perspective_coeffs(startpoints, endpoints)
+ return dict(coefficients=perspective_coeffs)
+
+ def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
+ fill = _get_fill(self._fill, type(inpt))
+ return self._call_kernel(
+ F.perspective,
+ inpt,
+ None,
+ None,
+ fill=fill,
+ interpolation=self.interpolation,
+ **params,
+ )
+
+
+class ElasticTransform(Transform):
+ """[BETA] Transform the input with elastic transformations.
+
+ .. v2betastatus:: RandomPerspective transform
+
+ If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
+ :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
+ it can have arbitrary number of leading batch dimensions. For example,
+ the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.
+
+ Given alpha and sigma, it will generate displacement
+ vectors for all pixels based on random offsets. Alpha controls the strength
+ and sigma controls the smoothness of the displacements.
+ The displacements are added to an identity grid and the resulting grid is
+ used to transform the input.
+
+ .. note::
+ Implementation to transform bounding boxes is approximative (not exact).
+ We construct an approximation of the inverse grid as ``inverse_grid = identity - displacement``.
+ This is not an exact inverse of the grid used to transform images, i.e. ``grid = identity + displacement``.
+ Our assumption is that ``displacement * displacement`` is small and can be ignored.
+ Large displacements would lead to large errors in the approximation.
+
+ Applications:
+ Randomly transforms the morphology of objects in images and produces a
+ see-through-water-like effect.
+
+ Args:
+ alpha (float or sequence of floats, optional): Magnitude of displacements. Default is 50.0.
+ sigma (float or sequence of floats, optional): Smoothness of displacements. Default is 5.0.
+ interpolation (InterpolationMode, optional): Desired interpolation enum defined by
+ :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.BILINEAR``.
+ If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
+ The corresponding Pillow integer constants, e.g. ``PIL.Image.BILINEAR`` are accepted as well.
+ fill (number or tuple or dict, optional): Pixel fill value used when the ``padding_mode`` is constant.
+ Default is 0. If a tuple of length 3, it is used to fill R, G, B channels respectively.
+ Fill value can be also a dictionary mapping data type to the fill value, e.g.
+ ``fill={tv_tensors.Image: 127, tv_tensors.Mask: 0}`` where ``Image`` will be filled with 127 and
+ ``Mask`` will be filled with 0.
+ """
+
+ _v1_transform_cls = _transforms.ElasticTransform
+
+ def __init__(
+ self,
+ alpha: Union[float, Sequence[float]] = 50.0,
+ sigma: Union[float, Sequence[float]] = 5.0,
+ interpolation: Union[InterpolationMode, int] = InterpolationMode.BILINEAR,
+ fill: Union[_FillType, Dict[Union[Type, str], _FillType]] = 0,
+ ) -> None:
+ super().__init__()
+ self.alpha = _setup_number_or_seq(alpha, "alpha")
+ self.sigma = _setup_number_or_seq(sigma, "sigma")
+
+ self.interpolation = _check_interpolation(interpolation)
+ self.fill = fill
+ self._fill = _setup_fill_arg(fill)
+
+ def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+ size = list(query_size(flat_inputs))
+
+ dx = torch.rand([1, 1] + size) * 2 - 1
+ if self.sigma[0] > 0.0:
+ kx = int(8 * self.sigma[0] + 1)
+ # if kernel size is even we have to make it odd
+ if kx % 2 == 0:
+ kx += 1
+ dx = self._call_kernel(F.gaussian_blur, dx, [kx, kx], list(self.sigma))
+ dx = dx * self.alpha[0] / size[0]
+
+ dy = torch.rand([1, 1] + size) * 2 - 1
+ if self.sigma[1] > 0.0:
+ ky = int(8 * self.sigma[1] + 1)
+ # if kernel size is even we have to make it odd
+ if ky % 2 == 0:
+ ky += 1
+ dy = self._call_kernel(F.gaussian_blur, dy, [ky, ky], list(self.sigma))
+ dy = dy * self.alpha[1] / size[1]
+ displacement = torch.concat([dx, dy], 1).permute([0, 2, 3, 1]) # 1 x H x W x 2
+ return dict(displacement=displacement)
+
+ def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
+ fill = _get_fill(self._fill, type(inpt))
+ return self._call_kernel(
+ F.elastic,
+ inpt,
+ **params,
+ fill=fill,
+ interpolation=self.interpolation,
+ )
+
+
+class RandomIoUCrop(Transform):
+ """[BETA] Random IoU crop transformation from
+ `"SSD: Single Shot MultiBox Detector" `_.
+
+ .. v2betastatus:: RandomIoUCrop transform
+
+ This transformation requires an image or video data and ``tv_tensors.BoundingBoxes`` in the input.
+
+ .. warning::
+ In order to properly remove the bounding boxes below the IoU threshold, `RandomIoUCrop`
+ must be followed by :class:`~torchvision.transforms.v2.SanitizeBoundingBoxes`, either immediately
+ after or later in the transforms pipeline.
+
+ If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
+ :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
+ it can have arbitrary number of leading batch dimensions. For example,
+ the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.
+
+ Args:
+ min_scale (float, optional): Minimum factors to scale the input size.
+ max_scale (float, optional): Maximum factors to scale the input size.
+ min_aspect_ratio (float, optional): Minimum aspect ratio for the cropped image or video.
+ max_aspect_ratio (float, optional): Maximum aspect ratio for the cropped image or video.
+ sampler_options (list of float, optional): List of minimal IoU (Jaccard) overlap between all the boxes and
+ a cropped image or video. Default, ``None`` which corresponds to ``[0.0, 0.1, 0.3, 0.5, 0.7, 0.9, 1.0]``
+ trials (int, optional): Number of trials to find a crop for a given value of minimal IoU (Jaccard) overlap.
+ Default, 40.
+ """
+
+ def __init__(
+ self,
+ min_scale: float = 0.3,
+ max_scale: float = 1.0,
+ min_aspect_ratio: float = 0.5,
+ max_aspect_ratio: float = 2.0,
+ sampler_options: Optional[List[float]] = None,
+ trials: int = 40,
+ ):
+ super().__init__()
+ # Configuration similar to https://github.com/weiliu89/caffe/blob/ssd/examples/ssd/ssd_coco.py#L89-L174
+ self.min_scale = min_scale
+ self.max_scale = max_scale
+ self.min_aspect_ratio = min_aspect_ratio
+ self.max_aspect_ratio = max_aspect_ratio
+ if sampler_options is None:
+ sampler_options = [0.0, 0.1, 0.3, 0.5, 0.7, 0.9, 1.0]
+ self.options = sampler_options
+ self.trials = trials
+
+ def _check_inputs(self, flat_inputs: List[Any]) -> None:
+ if not (
+ has_all(flat_inputs, tv_tensors.BoundingBoxes)
+ and has_any(flat_inputs, PIL.Image.Image, tv_tensors.Image, is_pure_tensor)
+ ):
+ raise TypeError(
+ f"{type(self).__name__}() requires input sample to contain tensor or PIL images "
+ "and bounding boxes. Sample can also contain masks."
+ )
+
+ def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+ orig_h, orig_w = query_size(flat_inputs)
+ bboxes = get_bounding_boxes(flat_inputs)
+
+ while True:
+ # sample an option
+ idx = int(torch.randint(low=0, high=len(self.options), size=(1,)))
+ min_jaccard_overlap = self.options[idx]
+ if min_jaccard_overlap >= 1.0: # a value larger than 1 encodes the leave as-is option
+ return dict()
+
+ for _ in range(self.trials):
+ # check the aspect ratio limitations
+ r = self.min_scale + (self.max_scale - self.min_scale) * torch.rand(2)
+ new_w = int(orig_w * r[0])
+ new_h = int(orig_h * r[1])
+ aspect_ratio = new_w / new_h
+ if not (self.min_aspect_ratio <= aspect_ratio <= self.max_aspect_ratio):
+ continue
+
+ # check for 0 area crops
+ r = torch.rand(2)
+ left = int((orig_w - new_w) * r[0])
+ top = int((orig_h - new_h) * r[1])
+ right = left + new_w
+ bottom = top + new_h
+ if left == right or top == bottom:
+ continue
+
+ # check for any valid boxes with centers within the crop area
+ xyxy_bboxes = F.convert_bounding_box_format(
+ bboxes.as_subclass(torch.Tensor),
+ bboxes.format,
+ tv_tensors.BoundingBoxFormat.XYXY,
+ )
+ cx = 0.5 * (xyxy_bboxes[..., 0] + xyxy_bboxes[..., 2])
+ cy = 0.5 * (xyxy_bboxes[..., 1] + xyxy_bboxes[..., 3])
+ is_within_crop_area = (left < cx) & (cx < right) & (top < cy) & (cy < bottom)
+ if not is_within_crop_area.any():
+ continue
+
+ # check at least 1 box with jaccard limitations
+ xyxy_bboxes = xyxy_bboxes[is_within_crop_area]
+ ious = box_iou(
+ xyxy_bboxes,
+ torch.tensor([[left, top, right, bottom]], dtype=xyxy_bboxes.dtype, device=xyxy_bboxes.device),
+ )
+ if ious.max() < min_jaccard_overlap:
+ continue
+
+ return dict(top=top, left=left, height=new_h, width=new_w, is_within_crop_area=is_within_crop_area)
+
+ def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
+
+ if len(params) < 1:
+ return inpt
+
+ output = self._call_kernel(
+ F.crop, inpt, top=params["top"], left=params["left"], height=params["height"], width=params["width"]
+ )
+
+ if isinstance(output, tv_tensors.BoundingBoxes):
+ # We "mark" the invalid boxes as degenreate, and they can be
+ # removed by a later call to SanitizeBoundingBoxes()
+ output[~params["is_within_crop_area"]] = 0
+
+ return output
+
+
+class ScaleJitter(Transform):
+ """[BETA] Perform Large Scale Jitter on the input according to
+ `"Simple Copy-Paste is a Strong Data Augmentation Method for Instance Segmentation" `_.
+
+ .. v2betastatus:: ScaleJitter transform
+
+ If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
+ :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
+ it can have arbitrary number of leading batch dimensions. For example,
+ the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.
+
+ Args:
+ target_size (tuple of int): Target size. This parameter defines base scale for jittering,
+ e.g. ``min(target_size[0] / width, target_size[1] / height)``.
+ scale_range (tuple of float, optional): Minimum and maximum of the scale range. Default, ``(0.1, 2.0)``.
+ interpolation (InterpolationMode, optional): Desired interpolation enum defined by
+ :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.BILINEAR``.
+ If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.NEAREST_EXACT``,
+ ``InterpolationMode.BILINEAR`` and ``InterpolationMode.BICUBIC`` are supported.
+ The corresponding Pillow integer constants, e.g. ``PIL.Image.BILINEAR`` are accepted as well.
+ antialias (bool, optional): Whether to apply antialiasing.
+ It only affects **tensors** with bilinear or bicubic modes and it is
+ ignored otherwise: on PIL images, antialiasing is always applied on
+ bilinear or bicubic modes; on other modes (for PIL images and
+ tensors), antialiasing makes no sense and this parameter is ignored.
+ Possible values are:
+
+ - ``True``: will apply antialiasing for bilinear or bicubic modes.
+ Other mode aren't affected. This is probably what you want to use.
+ - ``False``: will not apply antialiasing for tensors on any mode. PIL
+ images are still antialiased on bilinear or bicubic modes, because
+ PIL doesn't support no antialias.
+ - ``None``: equivalent to ``False`` for tensors and ``True`` for
+ PIL images. This value exists for legacy reasons and you probably
+ don't want to use it unless you really know what you are doing.
+
+ The current default is ``None`` **but will change to** ``True`` **in
+ v0.17** for the PIL and Tensor backends to be consistent.
+ """
+
+ def __init__(
+ self,
+ target_size: Tuple[int, int],
+ scale_range: Tuple[float, float] = (0.1, 2.0),
+ interpolation: Union[InterpolationMode, int] = InterpolationMode.BILINEAR,
+ antialias: Optional[Union[str, bool]] = "warn",
+ ):
+ super().__init__()
+ self.target_size = target_size
+ self.scale_range = scale_range
+ self.interpolation = _check_interpolation(interpolation)
+ self.antialias = antialias
+
+ def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+ orig_height, orig_width = query_size(flat_inputs)
+
+ scale = self.scale_range[0] + torch.rand(1) * (self.scale_range[1] - self.scale_range[0])
+ r = min(self.target_size[1] / orig_height, self.target_size[0] / orig_width) * scale
+ new_width = int(orig_width * r)
+ new_height = int(orig_height * r)
+
+ return dict(size=(new_height, new_width))
+
+ def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
+ return self._call_kernel(
+ F.resize, inpt, size=params["size"], interpolation=self.interpolation, antialias=self.antialias
+ )
+
+
+class RandomShortestSize(Transform):
+ """[BETA] Randomly resize the input.
+
+ .. v2betastatus:: RandomShortestSize transform
+
+ If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
+ :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
+ it can have arbitrary number of leading batch dimensions. For example,
+ the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.
+
+ Args:
+ min_size (int or sequence of int): Minimum spatial size. Single integer value or a sequence of integer values.
+ max_size (int, optional): Maximum spatial size. Default, None.
+ interpolation (InterpolationMode, optional): Desired interpolation enum defined by
+ :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.BILINEAR``.
+ If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.NEAREST_EXACT``,
+ ``InterpolationMode.BILINEAR`` and ``InterpolationMode.BICUBIC`` are supported.
+ The corresponding Pillow integer constants, e.g. ``PIL.Image.BILINEAR`` are accepted as well.
+ antialias (bool, optional): Whether to apply antialiasing.
+ It only affects **tensors** with bilinear or bicubic modes and it is
+ ignored otherwise: on PIL images, antialiasing is always applied on
+ bilinear or bicubic modes; on other modes (for PIL images and
+ tensors), antialiasing makes no sense and this parameter is ignored.
+ Possible values are:
+
+ - ``True``: will apply antialiasing for bilinear or bicubic modes.
+ Other mode aren't affected. This is probably what you want to use.
+ - ``False``: will not apply antialiasing for tensors on any mode. PIL
+ images are still antialiased on bilinear or bicubic modes, because
+ PIL doesn't support no antialias.
+ - ``None``: equivalent to ``False`` for tensors and ``True`` for
+ PIL images. This value exists for legacy reasons and you probably
+ don't want to use it unless you really know what you are doing.
+
+ The current default is ``None`` **but will change to** ``True`` **in
+ v0.17** for the PIL and Tensor backends to be consistent.
+ """
+
+ def __init__(
+ self,
+ min_size: Union[List[int], Tuple[int], int],
+ max_size: Optional[int] = None,
+ interpolation: Union[InterpolationMode, int] = InterpolationMode.BILINEAR,
+ antialias: Optional[Union[str, bool]] = "warn",
+ ):
+ super().__init__()
+ self.min_size = [min_size] if isinstance(min_size, int) else list(min_size)
+ self.max_size = max_size
+ self.interpolation = _check_interpolation(interpolation)
+ self.antialias = antialias
+
+ def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+ orig_height, orig_width = query_size(flat_inputs)
+
+ min_size = self.min_size[int(torch.randint(len(self.min_size), ()))]
+ r = min_size / min(orig_height, orig_width)
+ if self.max_size is not None:
+ r = min(r, self.max_size / max(orig_height, orig_width))
+
+ new_width = int(orig_width * r)
+ new_height = int(orig_height * r)
+
+ return dict(size=(new_height, new_width))
+
+ def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
+ return self._call_kernel(
+ F.resize, inpt, size=params["size"], interpolation=self.interpolation, antialias=self.antialias
+ )
+
+
+class RandomResize(Transform):
+ """[BETA] Randomly resize the input.
+
+ .. v2betastatus:: RandomResize transform
+
+ This transformation can be used together with ``RandomCrop`` as data augmentations to train
+ models on image segmentation task.
+
+ Output spatial size is randomly sampled from the interval ``[min_size, max_size]``:
+
+ .. code-block:: python
+
+ size = uniform_sample(min_size, max_size)
+ output_width = size
+ output_height = size
+
+ If the input is a :class:`torch.Tensor` or a ``TVTensor`` (e.g. :class:`~torchvision.tv_tensors.Image`,
+ :class:`~torchvision.tv_tensors.Video`, :class:`~torchvision.tv_tensors.BoundingBoxes` etc.)
+ it can have arbitrary number of leading batch dimensions. For example,
+ the image can have ``[..., C, H, W]`` shape. A bounding box can have ``[..., 4]`` shape.
+
+ Args:
+ min_size (int): Minimum output size for random sampling
+ max_size (int): Maximum output size for random sampling
+ interpolation (InterpolationMode, optional): Desired interpolation enum defined by
+ :class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.BILINEAR``.
+ If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.NEAREST_EXACT``,
+ ``InterpolationMode.BILINEAR`` and ``InterpolationMode.BICUBIC`` are supported.
+ The corresponding Pillow integer constants, e.g. ``PIL.Image.BILINEAR`` are accepted as well.
+ antialias (bool, optional): Whether to apply antialiasing.
+ It only affects **tensors** with bilinear or bicubic modes and it is
+ ignored otherwise: on PIL images, antialiasing is always applied on
+ bilinear or bicubic modes; on other modes (for PIL images and
+ tensors), antialiasing makes no sense and this parameter is ignored.
+ Possible values are:
+
+ - ``True``: will apply antialiasing for bilinear or bicubic modes.
+ Other mode aren't affected. This is probably what you want to use.
+ - ``False``: will not apply antialiasing for tensors on any mode. PIL
+ images are still antialiased on bilinear or bicubic modes, because
+ PIL doesn't support no antialias.
+ - ``None``: equivalent to ``False`` for tensors and ``True`` for
+ PIL images. This value exists for legacy reasons and you probably
+ don't want to use it unless you really know what you are doing.
+
+ The current default is ``None`` **but will change to** ``True`` **in
+ v0.17** for the PIL and Tensor backends to be consistent.
+ """
+
+ def __init__(
+ self,
+ min_size: int,
+ max_size: int,
+ interpolation: Union[InterpolationMode, int] = InterpolationMode.BILINEAR,
+ antialias: Optional[Union[str, bool]] = "warn",
+ ) -> None:
+ super().__init__()
+ self.min_size = min_size
+ self.max_size = max_size
+ self.interpolation = _check_interpolation(interpolation)
+ self.antialias = antialias
+
+ def _get_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+ size = int(torch.randint(self.min_size, self.max_size, ()))
+ return dict(size=[size])
+
+ def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
+ return self._call_kernel(
+ F.resize, inpt, params["size"], interpolation=self.interpolation, antialias=self.antialias
+ )
diff --git a/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_meta.py b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_meta.py
new file mode 100644
index 0000000000000000000000000000000000000000..9fa31ebef94f61eead6064ad065e141e3db23a6e
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_meta.py
@@ -0,0 +1,42 @@
+from typing import Any, Dict, Union
+
+from torchvision import tv_tensors
+from torchvision.transforms.v2 import functional as F, Transform
+
+
+class ConvertBoundingBoxFormat(Transform):
+ """[BETA] Convert bounding box coordinates to the given ``format``, eg from "CXCYWH" to "XYXY".
+
+ .. v2betastatus:: ConvertBoundingBoxFormat transform
+
+ Args:
+ format (str or tv_tensors.BoundingBoxFormat): output bounding box format.
+ Possible values are defined by :class:`~torchvision.tv_tensors.BoundingBoxFormat` and
+ string values match the enums, e.g. "XYXY" or "XYWH" etc.
+ """
+
+ _transformed_types = (tv_tensors.BoundingBoxes,)
+
+ def __init__(self, format: Union[str, tv_tensors.BoundingBoxFormat]) -> None:
+ super().__init__()
+ if isinstance(format, str):
+ format = tv_tensors.BoundingBoxFormat[format]
+ self.format = format
+
+ def _transform(self, inpt: tv_tensors.BoundingBoxes, params: Dict[str, Any]) -> tv_tensors.BoundingBoxes:
+ return F.convert_bounding_box_format(inpt, new_format=self.format) # type: ignore[return-value]
+
+
+class ClampBoundingBoxes(Transform):
+ """[BETA] Clamp bounding boxes to their corresponding image dimensions.
+
+ The clamping is done according to the bounding boxes' ``canvas_size`` meta-data.
+
+ .. v2betastatus:: ClampBoundingBoxes transform
+
+ """
+
+ _transformed_types = (tv_tensors.BoundingBoxes,)
+
+ def _transform(self, inpt: tv_tensors.BoundingBoxes, params: Dict[str, Any]) -> tv_tensors.BoundingBoxes:
+ return F.clamp_bounding_boxes(inpt) # type: ignore[return-value]
diff --git a/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_utils.py b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..6147180a986f0b2272b1f4f24654b6cbe46edff1
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/_utils.py
@@ -0,0 +1,222 @@
+from __future__ import annotations
+
+import collections.abc
+import numbers
+from contextlib import suppress
+
+from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple, Type, Union
+
+import PIL.Image
+import torch
+
+from torchvision import tv_tensors
+
+from torchvision._utils import sequence_to_str
+
+from torchvision.transforms.transforms import _check_sequence_input, _setup_angle, _setup_size # noqa: F401
+from torchvision.transforms.v2.functional import get_dimensions, get_size, is_pure_tensor
+from torchvision.transforms.v2.functional._utils import _FillType, _FillTypeJIT
+
+
+def _setup_number_or_seq(arg: Union[int, float, Sequence[Union[int, float]]], name: str) -> Sequence[float]:
+ if not isinstance(arg, (int, float, Sequence)):
+ raise TypeError(f"{name} should be a number or a sequence of numbers. Got {type(arg)}")
+ if isinstance(arg, Sequence) and len(arg) not in (1, 2):
+ raise ValueError(f"If {name} is a sequence its length should be 1 or 2. Got {len(arg)}")
+ if isinstance(arg, Sequence):
+ for element in arg:
+ if not isinstance(element, (int, float)):
+ raise ValueError(f"{name} should be a sequence of numbers. Got {type(element)}")
+
+ if isinstance(arg, (int, float)):
+ arg = [float(arg), float(arg)]
+ elif isinstance(arg, Sequence):
+ if len(arg) == 1:
+ arg = [float(arg[0]), float(arg[0])]
+ else:
+ arg = [float(arg[0]), float(arg[1])]
+ return arg
+
+
+def _check_fill_arg(fill: Union[_FillType, Dict[Union[Type, str], _FillType]]) -> None:
+ if isinstance(fill, dict):
+ for value in fill.values():
+ _check_fill_arg(value)
+ else:
+ if fill is not None and not isinstance(fill, (numbers.Number, tuple, list)):
+ raise TypeError("Got inappropriate fill arg, only Numbers, tuples, lists and dicts are allowed.")
+
+
+def _convert_fill_arg(fill: _FillType) -> _FillTypeJIT:
+ # Fill = 0 is not equivalent to None, https://github.com/pytorch/vision/issues/6517
+ # So, we can't reassign fill to 0
+ # if fill is None:
+ # fill = 0
+ if fill is None:
+ return fill
+
+ if not isinstance(fill, (int, float)):
+ fill = [float(v) for v in list(fill)]
+ return fill # type: ignore[return-value]
+
+
+def _setup_fill_arg(fill: Union[_FillType, Dict[Union[Type, str], _FillType]]) -> Dict[Union[Type, str], _FillTypeJIT]:
+ _check_fill_arg(fill)
+
+ if isinstance(fill, dict):
+ for k, v in fill.items():
+ fill[k] = _convert_fill_arg(v)
+ return fill # type: ignore[return-value]
+ else:
+ return {"others": _convert_fill_arg(fill)}
+
+
+def _get_fill(fill_dict, inpt_type):
+ if inpt_type in fill_dict:
+ return fill_dict[inpt_type]
+ elif "others" in fill_dict:
+ return fill_dict["others"]
+ else:
+ RuntimeError("This should never happen, please open an issue on the torchvision repo if you hit this.")
+
+
+def _check_padding_arg(padding: Union[int, Sequence[int]]) -> None:
+ if not isinstance(padding, (numbers.Number, tuple, list)):
+ raise TypeError("Got inappropriate padding arg")
+
+ if isinstance(padding, (tuple, list)) and len(padding) not in [1, 2, 4]:
+ raise ValueError(f"Padding must be an int or a 1, 2, or 4 element tuple, not a {len(padding)} element tuple")
+
+
+# TODO: let's use torchvision._utils.StrEnum to have the best of both worlds (strings and enums)
+# https://github.com/pytorch/vision/issues/6250
+def _check_padding_mode_arg(padding_mode: Literal["constant", "edge", "reflect", "symmetric"]) -> None:
+ if padding_mode not in ["constant", "edge", "reflect", "symmetric"]:
+ raise ValueError("Padding mode should be either constant, edge, reflect or symmetric")
+
+
+def _find_labels_default_heuristic(inputs: Any) -> torch.Tensor:
+ """
+ This heuristic covers three cases:
+
+ 1. The input is tuple or list whose second item is a labels tensor. This happens for already batched
+ classification inputs for MixUp and CutMix (typically after the Dataloder).
+ 2. The input is a tuple or list whose second item is a dictionary that contains the labels tensor
+ under a label-like (see below) key. This happens for the inputs of detection models.
+ 3. The input is a dictionary that is structured as the one from 2.
+
+ What is "label-like" key? We first search for an case-insensitive match of 'labels' inside the keys of the
+ dictionary. This is the name our detection models expect. If we can't find that, we look for a case-insensitive
+ match of the term 'label' anywhere inside the key, i.e. 'FooLaBeLBar'. If we can't find that either, the dictionary
+ contains no "label-like" key.
+ """
+
+ if isinstance(inputs, (tuple, list)):
+ inputs = inputs[1]
+
+ # MixUp, CutMix
+ if is_pure_tensor(inputs):
+ return inputs
+
+ if not isinstance(inputs, collections.abc.Mapping):
+ raise ValueError(
+ f"When using the default labels_getter, the input passed to forward must be a dictionary or a two-tuple "
+ f"whose second item is a dictionary or a tensor, but got {inputs} instead."
+ )
+
+ candidate_key = None
+ with suppress(StopIteration):
+ candidate_key = next(key for key in inputs.keys() if key.lower() == "labels")
+ if candidate_key is None:
+ with suppress(StopIteration):
+ candidate_key = next(key for key in inputs.keys() if "label" in key.lower())
+ if candidate_key is None:
+ raise ValueError(
+ "Could not infer where the labels are in the sample. Try passing a callable as the labels_getter parameter?"
+ "If there are no labels in the sample by design, pass labels_getter=None."
+ )
+
+ return inputs[candidate_key]
+
+
+def _parse_labels_getter(
+ labels_getter: Union[str, Callable[[Any], Optional[torch.Tensor]], None]
+) -> Callable[[Any], Optional[torch.Tensor]]:
+ if labels_getter == "default":
+ return _find_labels_default_heuristic
+ elif callable(labels_getter):
+ return labels_getter
+ elif labels_getter is None:
+ return lambda _: None
+ else:
+ raise ValueError(f"labels_getter should either be 'default', a callable, or None, but got {labels_getter}.")
+
+
+def get_bounding_boxes(flat_inputs: List[Any]) -> tv_tensors.BoundingBoxes:
+ # This assumes there is only one bbox per sample as per the general convention
+ try:
+ return next(inpt for inpt in flat_inputs if isinstance(inpt, tv_tensors.BoundingBoxes))
+ except StopIteration:
+ raise ValueError("No bounding boxes were found in the sample")
+
+
+def query_chw(flat_inputs: List[Any]) -> Tuple[int, int, int]:
+ chws = {
+ tuple(get_dimensions(inpt))
+ for inpt in flat_inputs
+ if check_type(inpt, (is_pure_tensor, tv_tensors.Image, PIL.Image.Image, tv_tensors.Video))
+ }
+ if not chws:
+ raise TypeError("No image or video was found in the sample")
+ elif len(chws) > 1:
+ raise ValueError(f"Found multiple CxHxW dimensions in the sample: {sequence_to_str(sorted(chws))}")
+ c, h, w = chws.pop()
+ return c, h, w
+
+
+def query_size(flat_inputs: List[Any]) -> Tuple[int, int]:
+ sizes = {
+ tuple(get_size(inpt))
+ for inpt in flat_inputs
+ if check_type(
+ inpt,
+ (
+ is_pure_tensor,
+ tv_tensors.Image,
+ PIL.Image.Image,
+ tv_tensors.Video,
+ tv_tensors.Mask,
+ tv_tensors.BoundingBoxes,
+ ),
+ )
+ }
+ if not sizes:
+ raise TypeError("No image, video, mask or bounding box was found in the sample")
+ elif len(sizes) > 1:
+ raise ValueError(f"Found multiple HxW dimensions in the sample: {sequence_to_str(sorted(sizes))}")
+ h, w = sizes.pop()
+ return h, w
+
+
+def check_type(obj: Any, types_or_checks: Tuple[Union[Type, Callable[[Any], bool]], ...]) -> bool:
+ for type_or_check in types_or_checks:
+ if isinstance(obj, type_or_check) if isinstance(type_or_check, type) else type_or_check(obj):
+ return True
+ return False
+
+
+def has_any(flat_inputs: List[Any], *types_or_checks: Union[Type, Callable[[Any], bool]]) -> bool:
+ for inpt in flat_inputs:
+ if check_type(inpt, types_or_checks):
+ return True
+ return False
+
+
+def has_all(flat_inputs: List[Any], *types_or_checks: Union[Type, Callable[[Any], bool]]) -> bool:
+ for type_or_check in types_or_checks:
+ for inpt in flat_inputs:
+ if isinstance(inpt, type_or_check) if isinstance(type_or_check, type) else type_or_check(inpt):
+ break
+ else:
+ return False
+ return True
diff --git a/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/functional/_misc.py b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/functional/_misc.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a6468bb46a7c3b75f9c0d301fc419f7a0c5a55a
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/functional/_misc.py
@@ -0,0 +1,277 @@
+import math
+from typing import List, Optional
+
+import PIL.Image
+import torch
+from torch.nn.functional import conv2d, pad as torch_pad
+
+from torchvision import tv_tensors
+from torchvision.transforms._functional_tensor import _max_value
+from torchvision.transforms.functional import pil_to_tensor, to_pil_image
+
+from torchvision.utils import _log_api_usage_once
+
+from ._utils import _get_kernel, _register_kernel_internal
+
+
+def normalize(
+ inpt: torch.Tensor,
+ mean: List[float],
+ std: List[float],
+ inplace: bool = False,
+) -> torch.Tensor:
+ """[BETA] See :class:`~torchvision.transforms.v2.Normalize` for details."""
+ if torch.jit.is_scripting():
+ return normalize_image(inpt, mean=mean, std=std, inplace=inplace)
+
+ _log_api_usage_once(normalize)
+
+ kernel = _get_kernel(normalize, type(inpt))
+ return kernel(inpt, mean=mean, std=std, inplace=inplace)
+
+
+@_register_kernel_internal(normalize, torch.Tensor)
+@_register_kernel_internal(normalize, tv_tensors.Image)
+def normalize_image(image: torch.Tensor, mean: List[float], std: List[float], inplace: bool = False) -> torch.Tensor:
+ if not image.is_floating_point():
+ raise TypeError(f"Input tensor should be a float tensor. Got {image.dtype}.")
+
+ if image.ndim < 3:
+ raise ValueError(f"Expected tensor to be a tensor image of size (..., C, H, W). Got {image.shape}.")
+
+ if isinstance(std, (tuple, list)):
+ divzero = not all(std)
+ elif isinstance(std, (int, float)):
+ divzero = std == 0
+ else:
+ divzero = False
+ if divzero:
+ raise ValueError("std evaluated to zero, leading to division by zero.")
+
+ dtype = image.dtype
+ device = image.device
+ mean = torch.as_tensor(mean, dtype=dtype, device=device)
+ std = torch.as_tensor(std, dtype=dtype, device=device)
+ if mean.ndim == 1:
+ mean = mean.view(-1, 1, 1)
+ if std.ndim == 1:
+ std = std.view(-1, 1, 1)
+
+ if inplace:
+ image = image.sub_(mean)
+ else:
+ image = image.sub(mean)
+
+ return image.div_(std)
+
+
+@_register_kernel_internal(normalize, tv_tensors.Video)
+def normalize_video(video: torch.Tensor, mean: List[float], std: List[float], inplace: bool = False) -> torch.Tensor:
+ return normalize_image(video, mean, std, inplace=inplace)
+
+
+def gaussian_blur(inpt: torch.Tensor, kernel_size: List[int], sigma: Optional[List[float]] = None) -> torch.Tensor:
+ """[BETA] See :class:`~torchvision.transforms.v2.GaussianBlur` for details."""
+ if torch.jit.is_scripting():
+ return gaussian_blur_image(inpt, kernel_size=kernel_size, sigma=sigma)
+
+ _log_api_usage_once(gaussian_blur)
+
+ kernel = _get_kernel(gaussian_blur, type(inpt))
+ return kernel(inpt, kernel_size=kernel_size, sigma=sigma)
+
+
+def _get_gaussian_kernel1d(kernel_size: int, sigma: float, dtype: torch.dtype, device: torch.device) -> torch.Tensor:
+ lim = (kernel_size - 1) / (2.0 * math.sqrt(2.0) * sigma)
+ x = torch.linspace(-lim, lim, steps=kernel_size, dtype=dtype, device=device)
+ kernel1d = torch.softmax(x.pow_(2).neg_(), dim=0)
+ return kernel1d
+
+
+def _get_gaussian_kernel2d(
+ kernel_size: List[int], sigma: List[float], dtype: torch.dtype, device: torch.device
+) -> torch.Tensor:
+ kernel1d_x = _get_gaussian_kernel1d(kernel_size[0], sigma[0], dtype, device)
+ kernel1d_y = _get_gaussian_kernel1d(kernel_size[1], sigma[1], dtype, device)
+ kernel2d = kernel1d_y.unsqueeze(-1) * kernel1d_x
+ return kernel2d
+
+
+@_register_kernel_internal(gaussian_blur, torch.Tensor)
+@_register_kernel_internal(gaussian_blur, tv_tensors.Image)
+def gaussian_blur_image(
+ image: torch.Tensor, kernel_size: List[int], sigma: Optional[List[float]] = None
+) -> torch.Tensor:
+ # TODO: consider deprecating integers from sigma on the future
+ if isinstance(kernel_size, int):
+ kernel_size = [kernel_size, kernel_size]
+ elif len(kernel_size) != 2:
+ raise ValueError(f"If kernel_size is a sequence its length should be 2. Got {len(kernel_size)}")
+ for ksize in kernel_size:
+ if ksize % 2 == 0 or ksize < 0:
+ raise ValueError(f"kernel_size should have odd and positive integers. Got {kernel_size}")
+
+ if sigma is None:
+ sigma = [ksize * 0.15 + 0.35 for ksize in kernel_size]
+ else:
+ if isinstance(sigma, (list, tuple)):
+ length = len(sigma)
+ if length == 1:
+ s = float(sigma[0])
+ sigma = [s, s]
+ elif length != 2:
+ raise ValueError(f"If sigma is a sequence, its length should be 2. Got {length}")
+ elif isinstance(sigma, (int, float)):
+ s = float(sigma)
+ sigma = [s, s]
+ else:
+ raise TypeError(f"sigma should be either float or sequence of floats. Got {type(sigma)}")
+ for s in sigma:
+ if s <= 0.0:
+ raise ValueError(f"sigma should have positive values. Got {sigma}")
+
+ if image.numel() == 0:
+ return image
+
+ dtype = image.dtype
+ shape = image.shape
+ ndim = image.ndim
+ if ndim == 3:
+ image = image.unsqueeze(dim=0)
+ elif ndim > 4:
+ image = image.reshape((-1,) + shape[-3:])
+
+ fp = torch.is_floating_point(image)
+ kernel = _get_gaussian_kernel2d(kernel_size, sigma, dtype=dtype if fp else torch.float32, device=image.device)
+ kernel = kernel.expand(shape[-3], 1, kernel.shape[0], kernel.shape[1])
+
+ output = image if fp else image.to(dtype=torch.float32)
+
+ # padding = (left, right, top, bottom)
+ padding = [kernel_size[0] // 2, kernel_size[0] // 2, kernel_size[1] // 2, kernel_size[1] // 2]
+ output = torch_pad(output, padding, mode="reflect")
+ output = conv2d(output, kernel, groups=shape[-3])
+
+ if ndim == 3:
+ output = output.squeeze(dim=0)
+ elif ndim > 4:
+ output = output.reshape(shape)
+
+ if not fp:
+ output = output.round_().to(dtype=dtype)
+
+ return output
+
+
+@_register_kernel_internal(gaussian_blur, PIL.Image.Image)
+def _gaussian_blur_image_pil(
+ image: PIL.Image.Image, kernel_size: List[int], sigma: Optional[List[float]] = None
+) -> PIL.Image.Image:
+ t_img = pil_to_tensor(image)
+ output = gaussian_blur_image(t_img, kernel_size=kernel_size, sigma=sigma)
+ return to_pil_image(output, mode=image.mode)
+
+
+@_register_kernel_internal(gaussian_blur, tv_tensors.Video)
+def gaussian_blur_video(
+ video: torch.Tensor, kernel_size: List[int], sigma: Optional[List[float]] = None
+) -> torch.Tensor:
+ return gaussian_blur_image(video, kernel_size, sigma)
+
+
+def to_dtype(inpt: torch.Tensor, dtype: torch.dtype = torch.float, scale: bool = False) -> torch.Tensor:
+ """[BETA] See :func:`~torchvision.transforms.v2.ToDtype` for details."""
+ if torch.jit.is_scripting():
+ return to_dtype_image(inpt, dtype=dtype, scale=scale)
+
+ _log_api_usage_once(to_dtype)
+
+ kernel = _get_kernel(to_dtype, type(inpt))
+ return kernel(inpt, dtype=dtype, scale=scale)
+
+
+def _num_value_bits(dtype: torch.dtype) -> int:
+ if dtype == torch.uint8:
+ return 8
+ elif dtype == torch.int8:
+ return 7
+ elif dtype == torch.int16:
+ return 15
+ elif dtype == torch.int32:
+ return 31
+ elif dtype == torch.int64:
+ return 63
+ else:
+ raise TypeError(f"Number of value bits is only defined for integer dtypes, but got {dtype}.")
+
+
+@_register_kernel_internal(to_dtype, torch.Tensor)
+@_register_kernel_internal(to_dtype, tv_tensors.Image)
+def to_dtype_image(image: torch.Tensor, dtype: torch.dtype = torch.float, scale: bool = False) -> torch.Tensor:
+
+ if image.dtype == dtype:
+ return image
+ elif not scale:
+ return image.to(dtype)
+
+ float_input = image.is_floating_point()
+ if torch.jit.is_scripting():
+ # TODO: remove this branch as soon as `dtype.is_floating_point` is supported by JIT
+ float_output = torch.tensor(0, dtype=dtype).is_floating_point()
+ else:
+ float_output = dtype.is_floating_point
+
+ if float_input:
+ # float to float
+ if float_output:
+ return image.to(dtype)
+
+ # float to int
+ if (image.dtype == torch.float32 and dtype in (torch.int32, torch.int64)) or (
+ image.dtype == torch.float64 and dtype == torch.int64
+ ):
+ raise RuntimeError(f"The conversion from {image.dtype} to {dtype} cannot be performed safely.")
+
+ # For data in the range `[0.0, 1.0]`, just multiplying by the maximum value of the integer range and converting
+ # to the integer dtype is not sufficient. For example, `torch.rand(...).mul(255).to(torch.uint8)` will only
+ # be `255` if the input is exactly `1.0`. See https://github.com/pytorch/vision/pull/2078#issuecomment-612045321
+ # for a detailed analysis.
+ # To mitigate this, we could round before we convert to the integer dtype, but this is an extra operation.
+ # Instead, we can also multiply by the maximum value plus something close to `1`. See
+ # https://github.com/pytorch/vision/pull/2078#issuecomment-613524965 for details.
+ eps = 1e-3
+ max_value = float(_max_value(dtype))
+ # We need to scale first since the conversion would otherwise turn the input range `[0.0, 1.0]` into the
+ # discrete set `{0, 1}`.
+ return image.mul(max_value + 1.0 - eps).to(dtype)
+ else:
+ # int to float
+ if float_output:
+ return image.to(dtype).mul_(1.0 / _max_value(image.dtype))
+
+ # int to int
+ num_value_bits_input = _num_value_bits(image.dtype)
+ num_value_bits_output = _num_value_bits(dtype)
+
+ if num_value_bits_input > num_value_bits_output:
+ return image.bitwise_right_shift(num_value_bits_input - num_value_bits_output).to(dtype)
+ else:
+ return image.to(dtype).bitwise_left_shift_(num_value_bits_output - num_value_bits_input)
+
+
+# We encourage users to use to_dtype() instead but we keep this for BC
+def convert_image_dtype(image: torch.Tensor, dtype: torch.dtype = torch.float32) -> torch.Tensor:
+ """[BETA] [DEPRECATED] Use to_dtype() instead."""
+ return to_dtype_image(image, dtype=dtype, scale=True)
+
+
+@_register_kernel_internal(to_dtype, tv_tensors.Video)
+def to_dtype_video(video: torch.Tensor, dtype: torch.dtype = torch.float, scale: bool = False) -> torch.Tensor:
+ return to_dtype_image(video, dtype, scale=scale)
+
+
+@_register_kernel_internal(to_dtype, tv_tensors.BoundingBoxes, tv_tensor_wrapper=False)
+@_register_kernel_internal(to_dtype, tv_tensors.Mask, tv_tensor_wrapper=False)
+def _to_dtype_tensor_dispatch(inpt: torch.Tensor, dtype: torch.dtype, scale: bool = False) -> torch.Tensor:
+ # We don't need to unwrap and rewrap here, since TVTensor.to() preserves the type
+ return inpt.to(dtype)
diff --git a/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/functional/_type_conversion.py b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/functional/_type_conversion.py
new file mode 100644
index 0000000000000000000000000000000000000000..062f85198eeee6af73eebedc1ac34916c4c1623f
--- /dev/null
+++ b/llava_next/lib/python3.10/site-packages/torchvision/transforms/v2/functional/_type_conversion.py
@@ -0,0 +1,25 @@
+from typing import Union
+
+import numpy as np
+import PIL.Image
+import torch
+from torchvision import tv_tensors
+from torchvision.transforms import functional as _F
+
+
+@torch.jit.unused
+def to_image(inpt: Union[torch.Tensor, PIL.Image.Image, np.ndarray]) -> tv_tensors.Image:
+ """[BETA] See :class:`~torchvision.transforms.v2.ToImage` for details."""
+ if isinstance(inpt, np.ndarray):
+ output = torch.from_numpy(inpt).permute((2, 0, 1)).contiguous()
+ elif isinstance(inpt, PIL.Image.Image):
+ output = pil_to_tensor(inpt)
+ elif isinstance(inpt, torch.Tensor):
+ output = inpt
+ else:
+ raise TypeError(f"Input can either be a numpy array or a PIL image, but got {type(inpt)} instead.")
+ return tv_tensors.Image(output)
+
+
+to_pil_image = _F.to_pil_image
+pil_to_tensor = _F.pil_to_tensor
diff --git a/vlmpy310/lib/python3.10/site-packages/notebook/static/1684.17fc47c7fb30c0a8e713.js b/vlmpy310/lib/python3.10/site-packages/notebook/static/1684.17fc47c7fb30c0a8e713.js
new file mode 100644
index 0000000000000000000000000000000000000000..0509b6f103776512be3d616743401387e404da81
--- /dev/null
+++ b/vlmpy310/lib/python3.10/site-packages/notebook/static/1684.17fc47c7fb30c0a8e713.js
@@ -0,0 +1,99 @@
+"use strict";
+(self["webpackChunk_JUPYTERLAB_CORE_OUTPUT"] = self["webpackChunk_JUPYTERLAB_CORE_OUTPUT"] || []).push([[1684,5601],{
+
+/***/ 95601:
+/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
+
+__webpack_require__.r(__webpack_exports__);
+/* harmony export */ __webpack_require__.d(__webpack_exports__, {
+/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
+/* harmony export */ });
+/* harmony import */ var _jupyterlab_application__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(38075);
+/* harmony import */ var _jupyterlab_application__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_jupyterlab_application__WEBPACK_IMPORTED_MODULE_0__);
+/* harmony import */ var _jupyterlab_coreutils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(76107);
+/* harmony import */ var _jupyterlab_coreutils__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_jupyterlab_coreutils__WEBPACK_IMPORTED_MODULE_1__);
+/* harmony import */ var _jupyterlab_terminal__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(27368);
+/* harmony import */ var _jupyterlab_terminal__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(_jupyterlab_terminal__WEBPACK_IMPORTED_MODULE_2__);
+/* harmony import */ var _jupyter_notebook_application__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(87362);
+/* harmony import */ var _jupyter_notebook_application__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(_jupyter_notebook_application__WEBPACK_IMPORTED_MODULE_3__);
+/* harmony import */ var _lumino_algorithm__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(14931);
+/* harmony import */ var _lumino_algorithm__WEBPACK_IMPORTED_MODULE_4___default = /*#__PURE__*/__webpack_require__.n(_lumino_algorithm__WEBPACK_IMPORTED_MODULE_4__);
+// Copyright (c) Jupyter Development Team.
+// Distributed under the terms of the Modified BSD License.
+
+
+
+
+
+/**
+ * A plugin to open terminals in a new tab
+ */
+const opener = {
+ id: '@jupyter-notebook/terminal-extension:opener',
+ description: 'A plugin to open terminals in a new tab.',
+ requires: [_jupyterlab_application__WEBPACK_IMPORTED_MODULE_0__.IRouter, _jupyterlab_terminal__WEBPACK_IMPORTED_MODULE_2__.ITerminalTracker],
+ autoStart: true,
+ activate: (app, router, tracker) => {
+ const { commands } = app;
+ const terminalPattern = new RegExp('/terminals/(.*)');
+ const command = 'router:terminal';
+ commands.addCommand(command, {
+ execute: (args) => {
+ const parsed = args;
+ const matches = parsed.path.match(terminalPattern);
+ if (!matches) {
+ return;
+ }
+ const [, name] = matches;
+ if (!name) {
+ return;
+ }
+ tracker.widgetAdded.connect((send, terminal) => {
+ terminal.content.setOption('closeOnExit', false);
+ });
+ commands.execute('terminal:open', { name });
+ },
+ });
+ router.register({ command, pattern: terminalPattern });
+ },
+};
+/**
+ * Open terminals in a new tab.
+ */
+const redirect = {
+ id: '@jupyter-notebook/terminal-extension:redirect',
+ description: 'Open terminals in a new tab.',
+ requires: [_jupyterlab_terminal__WEBPACK_IMPORTED_MODULE_2__.ITerminalTracker],
+ optional: [_jupyter_notebook_application__WEBPACK_IMPORTED_MODULE_3__.INotebookPathOpener],
+ autoStart: true,
+ activate: (app, tracker, notebookPathOpener) => {
+ const baseUrl = _jupyterlab_coreutils__WEBPACK_IMPORTED_MODULE_1__.PageConfig.getBaseUrl();
+ const opener = notebookPathOpener !== null && notebookPathOpener !== void 0 ? notebookPathOpener : _jupyter_notebook_application__WEBPACK_IMPORTED_MODULE_3__.defaultNotebookPathOpener;
+ tracker.widgetAdded.connect((send, terminal) => {
+ const widget = (0,_lumino_algorithm__WEBPACK_IMPORTED_MODULE_4__.find)(app.shell.widgets('main'), (w) => w.id === terminal.id);
+ if (widget) {
+ // bail if the terminal is already added to the main area
+ return;
+ }
+ const name = terminal.content.session.name;
+ opener.open({
+ prefix: _jupyterlab_coreutils__WEBPACK_IMPORTED_MODULE_1__.URLExt.join(baseUrl, 'terminals'),
+ path: name,
+ target: '_blank',
+ });
+ // dispose the widget since it is not used on this page
+ terminal.dispose();
+ });
+ },
+};
+/**
+ * Export the plugins as default.
+ */
+const plugins = [opener, redirect];
+/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (plugins);
+
+
+/***/ })
+
+}]);
+//# sourceMappingURL=1684.17fc47c7fb30c0a8e713.js.map?v=17fc47c7fb30c0a8e713
\ No newline at end of file
diff --git a/vlmpy310/lib/python3.10/site-packages/notebook/static/2324.4c423682e2c93316a122.js b/vlmpy310/lib/python3.10/site-packages/notebook/static/2324.4c423682e2c93316a122.js
new file mode 100644
index 0000000000000000000000000000000000000000..2030dc11712209269cef5dc57a1074e421c3be05
--- /dev/null
+++ b/vlmpy310/lib/python3.10/site-packages/notebook/static/2324.4c423682e2c93316a122.js
@@ -0,0 +1,7542 @@
+(self["webpackChunk_JUPYTERLAB_CORE_OUTPUT"] = self["webpackChunk_JUPYTERLAB_CORE_OUTPUT"] || []).push([[2324],{
+
+/***/ 26746:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+(function(f){if(true){module.exports=f()}else { var g; }})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=undefined;if(!f&&c)return require(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=undefined,i=0;i 0 && arguments[0] !== undefined ? arguments[0] : {},
+ _ref$defaultLayoutOpt = _ref.defaultLayoutOptions,
+ defaultLayoutOptions = _ref$defaultLayoutOpt === undefined ? {} : _ref$defaultLayoutOpt,
+ _ref$algorithms = _ref.algorithms,
+ algorithms = _ref$algorithms === undefined ? ['layered', 'stress', 'mrtree', 'radial', 'force', 'disco', 'sporeOverlap', 'sporeCompaction', 'rectpacking'] : _ref$algorithms,
+ workerFactory = _ref.workerFactory,
+ workerUrl = _ref.workerUrl;
+
+ _classCallCheck(this, ELK);
+
+ this.defaultLayoutOptions = defaultLayoutOptions;
+ this.initialized = false;
+
+ // check valid worker construction possible
+ if (typeof workerUrl === 'undefined' && typeof workerFactory === 'undefined') {
+ throw new Error("Cannot construct an ELK without both 'workerUrl' and 'workerFactory'.");
+ }
+ var factory = workerFactory;
+ if (typeof workerUrl !== 'undefined' && typeof workerFactory === 'undefined') {
+ // use default Web Worker
+ factory = function factory(url) {
+ return new Worker(url);
+ };
+ }
+
+ // create the worker
+ var worker = factory(workerUrl);
+ if (typeof worker.postMessage !== 'function') {
+ throw new TypeError("Created worker does not provide" + " the required 'postMessage' function.");
+ }
+
+ // wrap the worker to return promises
+ this.worker = new PromisedWorker(worker);
+
+ // initially register algorithms
+ this.worker.postMessage({
+ cmd: 'register',
+ algorithms: algorithms
+ }).then(function (r) {
+ return _this.initialized = true;
+ }).catch(console.err);
+ }
+
+ _createClass(ELK, [{
+ key: 'layout',
+ value: function layout(graph) {
+ var _ref2 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ _ref2$layoutOptions = _ref2.layoutOptions,
+ layoutOptions = _ref2$layoutOptions === undefined ? this.defaultLayoutOptions : _ref2$layoutOptions,
+ _ref2$logging = _ref2.logging,
+ logging = _ref2$logging === undefined ? false : _ref2$logging,
+ _ref2$measureExecutio = _ref2.measureExecutionTime,
+ measureExecutionTime = _ref2$measureExecutio === undefined ? false : _ref2$measureExecutio;
+
+ if (!graph) {
+ return Promise.reject(new Error("Missing mandatory parameter 'graph'."));
+ }
+ return this.worker.postMessage({
+ cmd: 'layout',
+ graph: graph,
+ layoutOptions: layoutOptions,
+ options: {
+ logging: logging,
+ measureExecutionTime: measureExecutionTime
+ }
+ });
+ }
+ }, {
+ key: 'knownLayoutAlgorithms',
+ value: function knownLayoutAlgorithms() {
+ return this.worker.postMessage({ cmd: 'algorithms' });
+ }
+ }, {
+ key: 'knownLayoutOptions',
+ value: function knownLayoutOptions() {
+ return this.worker.postMessage({ cmd: 'options' });
+ }
+ }, {
+ key: 'knownLayoutCategories',
+ value: function knownLayoutCategories() {
+ return this.worker.postMessage({ cmd: 'categories' });
+ }
+ }, {
+ key: 'terminateWorker',
+ value: function terminateWorker() {
+ this.worker.terminate();
+ }
+ }]);
+
+ return ELK;
+}();
+
+exports.default = ELK;
+
+var PromisedWorker = function () {
+ function PromisedWorker(worker) {
+ var _this2 = this;
+
+ _classCallCheck(this, PromisedWorker);
+
+ if (worker === undefined) {
+ throw new Error("Missing mandatory parameter 'worker'.");
+ }
+ this.resolvers = {};
+ this.worker = worker;
+ this.worker.onmessage = function (answer) {
+ // why is this necessary?
+ setTimeout(function () {
+ _this2.receive(_this2, answer);
+ }, 0);
+ };
+ }
+
+ _createClass(PromisedWorker, [{
+ key: 'postMessage',
+ value: function postMessage(msg) {
+ var id = this.id || 0;
+ this.id = id + 1;
+ msg.id = id;
+ var self = this;
+ return new Promise(function (resolve, reject) {
+ // prepare the resolver
+ self.resolvers[id] = function (err, res) {
+ if (err) {
+ self.convertGwtStyleError(err);
+ reject(err);
+ } else {
+ resolve(res);
+ }
+ };
+ // post the message
+ self.worker.postMessage(msg);
+ });
+ }
+ }, {
+ key: 'receive',
+ value: function receive(self, answer) {
+ var json = answer.data;
+ var resolver = self.resolvers[json.id];
+ if (resolver) {
+ delete self.resolvers[json.id];
+ if (json.error) {
+ resolver(json.error);
+ } else {
+ resolver(null, json.data);
+ }
+ }
+ }
+ }, {
+ key: 'terminate',
+ value: function terminate() {
+ if (this.worker.terminate) {
+ this.worker.terminate();
+ }
+ }
+ }, {
+ key: 'convertGwtStyleError',
+ value: function convertGwtStyleError(err) {
+ if (!err) {
+ return;
+ }
+ // Somewhat flatten the way GWT stores nested exception(s)
+ var javaException = err['__java$exception'];
+ if (javaException) {
+ // Note that the property name of the nested exception is different
+ // in the non-minified ('cause') and the minified (not deterministic) version.
+ // Hence, the version below only works for the non-minified version.
+ // However, as the minified stack trace is not of much use anyway, one
+ // should switch the used version for debugging in such a case.
+ if (javaException.cause && javaException.cause.backingJsObject) {
+ err.cause = javaException.cause.backingJsObject;
+ this.convertGwtStyleError(err.cause);
+ }
+ delete err['__java$exception'];
+ }
+ }
+ }]);
+
+ return PromisedWorker;
+}();
+},{}],2:[function(require,module,exports){
+(function (global){(function (){
+'use strict';
+
+// -------------- FAKE ELEMENTS GWT ASSUMES EXIST --------------
+var $wnd;
+if (typeof window !== 'undefined')
+ $wnd = window
+else if (typeof global !== 'undefined')
+ $wnd = global // nodejs
+else if (typeof self !== 'undefined')
+ $wnd = self // web worker
+
+var $moduleName,
+ $moduleBase;
+
+// -------------- WORKAROUND STRICT MODE, SEE #127 --------------
+var g, i, o;
+
+// -------------- GENERATED CODE --------------
+function nb(){}
+function xb(){}
+function Fd(){}
+function hh(){}
+function lq(){}
+function Nq(){}
+function ir(){}
+function Ws(){}
+function Zw(){}
+function jx(){}
+function rx(){}
+function sx(){}
+function My(){}
+function bA(){}
+function mA(){}
+function tA(){}
+function aB(){}
+function dB(){}
+function jB(){}
+function dC(){}
+function keb(){}
+function geb(){}
+function oeb(){}
+function iob(){}
+function Job(){}
+function Rob(){}
+function apb(){}
+function ipb(){}
+function nrb(){}
+function wrb(){}
+function Brb(){}
+function Prb(){}
+function ltb(){}
+function svb(){}
+function xvb(){}
+function zvb(){}
+function $xb(){}
+function Gzb(){}
+function NAb(){}
+function VAb(){}
+function rBb(){}
+function RBb(){}
+function TBb(){}
+function XBb(){}
+function ZBb(){}
+function _Bb(){}
+function bCb(){}
+function dCb(){}
+function fCb(){}
+function jCb(){}
+function rCb(){}
+function uCb(){}
+function wCb(){}
+function yCb(){}
+function ACb(){}
+function ECb(){}
+function FEb(){}
+function IEb(){}
+function KEb(){}
+function MEb(){}
+function gFb(){}
+function FFb(){}
+function JFb(){}
+function xGb(){}
+function AGb(){}
+function YGb(){}
+function oHb(){}
+function tHb(){}
+function xHb(){}
+function pIb(){}
+function BJb(){}
+function kLb(){}
+function mLb(){}
+function oLb(){}
+function qLb(){}
+function FLb(){}
+function JLb(){}
+function KMb(){}
+function MMb(){}
+function OMb(){}
+function YMb(){}
+function MNb(){}
+function ONb(){}
+function aOb(){}
+function eOb(){}
+function xOb(){}
+function BOb(){}
+function DOb(){}
+function FOb(){}
+function IOb(){}
+function MOb(){}
+function POb(){}
+function UOb(){}
+function ZOb(){}
+function cPb(){}
+function gPb(){}
+function nPb(){}
+function qPb(){}
+function tPb(){}
+function wPb(){}
+function CPb(){}
+function qQb(){}
+function GQb(){}
+function bRb(){}
+function gRb(){}
+function kRb(){}
+function pRb(){}
+function wRb(){}
+function xSb(){}
+function TSb(){}
+function VSb(){}
+function XSb(){}
+function ZSb(){}
+function _Sb(){}
+function tTb(){}
+function DTb(){}
+function FTb(){}
+function FXb(){}
+function hXb(){}
+function hWb(){}
+function mWb(){}
+function CVb(){}
+function XXb(){}
+function $Xb(){}
+function bYb(){}
+function lYb(){}
+function FYb(){}
+function XYb(){}
+function aZb(){}
+function SZb(){}
+function ZZb(){}
+function Z_b(){}
+function j_b(){}
+function j$b(){}
+function b$b(){}
+function f$b(){}
+function n$b(){}
+function K_b(){}
+function V_b(){}
+function b0b(){}
+function l0b(){}
+function X1b(){}
+function _1b(){}
+function x3b(){}
+function r4b(){}
+function w4b(){}
+function A4b(){}
+function E4b(){}
+function I4b(){}
+function M4b(){}
+function o5b(){}
+function q5b(){}
+function w5b(){}
+function A5b(){}
+function E5b(){}
+function h6b(){}
+function j6b(){}
+function l6b(){}
+function q6b(){}
+function v6b(){}
+function y6b(){}
+function G6b(){}
+function K6b(){}
+function N6b(){}
+function P6b(){}
+function R6b(){}
+function b7b(){}
+function f7b(){}
+function j7b(){}
+function n7b(){}
+function C7b(){}
+function H7b(){}
+function J7b(){}
+function L7b(){}
+function N7b(){}
+function P7b(){}
+function a8b(){}
+function c8b(){}
+function e8b(){}
+function g8b(){}
+function i8b(){}
+function m8b(){}
+function Z8b(){}
+function f9b(){}
+function i9b(){}
+function o9b(){}
+function C9b(){}
+function F9b(){}
+function K9b(){}
+function Q9b(){}
+function aac(){}
+function bac(){}
+function eac(){}
+function mac(){}
+function pac(){}
+function rac(){}
+function tac(){}
+function xac(){}
+function Aac(){}
+function Dac(){}
+function Iac(){}
+function Oac(){}
+function Uac(){}
+function Ucc(){}
+function scc(){}
+function ycc(){}
+function Acc(){}
+function Ccc(){}
+function Ncc(){}
+function Wcc(){}
+function ydc(){}
+function Adc(){}
+function Gdc(){}
+function Ldc(){}
+function Zdc(){}
+function fec(){}
+function Dec(){}
+function Gec(){}
+function Kec(){}
+function efc(){}
+function jfc(){}
+function nfc(){}
+function Bfc(){}
+function Ifc(){}
+function Lfc(){}
+function Rfc(){}
+function Ufc(){}
+function Zfc(){}
+function cgc(){}
+function egc(){}
+function ggc(){}
+function igc(){}
+function kgc(){}
+function Dgc(){}
+function Hgc(){}
+function Lgc(){}
+function Ngc(){}
+function Pgc(){}
+function Vgc(){}
+function Ygc(){}
+function chc(){}
+function ehc(){}
+function ghc(){}
+function ihc(){}
+function mhc(){}
+function rhc(){}
+function uhc(){}
+function whc(){}
+function yhc(){}
+function Ahc(){}
+function Chc(){}
+function Ghc(){}
+function Nhc(){}
+function Phc(){}
+function Rhc(){}
+function Thc(){}
+function $hc(){}
+function aic(){}
+function cic(){}
+function eic(){}
+function jic(){}
+function nic(){}
+function pic(){}
+function ric(){}
+function vic(){}
+function yic(){}
+function Dic(){}
+function Ric(){}
+function Zic(){}
+function bjc(){}
+function djc(){}
+function jjc(){}
+function njc(){}
+function rjc(){}
+function tjc(){}
+function zjc(){}
+function Djc(){}
+function Fjc(){}
+function Ljc(){}
+function Pjc(){}
+function Rjc(){}
+function fkc(){}
+function Kkc(){}
+function Mkc(){}
+function Okc(){}
+function Qkc(){}
+function Skc(){}
+function Ukc(){}
+function Wkc(){}
+function clc(){}
+function elc(){}
+function klc(){}
+function mlc(){}
+function olc(){}
+function qlc(){}
+function wlc(){}
+function ylc(){}
+function Alc(){}
+function Jlc(){}
+function Joc(){}
+function poc(){}
+function roc(){}
+function toc(){}
+function voc(){}
+function Boc(){}
+function Foc(){}
+function Hoc(){}
+function Loc(){}
+function Noc(){}
+function Poc(){}
+function qnc(){}
+function unc(){}
+function upc(){}
+function kpc(){}
+function mpc(){}
+function opc(){}
+function qpc(){}
+function ypc(){}
+function Cpc(){}
+function Mpc(){}
+function Qpc(){}
+function dqc(){}
+function jqc(){}
+function Aqc(){}
+function Eqc(){}
+function Gqc(){}
+function Sqc(){}
+function arc(){}
+function lrc(){}
+function zrc(){}
+function Hrc(){}
+function bsc(){}
+function dsc(){}
+function fsc(){}
+function ksc(){}
+function msc(){}
+function Asc(){}
+function Csc(){}
+function Esc(){}
+function Ksc(){}
+function Nsc(){}
+function Ssc(){}
+function CCc(){}
+function tGc(){}
+function aHc(){}
+function gHc(){}
+function nIc(){}
+function PJc(){}
+function XKc(){}
+function fLc(){}
+function hLc(){}
+function lLc(){}
+function eNc(){}
+function IOc(){}
+function MOc(){}
+function WOc(){}
+function YOc(){}
+function $Oc(){}
+function cPc(){}
+function iPc(){}
+function mPc(){}
+function oPc(){}
+function qPc(){}
+function sPc(){}
+function wPc(){}
+function APc(){}
+function FPc(){}
+function HPc(){}
+function NPc(){}
+function PPc(){}
+function TPc(){}
+function VPc(){}
+function ZPc(){}
+function _Pc(){}
+function bQc(){}
+function dQc(){}
+function SQc(){}
+function hRc(){}
+function HRc(){}
+function HSc(){}
+function pSc(){}
+function xSc(){}
+function zSc(){}
+function BSc(){}
+function DSc(){}
+function FSc(){}
+function CTc(){}
+function ITc(){}
+function KTc(){}
+function MTc(){}
+function XTc(){}
+function ZTc(){}
+function jVc(){}
+function lVc(){}
+function zVc(){}
+function IVc(){}
+function KVc(){}
+function KWc(){}
+function uWc(){}
+function xWc(){}
+function AWc(){}
+function QWc(){}
+function UWc(){}
+function qXc(){}
+function KXc(){}
+function OXc(){}
+function SXc(){}
+function $Xc(){}
+function mYc(){}
+function rYc(){}
+function zYc(){}
+function DYc(){}
+function FYc(){}
+function HYc(){}
+function JYc(){}
+function cZc(){}
+function gZc(){}
+function iZc(){}
+function pZc(){}
+function tZc(){}
+function vZc(){}
+function AZc(){}
+function GZc(){}
+function l_c(){}
+function l1c(){}
+function b1c(){}
+function d1c(){}
+function h1c(){}
+function n1c(){}
+function r1c(){}
+function v1c(){}
+function x1c(){}
+function D1c(){}
+function H1c(){}
+function L1c(){}
+function R1c(){}
+function V1c(){}
+function Z1c(){}
+function Z0c(){}
+function a0c(){}
+function c0c(){}
+function e0c(){}
+function k0c(){}
+function o0c(){}
+function b2c(){}
+function l2c(){}
+function p2c(){}
+function Y2c(){}
+function _2c(){}
+function A3c(){}
+function F3c(){}
+function I3c(){}
+function K3c(){}
+function M3c(){}
+function Q3c(){}
+function U3c(){}
+function c5c(){}
+function D5c(){}
+function G5c(){}
+function J5c(){}
+function N5c(){}
+function V5c(){}
+function p6c(){}
+function s6c(){}
+function H6c(){}
+function K6c(){}
+function _7c(){}
+function h8c(){}
+function j8c(){}
+function o8c(){}
+function r8c(){}
+function u8c(){}
+function R8c(){}
+function X8c(){}
+function o9c(){}
+function s9c(){}
+function x9c(){}
+function Qad(){}
+function rcd(){}
+function Xcd(){}
+function vdd(){}
+function Tdd(){}
+function _dd(){}
+function qed(){}
+function sed(){}
+function ved(){}
+function Hed(){}
+function Zed(){}
+function bfd(){}
+function ifd(){}
+function Gfd(){}
+function Ifd(){}
+function Igd(){}
+function agd(){}
+function dgd(){}
+function pgd(){}
+function Hgd(){}
+function Kgd(){}
+function Mgd(){}
+function Ogd(){}
+function Qgd(){}
+function Sgd(){}
+function Ugd(){}
+function Wgd(){}
+function Ygd(){}
+function $gd(){}
+function ahd(){}
+function chd(){}
+function ehd(){}
+function ghd(){}
+function ihd(){}
+function khd(){}
+function mhd(){}
+function ohd(){}
+function qhd(){}
+function shd(){}
+function Shd(){}
+function lkd(){}
+function znd(){}
+function Jpd(){}
+function jrd(){}
+function Mrd(){}
+function Qrd(){}
+function Urd(){}
+function Yrd(){}
+function Yud(){}
+function eud(){}
+function asd(){}
+function Lsd(){}
+function btd(){}
+function dtd(){}
+function jtd(){}
+function otd(){}
+function ztd(){}
+function Xxd(){}
+function $yd(){}
+function rzd(){}
+function Rzd(){}
+function KAd(){}
+function hCd(){}
+function _Cd(){}
+function _Sd(){}
+function OSd(){}
+function BDd(){}
+function BId(){}
+function JId(){}
+function YHd(){}
+function fLd(){}
+function cPd(){}
+function hQd(){}
+function AQd(){}
+function kUd(){}
+function VUd(){}
+function pVd(){}
+function W$d(){}
+function Z$d(){}
+function a_d(){}
+function i_d(){}
+function v_d(){}
+function y_d(){}
+function f1d(){}
+function L5d(){}
+function v6d(){}
+function b8d(){}
+function e8d(){}
+function h8d(){}
+function k8d(){}
+function n8d(){}
+function q8d(){}
+function t8d(){}
+function w8d(){}
+function z8d(){}
+function X9d(){}
+function _9d(){}
+function Mae(){}
+function cbe(){}
+function ebe(){}
+function hbe(){}
+function kbe(){}
+function nbe(){}
+function qbe(){}
+function tbe(){}
+function wbe(){}
+function zbe(){}
+function Cbe(){}
+function Fbe(){}
+function Ibe(){}
+function Lbe(){}
+function Obe(){}
+function Rbe(){}
+function Ube(){}
+function Xbe(){}
+function $be(){}
+function bce(){}
+function ece(){}
+function hce(){}
+function kce(){}
+function nce(){}
+function qce(){}
+function tce(){}
+function wce(){}
+function zce(){}
+function Cce(){}
+function Fce(){}
+function Ice(){}
+function Lce(){}
+function Oce(){}
+function Rce(){}
+function Uce(){}
+function Xce(){}
+function $ce(){}
+function bde(){}
+function ede(){}
+function hde(){}
+function kde(){}
+function nde(){}
+function qde(){}
+function tde(){}
+function wde(){}
+function Hie(){}
+function rke(){}
+function rne(){}
+function Ene(){}
+function Gne(){}
+function Jne(){}
+function Mne(){}
+function Pne(){}
+function Sne(){}
+function Vne(){}
+function Yne(){}
+function _ne(){}
+function yme(){}
+function coe(){}
+function foe(){}
+function ioe(){}
+function loe(){}
+function ooe(){}
+function roe(){}
+function uoe(){}
+function xoe(){}
+function Aoe(){}
+function Doe(){}
+function Goe(){}
+function Joe(){}
+function Moe(){}
+function Poe(){}
+function Soe(){}
+function Voe(){}
+function Yoe(){}
+function _oe(){}
+function cpe(){}
+function fpe(){}
+function ipe(){}
+function lpe(){}
+function ope(){}
+function rpe(){}
+function upe(){}
+function xpe(){}
+function Ape(){}
+function Dpe(){}
+function Gpe(){}
+function Jpe(){}
+function Mpe(){}
+function Ppe(){}
+function Spe(){}
+function Vpe(){}
+function Ype(){}
+function _pe(){}
+function cqe(){}
+function fqe(){}
+function iqe(){}
+function lqe(){}
+function oqe(){}
+function rqe(){}
+function uqe(){}
+function Tqe(){}
+function sue(){}
+function Cue(){}
+function A2b(a){}
+function J3d(a){}
+function zl(){wb()}
+function z7b(){s7b()}
+function ZHb(){YHb()}
+function fSb(){eSb()}
+function vSb(){tSb()}
+function PUb(){OUb()}
+function AVb(){yVb()}
+function RVb(){QVb()}
+function fWb(){dWb()}
+function N5b(){H5b()}
+function $9b(){U9b()}
+function Lcc(){Hcc()}
+function pdc(){Zcc()}
+function pec(){iec()}
+function pGc(){nGc()}
+function jGc(){gGc()}
+function YGc(){SGc()}
+function cGc(){_Fc()}
+function NFc(){KFc()}
+function xgc(){sgc()}
+function xHc(){tHc()}
+function pHc(){lHc()}
+function IHc(){CHc()}
+function XHc(){RHc()}
+function boc(){Mnc()}
+function yqc(){mqc()}
+function Pzc(){Ozc()}
+function ACc(){yCc()}
+function aKc(){YJc()}
+function FLc(){DLc()}
+function DNc(){ANc()}
+function TNc(){JNc()}
+function iQc(){gQc()}
+function WRc(){TRc()}
+function C$c(){B$c()}
+function J0c(){B0c()}
+function x0c(){r0c()}
+function j_c(){h_c()}
+function N_c(){H_c()}
+function V_c(){R_c()}
+function E4c(){D4c()}
+function a5c(){$4c()}
+function v7c(){u7c()}
+function Z7c(){X7c()}
+function pcd(){ncd()}
+function Lcd(){Kcd()}
+function Vcd(){Tcd()}
+function fUd(){TTd()}
+function Bfd(){Afd()}
+function jkd(){hkd()}
+function vmd(){umd()}
+function xnd(){vnd()}
+function Hpd(){Fpd()}
+function HYd(){lYd()}
+function yAd(){qAd()}
+function gke(){rue()}
+function Yxb(a){uFb(a)}
+function Yb(a){this.a=a}
+function cc(a){this.a=a}
+function df(a){this.a=a}
+function kf(a){this.a=a}
+function kj(a){this.a=a}
+function qj(a){this.a=a}
+function Lj(a){this.a=a}
+function jh(a){this.a=a}
+function th(a){this.a=a}
+function Bh(a){this.a=a}
+function Xh(a){this.a=a}
+function Xn(a){this.a=a}
+function Di(a){this.a=a}
+function Ki(a){this.a=a}
+function Ik(a){this.a=a}
+function Qk(a){this.a=a}
+function mp(a){this.a=a}
+function Lp(a){this.a=a}
+function iq(a){this.a=a}
+function Eq(a){this.a=a}
+function Vq(a){this.a=a}
+function Or(a){this.a=a}
+function $r(a){this.b=a}
+function Aj(a){this.c=a}
+function vu(a){this.a=a}
+function vw(a){this.a=a}
+function gw(a){this.a=a}
+function lw(a){this.a=a}
+function Iw(a){this.a=a}
+function Nw(a){this.a=a}
+function Sw(a){this.a=a}
+function ex(a){this.a=a}
+function fx(a){this.a=a}
+function lx(a){this.a=a}
+function my(a){this.a=a}
+function qy(a){this.a=a}
+function Oy(a){this.a=a}
+function NB(a){this.a=a}
+function XB(a){this.a=a}
+function hC(a){this.a=a}
+function vC(a){this.a=a}
+function MB(){this.a=[]}
+function HEb(a,b){a.a=b}
+function E2b(a,b){a.a=b}
+function F2b(a,b){a.b=b}
+function PRb(a,b){a.b=b}
+function RRb(a,b){a.b=b}
+function QJb(a,b){a.j=b}
+function hQb(a,b){a.g=b}
+function iQb(a,b){a.i=b}
+function _Tb(a,b){a.c=b}
+function G2b(a,b){a.c=b}
+function H2b(a,b){a.d=b}
+function aUb(a,b){a.d=b}
+function h3b(a,b){a.k=b}
+function O3b(a,b){a.c=b}
+function Tmc(a,b){a.c=b}
+function Smc(a,b){a.a=b}
+function DJc(a,b){a.a=b}
+function EJc(a,b){a.f=b}
+function NSc(a,b){a.a=b}
+function OSc(a,b){a.b=b}
+function PSc(a,b){a.d=b}
+function QSc(a,b){a.i=b}
+function RSc(a,b){a.o=b}
+function SSc(a,b){a.r=b}
+function yUc(a,b){a.a=b}
+function zUc(a,b){a.b=b}
+function q3c(a,b){a.e=b}
+function r3c(a,b){a.f=b}
+function s3c(a,b){a.g=b}
+function Y9c(a,b){a.e=b}
+function Z9c(a,b){a.f=b}
+function kad(a,b){a.f=b}
+function Ntd(a,b){a.a=b}
+function Otd(a,b){a.b=b}
+function BWd(a,b){a.n=b}
+function $ee(a,b){a.a=b}
+function _ee(a,b){a.c=b}
+function ife(a,b){a.c=b}
+function Efe(a,b){a.c=b}
+function hfe(a,b){a.a=b}
+function Dfe(a,b){a.a=b}
+function jfe(a,b){a.d=b}
+function Ffe(a,b){a.d=b}
+function kfe(a,b){a.e=b}
+function Gfe(a,b){a.e=b}
+function lfe(a,b){a.g=b}
+function Hfe(a,b){a.f=b}
+function Ife(a,b){a.j=b}
+function wme(a,b){a.a=b}
+function Fme(a,b){a.a=b}
+function xme(a,b){a.b=b}
+function gmc(a){a.b=a.a}
+function Lg(a){a.c=a.d.d}
+function fgb(a){this.a=a}
+function zgb(a){this.a=a}
+function Xgb(a){this.a=a}
+function Xkb(a){this.a=a}
+function mkb(a){this.a=a}
+function reb(a){this.a=a}
+function Seb(a){this.a=a}
+function bfb(a){this.a=a}
+function Tfb(a){this.a=a}
+function blb(a){this.a=a}
+function glb(a){this.a=a}
+function llb(a){this.a=a}
+function Ulb(a){this.a=a}
+function _lb(a){this.a=a}
+function Plb(a){this.b=a}
+function Ppb(a){this.b=a}
+function xpb(a){this.b=a}
+function mpb(a){this.a=a}
+function Yqb(a){this.a=a}
+function uqb(a){this.c=a}
+function Anb(a){this.c=a}
+function zwb(a){this.c=a}
+function Dkb(a){this.d=a}
+function brb(a){this.a=a}
+function Frb(a){this.a=a}
+function hsb(a){this.a=a}
+function ctb(a){this.a=a}
+function cxb(a){this.a=a}
+function axb(a){this.a=a}
+function exb(a){this.a=a}
+function gxb(a){this.a=a}
+function wub(a){this.a=a}
+function zAb(a){this.a=a}
+function JAb(a){this.a=a}
+function LAb(a){this.a=a}
+function PAb(a){this.a=a}
+function VBb(a){this.a=a}
+function lCb(a){this.a=a}
+function nCb(a){this.a=a}
+function pCb(a){this.a=a}
+function CCb(a){this.a=a}
+function GCb(a){this.a=a}
+function bDb(a){this.a=a}
+function dDb(a){this.a=a}
+function fDb(a){this.a=a}
+function uDb(a){this.a=a}
+function $Db(a){this.a=a}
+function aEb(a){this.a=a}
+function eEb(a){this.a=a}
+function OEb(a){this.a=a}
+function SEb(a){this.a=a}
+function SFb(a){this.a=a}
+function HFb(a){this.a=a}
+function NFb(a){this.a=a}
+function WGb(a){this.a=a}
+function HJb(a){this.a=a}
+function PJb(a){this.a=a}
+function kNb(a){this.a=a}
+function tOb(a){this.a=a}
+function APb(a){this.a=a}
+function IQb(a){this.a=a}
+function bTb(a){this.a=a}
+function dTb(a){this.a=a}
+function wTb(a){this.a=a}
+function GWb(a){this.a=a}
+function UWb(a){this.a=a}
+function WWb(a){this.a=a}
+function fXb(a){this.a=a}
+function jXb(a){this.a=a}
+function M0b(a){this.a=a}
+function r1b(a){this.a=a}
+function D1b(a){this.e=a}
+function T3b(a){this.a=a}
+function W3b(a){this.a=a}
+function _3b(a){this.a=a}
+function c4b(a){this.a=a}
+function s5b(a){this.a=a}
+function u5b(a){this.a=a}
+function y5b(a){this.a=a}
+function C5b(a){this.a=a}
+function Q5b(a){this.a=a}
+function S5b(a){this.a=a}
+function U5b(a){this.a=a}
+function W5b(a){this.a=a}
+function l7b(a){this.a=a}
+function p7b(a){this.a=a}
+function k8b(a){this.a=a}
+function L8b(a){this.a=a}
+function Rac(a){this.a=a}
+function Xac(a){this.a=a}
+function $ac(a){this.a=a}
+function bbc(a){this.a=a}
+function Cdc(a){this.a=a}
+function Edc(a){this.a=a}
+function Ehc(a){this.a=a}
+function khc(a){this.a=a}
+function Ihc(a){this.a=a}
+function qfc(a){this.a=a}
+function tfc(a){this.a=a}
+function Wfc(a){this.a=a}
+function Fic(a){this.a=a}
+function Vic(a){this.a=a}
+function fjc(a){this.a=a}
+function pjc(a){this.a=a}
+function ckc(a){this.a=a}
+function hkc(a){this.a=a}
+function Ykc(a){this.a=a}
+function $kc(a){this.a=a}
+function alc(a){this.a=a}
+function glc(a){this.a=a}
+function ilc(a){this.a=a}
+function slc(a){this.a=a}
+function Clc(a){this.a=a}
+function xoc(a){this.a=a}
+function zoc(a){this.a=a}
+function spc(a){this.a=a}
+function Vqc(a){this.a=a}
+function Xqc(a){this.a=a}
+function Gsc(a){this.a=a}
+function Isc(a){this.a=a}
+function JGc(a){this.a=a}
+function NGc(a){this.a=a}
+function MHc(a){this.a=a}
+function JIc(a){this.a=a}
+function fJc(a){this.a=a}
+function BJc(a){this.a=a}
+function dJc(a){this.c=a}
+function Trc(a){this.b=a}
+function eKc(a){this.a=a}
+function IKc(a){this.a=a}
+function KKc(a){this.a=a}
+function MKc(a){this.a=a}
+function yLc(a){this.a=a}
+function HMc(a){this.a=a}
+function LMc(a){this.a=a}
+function PMc(a){this.a=a}
+function TMc(a){this.a=a}
+function XMc(a){this.a=a}
+function ZMc(a){this.a=a}
+function aNc(a){this.a=a}
+function jNc(a){this.a=a}
+function aPc(a){this.a=a}
+function gPc(a){this.a=a}
+function kPc(a){this.a=a}
+function yPc(a){this.a=a}
+function CPc(a){this.a=a}
+function JPc(a){this.a=a}
+function RPc(a){this.a=a}
+function XPc(a){this.a=a}
+function mRc(a){this.a=a}
+function xTc(a){this.a=a}
+function CWc(a){this.a=a}
+function EWc(a){this.a=a}
+function IWc(a){this.a=a}
+function OWc(a){this.a=a}
+function dXc(a){this.a=a}
+function gXc(a){this.a=a}
+function EXc(a){this.a=a}
+function WXc(a){this.a=a}
+function YXc(a){this.a=a}
+function aYc(a){this.a=a}
+function cYc(a){this.a=a}
+function eYc(a){this.a=a}
+function iYc(a){this.a=a}
+function i0c(a){this.a=a}
+function g0c(a){this.a=a}
+function P1c(a){this.a=a}
+function Sad(a){this.a=a}
+function Uad(a){this.a=a}
+function Wad(a){this.a=a}
+function Yad(a){this.a=a}
+function cbd(a){this.a=a}
+function ydd(a){this.a=a}
+function Kdd(a){this.a=a}
+function Mdd(a){this.a=a}
+function _ed(a){this.a=a}
+function dfd(a){this.a=a}
+function Kfd(a){this.a=a}
+function prd(a){this.a=a}
+function $rd(a){this.a=a}
+function csd(a){this.a=a}
+function Usd(a){this.a=a}
+function Vtd(a){this.a=a}
+function wud(a){this.a=a}
+function Rud(a){this.f=a}
+function LEd(a){this.a=a}
+function UEd(a){this.a=a}
+function VEd(a){this.a=a}
+function WEd(a){this.a=a}
+function XEd(a){this.a=a}
+function YEd(a){this.a=a}
+function ZEd(a){this.a=a}
+function $Ed(a){this.a=a}
+function _Ed(a){this.a=a}
+function aFd(a){this.a=a}
+function gFd(a){this.a=a}
+function iFd(a){this.a=a}
+function jFd(a){this.a=a}
+function kFd(a){this.a=a}
+function lFd(a){this.a=a}
+function nFd(a){this.a=a}
+function qFd(a){this.a=a}
+function wFd(a){this.a=a}
+function xFd(a){this.a=a}
+function zFd(a){this.a=a}
+function AFd(a){this.a=a}
+function BFd(a){this.a=a}
+function CFd(a){this.a=a}
+function DFd(a){this.a=a}
+function MFd(a){this.a=a}
+function OFd(a){this.a=a}
+function QFd(a){this.a=a}
+function SFd(a){this.a=a}
+function uGd(a){this.a=a}
+function QGd(a){this.a=a}
+function jGd(a){this.b=a}
+function YOd(a){this.a=a}
+function ePd(a){this.a=a}
+function kPd(a){this.a=a}
+function qPd(a){this.a=a}
+function IPd(a){this.a=a}
+function w$d(a){this.a=a}
+function e_d(a){this.a=a}
+function Q_d(a){this.b=a}
+function c1d(a){this.a=a}
+function c2d(a){this.a=a}
+function l5d(a){this.a=a}
+function I9d(a){this.a=a}
+function L6d(a){this.c=a}
+function t7d(a){this.e=a}
+function pae(a){this.a=a}
+function xae(a){this.a=a}
+function Zde(a){this.a=a}
+function Sde(a){this.d=a}
+function mee(a){this.a=a}
+function uje(a){this.a=a}
+function Bte(a){this.a=a}
+function Wse(a){this.e=a}
+function Xsd(){this.a=0}
+function Tsb(){akb(this)}
+function bnb(){Pmb(this)}
+function cHb(){bHb(this)}
+function I2b(){A2b(this)}
+function s2d(){this.c=d2d}
+function Prc(a,b){a.b+=b}
+function Uje(a,b){b.Wb(a)}
+function UC(a){return a.a}
+function nC(a){return a.a}
+function BC(a){return a.a}
+function TB(a){return a.a}
+function _B(a){return a.a}
+function Adb(a){return a.e}
+function gC(){return null}
+function MC(){return null}
+function leb(){MId();OId()}
+function qMb(a){a.b.Of(a.e)}
+function A$b(a){a.b=new Ri}
+function A8b(a,b){a.b=b-a.b}
+function x8b(a,b){a.a=b-a.a}
+function ZEb(a,b){a.push(b)}
+function bFb(a,b){a.sort(b)}
+function Q5c(a,b){b.jd(a.a)}
+function Voc(a,b){Q3b(b,a)}
+function tp(a,b,c){a.Yd(c,b)}
+function Ss(a,b){a.e=b;b.b=a}
+function im(a){_l();this.a=a}
+function xq(a){_l();this.a=a}
+function Gq(a){_l();this.a=a}
+function Xq(a){tm();this.a=a}
+function gA(a){fA();eA.le(a)}
+function vA(){vA=geb;new Tsb}
+function xz(){mz.call(this)}
+function Ceb(){mz.call(this)}
+function ueb(){xz.call(this)}
+function yeb(){xz.call(this)}
+function Hfb(){xz.call(this)}
+function _fb(){xz.call(this)}
+function cgb(){xz.call(this)}
+function Ngb(){xz.call(this)}
+function jib(){xz.call(this)}
+function Jrb(){xz.call(this)}
+function Srb(){xz.call(this)}
+function Dvb(){xz.call(this)}
+function Ied(){xz.call(this)}
+function R1d(){this.a=this}
+function k1d(){this.Bb|=256}
+function vWb(){this.b=new Et}
+function aFb(a,b){a.length=b}
+function dyb(a,b){Rmb(a.a,b)}
+function jNb(a,b){LKb(a.c,b)}
+function qRc(a,b){Ysb(a.b,b)}
+function VOd(a,b){UNd(a.a,b)}
+function WOd(a,b){VNd(a.a,b)}
+function eZd(a,b){qvd(a.e,b)}
+function Cke(a){bge(a.c,a.b)}
+function uj(a,b){a.kc().Nb(b)}
+function Ufb(a){this.a=Zfb(a)}
+function _sb(){this.a=new Tsb}
+function $Ab(){this.a=new Tsb}
+function xAb(){this.a=new dzb}
+function gyb(){this.a=new bnb}
+function BIb(){this.a=new bnb}
+function GIb(){this.a=new bnb}
+function wIb(){this.a=new pIb}
+function gJb(){this.a=new DIb}
+function TTb(){this.a=new DTb}
+function jGb(){this.a=new fGb}
+function qGb(){this.a=new kGb}
+function q_b(){this.a=new bnb}
+function E_b(){this.a=new bnb}
+function EZb(){this.a=new bnb}
+function J$b(){this.a=new bnb}
+function YNb(){this.d=new bnb}
+function lXb(){this.a=new RWb}
+function y_b(){this.a=new _sb}
+function k5b(){this.a=new Tsb}
+function E0b(){this.b=new Tsb}
+function jHc(){this.b=new bnb}
+function ZNc(){this.e=new bnb}
+function ahc(){this.a=new boc}
+function UQc(){this.d=new bnb}
+function uRc(){tRc.call(this)}
+function BRc(){tRc.call(this)}
+function VOc(){bnb.call(this)}
+function web(){ueb.call(this)}
+function Fyb(){gyb.call(this)}
+function fKb(){RJb.call(this)}
+function N$b(){J$b.call(this)}
+function P2b(){I2b.call(this)}
+function T2b(){P2b.call(this)}
+function z3b(){I2b.call(this)}
+function C3b(){z3b.call(this)}
+function cUc(){aUc.call(this)}
+function hUc(){aUc.call(this)}
+function mUc(){aUc.call(this)}
+function Hdd(){Ddd.call(this)}
+function ACd(){$yd.call(this)}
+function PCd(){$yd.call(this)}
+function Ejd(){Yub.call(this)}
+function LQd(){wQd.call(this)}
+function lRd(){wQd.call(this)}
+function MSd(){Tsb.call(this)}
+function VSd(){Tsb.call(this)}
+function eTd(){Tsb.call(this)}
+function mXd(){HWd.call(this)}
+function i1d(){_sb.call(this)}
+function A1d(){k1d.call(this)}
+function q4d(){dWd.call(this)}
+function O5d(){Tsb.call(this)}
+function R5d(){dWd.call(this)}
+function lae(){Tsb.call(this)}
+function Cae(){Tsb.call(this)}
+function ome(){kUd.call(this)}
+function Hme(){ome.call(this)}
+function Nme(){kUd.call(this)}
+function Gre(){Tqe.call(this)}
+function aUc(){this.a=new _sb}
+function nZc(){this.a=new Tsb}
+function DZc(){this.a=new bnb}
+function Ddd(){this.a=new Tsb}
+function Oqd(){this.a=new Yub}
+function Oed(){this.j=new bnb}
+function obd(){this.a=new nbd}
+function wQd(){this.a=new AQd}
+function R5c(){this.a=new V5c}
+function wb(){wb=geb;vb=new xb}
+function Wk(){Wk=geb;Vk=new Xk}
+function kl(){kl=geb;jl=new ll}
+function ll(){Qk.call(this,'')}
+function Xk(){Qk.call(this,'')}
+function Dd(a){yd.call(this,a)}
+function Hd(a){yd.call(this,a)}
+function xh(a){th.call(this,a)}
+function $h(a){Wc.call(this,a)}
+function Qi(a){Wc.call(this,a)}
+function wi(a){$h.call(this,a)}
+function Sp(a){$h.call(this,a)}
+function Js(a){$h.call(this,a)}
+function Jp(a){Xo.call(this,a)}
+function Qp(a){Xo.call(this,a)}
+function dq(a){ho.call(this,a)}
+function Fv(a){uv.call(this,a)}
+function aw(a){Tr.call(this,a)}
+function cw(a){Tr.call(this,a)}
+function _w(a){Tr.call(this,a)}
+function Mx(a){Gn.call(this,a)}
+function Nx(a){Mx.call(this,a)}
+function yz(a){nz.call(this,a)}
+function aC(a){yz.call(this,a)}
+function uC(){vC.call(this,{})}
+function cC(){cC=geb;bC=new dC}
+function zs(){zs=geb;ys=new As}
+function Az(){Az=geb;zz=new nb}
+function $z(){$z=geb;Zz=new bA}
+function $A(){$A=geb;ZA=new aB}
+function Ovb(a){Kvb();this.a=a}
+function FKc(a){jKc();this.a=a}
+function zud(a){nud();this.f=a}
+function Bud(a){nud();this.f=a}
+function Cde(a){KMd();this.a=a}
+function Lyb(a){a.b=null;a.c=0}
+function kz(a,b){a.e=b;hz(a,b)}
+function NYb(a,b){a.a=b;PYb(a)}
+function cLb(a,b,c){a.a[b.g]=c}
+function zsd(a,b,c){Hsd(c,a,b)}
+function shc(a,b){Xmc(b.i,a.n)}
+function HCc(a,b){ICc(a).Cd(b)}
+function yw(a,b){a.a.ec().Mc(b)}
+function ns(a,b){return a.g-b.g}
+function AUb(a,b){return a*a/b}
+function Heb(a){return uFb(a),a}
+function Kfb(a){return uFb(a),a}
+function Mfb(a){return uFb(a),a}
+function JC(a){return new hC(a)}
+function LC(a){return new OC(a)}
+function shb(a){return uFb(a),a}
+function Chb(a){return uFb(a),a}
+function teb(a){yz.call(this,a)}
+function veb(a){yz.call(this,a)}
+function zeb(a){yz.call(this,a)}
+function Aeb(a){nz.call(this,a)}
+function Ifb(a){yz.call(this,a)}
+function agb(a){yz.call(this,a)}
+function dgb(a){yz.call(this,a)}
+function Mgb(a){yz.call(this,a)}
+function Ogb(a){yz.call(this,a)}
+function kib(a){yz.call(this,a)}
+function Jed(a){yz.call(this,a)}
+function Ked(a){yz.call(this,a)}
+function CDd(a){yz.call(this,a)}
+function Mle(a){yz.call(this,a)}
+function Lqe(a){yz.call(this,a)}
+function mob(a){uFb(a);this.a=a}
+function yYb(a){sYb(a);return a}
+function Nnb(a){Snb(a,a.length)}
+function nmb(a){return a.b==a.c}
+function Vyb(a){return !!a&&a.b}
+function gLb(a){return !!a&&a.k}
+function hLb(a){return !!a&&a.j}
+function F_b(a,b,c){a.c.Ef(b,c)}
+function Ts(a,b){a.be(b);b.ae(a)}
+function Fy(a){_l();this.a=Qb(a)}
+function Gb(){this.a=WD(Qb(pve))}
+function jc(){throw Adb(new jib)}
+function jn(){throw Adb(new jib)}
+function Hh(){throw Adb(new jib)}
+function Xi(){throw Adb(new jib)}
+function Xj(){throw Adb(new jib)}
+function Yj(){throw Adb(new jib)}
+function Qz(){Qz=geb;!!(fA(),eA)}
+function Qhb(){reb.call(this,'')}
+function Rhb(){reb.call(this,'')}
+function bib(){reb.call(this,'')}
+function cib(){reb.call(this,'')}
+function eib(a){veb.call(this,a)}
+function xeb(a){veb.call(this,a)}
+function Vgb(a){agb.call(this,a)}
+function Lqb(a){xpb.call(this,a)}
+function Sqb(a){Lqb.call(this,a)}
+function irb(a){Upb.call(this,a)}
+function pc(a){qc.call(this,a,0)}
+function Ri(){Si.call(this,12,3)}
+function WC(a,b){return xfb(a,b)}
+function cFb(a,b){return dD(a,b)}
+function Reb(a,b){return a.a-b.a}
+function afb(a,b){return a.a-b.a}
+function Wgb(a,b){return a.a-b.a}
+function pC(b,a){return a in b.a}
+function Vvb(a){return a.a?a.b:0}
+function cwb(a){return a.a?a.b:0}
+function Fxb(a,b,c){b.Cd(a.a[c])}
+function Kxb(a,b,c){b.Pe(a.a[c])}
+function uKb(a,b){a.b=new sjd(b)}
+function QGb(a,b){a.b=b;return a}
+function RGb(a,b){a.c=b;return a}
+function SGb(a,b){a.f=b;return a}
+function TGb(a,b){a.g=b;return a}
+function yJb(a,b){a.a=b;return a}
+function zJb(a,b){a.f=b;return a}
+function AJb(a,b){a.k=b;return a}
+function WNb(a,b){a.a=b;return a}
+function XNb(a,b){a.e=b;return a}
+function BYb(a,b){a.e=b;return a}
+function CYb(a,b){a.f=b;return a}
+function BRb(a,b){a.b=true;a.d=b}
+function WNc(a,b){return a.b-b.b}
+function KSc(a,b){return a.g-b.g}
+function pmc(a,b){return a?0:b-1}
+function qKc(a,b){return a?0:b-1}
+function pKc(a,b){return a?b-1:0}
+function uVc(a,b){return a.s-b.s}
+function Xed(a,b){return b.rg(a)}
+function Xfd(a,b){a.b=b;return a}
+function Wfd(a,b){a.a=b;return a}
+function Yfd(a,b){a.c=b;return a}
+function Zfd(a,b){a.d=b;return a}
+function $fd(a,b){a.e=b;return a}
+function _fd(a,b){a.f=b;return a}
+function mgd(a,b){a.a=b;return a}
+function ngd(a,b){a.b=b;return a}
+function ogd(a,b){a.c=b;return a}
+function Khd(a,b){a.c=b;return a}
+function Jhd(a,b){a.b=b;return a}
+function Lhd(a,b){a.d=b;return a}
+function Mhd(a,b){a.e=b;return a}
+function Nhd(a,b){a.f=b;return a}
+function Ohd(a,b){a.g=b;return a}
+function Phd(a,b){a.a=b;return a}
+function Qhd(a,b){a.i=b;return a}
+function Rhd(a,b){a.j=b;return a}
+function coc(a,b){Mnc();P3b(b,a)}
+function bbd(a,b,c){_ad(a.a,b,c)}
+function Fjd(a){Zub.call(this,a)}
+function TRb(a){SRb.call(this,a)}
+function pLc(a){CIc.call(this,a)}
+function ILc(a){CIc.call(this,a)}
+function gLd(a){ZHd.call(this,a)}
+function DPd(a){xPd.call(this,a)}
+function FPd(a){xPd.call(this,a)}
+function x2b(){y2b.call(this,'')}
+function pjd(){this.a=0;this.b=0}
+function ATc(){this.b=0;this.a=0}
+function lXd(a,b){a.b=0;bWd(a,b)}
+function Kqd(a,b){a.k=b;return a}
+function Lqd(a,b){a.j=b;return a}
+function vfe(a,b){a.c=b;a.b=true}
+function Etb(){Etb=geb;Dtb=Gtb()}
+function bvd(){bvd=geb;avd=OAd()}
+function dvd(){dvd=geb;cvd=aCd()}
+function MId(){MId=geb;LId=ygd()}
+function jTd(){jTd=geb;iTd=Qae()}
+function Ole(){Ole=geb;Nle=vne()}
+function Qle(){Qle=geb;Ple=Cne()}
+function mfb(a){return a.e&&a.e()}
+function FD(a){return a.l|a.m<<22}
+function Oc(a,b){return a.c._b(b)}
+function En(a,b){return Wv(a.b,b)}
+function Vd(a){return !a?null:a.d}
+function Vv(a){return !a?null:a.g}
+function $v(a){return !a?null:a.i}
+function nfb(a){lfb(a);return a.o}
+function Khb(a,b){a.a+=b;return a}
+function Lhb(a,b){a.a+=b;return a}
+function Ohb(a,b){a.a+=b;return a}
+function Uhb(a,b){a.a+=b;return a}
+function _wb(a,b){while(a.Bd(b));}
+function atb(a){this.a=new Usb(a)}
+function $tb(){throw Adb(new jib)}
+function qpb(){throw Adb(new jib)}
+function rpb(){throw Adb(new jib)}
+function spb(){throw Adb(new jib)}
+function vpb(){throw Adb(new jib)}
+function Opb(){throw Adb(new jib)}
+function yAb(a){this.a=new ezb(a)}
+function H2c(){this.a=new Wed(s0)}
+function TVc(){this.b=new Wed(H$)}
+function l6c(){this.a=new Wed(V0)}
+function $ad(){this.b=new Wed(I1)}
+function nbd(){this.b=new Wed(I1)}
+function T2c(a){this.a=0;this.b=a}
+function Bib(a){tib();vib(this,a)}
+function QDb(a){LCb(a);return a.a}
+function dvb(a){return a.b!=a.d.c}
+function AMc(a,b){return a.d[b.p]}
+function ued(a,b){return ned(a,b)}
+function $Eb(a,b,c){a.splice(b,c)}
+function ixb(a,b){while(a.Re(b));}
+function NKb(a){a.c?MKb(a):OKb(a)}
+function mQd(){throw Adb(new jib)}
+function nQd(){throw Adb(new jib)}
+function oQd(){throw Adb(new jib)}
+function pQd(){throw Adb(new jib)}
+function qQd(){throw Adb(new jib)}
+function rQd(){throw Adb(new jib)}
+function sQd(){throw Adb(new jib)}
+function tQd(){throw Adb(new jib)}
+function uQd(){throw Adb(new jib)}
+function vQd(){throw Adb(new jib)}
+function zue(){throw Adb(new Dvb)}
+function Aue(){throw Adb(new Dvb)}
+function oue(a){this.a=new Dte(a)}
+function Dte(a){Cte(this,a,sse())}
+function cve(a){return !a||bve(a)}
+function Cqe(a){return xqe[a]!=-1}
+function Yz(){Nz!=0&&(Nz=0);Pz=-1}
+function beb(){_db==null&&(_db=[])}
+function eg(a,b){zf.call(this,a,b)}
+function gg(a,b){eg.call(this,a,b)}
+function Nj(a,b){this.a=a;this.b=b}
+function hk(a,b){this.a=a;this.b=b}
+function nk(a,b){this.a=a;this.b=b}
+function pk(a,b){this.a=a;this.b=b}
+function xk(a,b){this.a=a;this.b=b}
+function zk(a,b){this.a=a;this.b=b}
+function Kk(a,b){this.a=a;this.b=b}
+function ne(a,b){this.e=a;this.d=b}
+function Hf(a,b){this.b=a;this.c=b}
+function cp(a,b){this.b=a;this.a=b}
+function Cp(a,b){this.b=a;this.a=b}
+function qr(a,b){this.b=a;this.a=b}
+function Rr(a,b){this.b=a;this.a=b}
+function vr(a,b){this.a=a;this.b=b}
+function su(a,b){this.a=a;this.b=b}
+function Hu(a,b){this.a=a;this.f=b}
+function gp(a,b){this.g=a;this.i=b}
+function qs(a,b){this.f=a;this.g=b}
+function Gv(a,b){this.b=a;this.c=b}
+function Wc(a){Lb(a.dc());this.c=a}
+function Ex(a,b){this.a=a;this.b=b}
+function ey(a,b){this.a=a;this.b=b}
+function pv(a){this.a=RD(Qb(a),15)}
+function uv(a){this.a=RD(Qb(a),15)}
+function nw(a){this.a=RD(Qb(a),85)}
+function rf(a){this.b=RD(Qb(a),85)}
+function Tr(a){this.b=RD(Qb(a),51)}
+function uB(){this.q=new $wnd.Date}
+function CC(a,b){this.a=a;this.b=b}
+function Bt(a,b){return Ujb(a.b,b)}
+function tpb(a,b){return a.b.Hc(b)}
+function upb(a,b){return a.b.Ic(b)}
+function wpb(a,b){return a.b.Qc(b)}
+function Pqb(a,b){return a.b.Hc(b)}
+function pqb(a,b){return a.c.uc(b)}
+function rqb(a,b){return pb(a.c,b)}
+function Zsb(a,b){return a.a._b(b)}
+function Xp(a,b){return a>b&&b0}
+function Ldb(a,b){return Ddb(a,b)<0}
+function Urb(a,b){return Bsb(a.a,b)}
+function Beb(a,b){oz.call(this,a,b)}
+function Qx(a){Px();ho.call(this,a)}
+function Lnb(a,b){Pnb(a,a.length,b)}
+function Mnb(a,b){Rnb(a,a.length,b)}
+function Ktb(a,b){return a.a.get(b)}
+function bub(a,b){return Ujb(a.e,b)}
+function Zxb(a){return uFb(a),false}
+function zw(a){this.a=RD(Qb(a),229)}
+function $wb(a){Swb.call(this,a,21)}
+function dAb(a,b){qs.call(this,a,b)}
+function yBb(a,b){qs.call(this,a,b)}
+function ssb(a,b){this.b=a;this.a=b}
+function xlb(a,b){this.d=a;this.e=b}
+function jEb(a,b){this.a=a;this.b=b}
+function pEb(a,b){this.a=a;this.b=b}
+function vEb(a,b){this.a=a;this.b=b}
+function BEb(a,b){this.a=a;this.b=b}
+function TFb(a,b){this.a=a;this.b=b}
+function QEb(a,b){this.b=a;this.a=b}
+function sHb(a,b){this.b=a;this.a=b}
+function EHb(a,b){qs.call(this,a,b)}
+function MHb(a,b){qs.call(this,a,b)}
+function jIb(a,b){qs.call(this,a,b)}
+function $Jb(a,b){qs.call(this,a,b)}
+function FKb(a,b){qs.call(this,a,b)}
+function wLb(a,b){qs.call(this,a,b)}
+function nOb(a,b){qs.call(this,a,b)}
+function kPb(a,b){this.b=a;this.a=b}
+function JPb(a,b){qs.call(this,a,b)}
+function fRb(a,b){this.b=a;this.a=b}
+function JRb(a,b){qs.call(this,a,b)}
+function OTb(a,b){this.b=a;this.a=b}
+function UUb(a,b){qs.call(this,a,b)}
+function BWb(a,b){qs.call(this,a,b)}
+function tXb(a,b){qs.call(this,a,b)}
+function XEb(a,b,c){a.splice(b,0,c)}
+function pr(a,b,c){a.Mb(c)&&b.Cd(c)}
+function lEb(a,b,c){b.Pe(a.a.Ye(c))}
+function rEb(a,b,c){b.Dd(a.a.Ze(c))}
+function xEb(a,b,c){b.Cd(a.a.Kb(c))}
+function eYb(a,b){return Csb(a.c,b)}
+function cGb(a,b){return Csb(a.e,b)}
+function qZb(a,b){qs.call(this,a,b)}
+function V$b(a,b){qs.call(this,a,b)}
+function s3b(a,b){qs.call(this,a,b)}
+function Q8b(a,b){qs.call(this,a,b)}
+function icc(a,b){qs.call(this,a,b)}
+function xec(a,b){qs.call(this,a,b)}
+function gic(a,b){this.a=a;this.b=b}
+function Xic(a,b){this.a=a;this.b=b}
+function h4b(a,b){this.a=a;this.b=b}
+function vjc(a,b){this.a=a;this.b=b}
+function xjc(a,b){this.a=a;this.b=b}
+function Hjc(a,b){this.a=a;this.b=b}
+function hjc(a,b){this.b=a;this.a=b}
+function Jjc(a,b){this.b=a;this.a=b}
+function _Yb(a,b){this.b=a;this.a=b}
+function eZb(a,b){this.c=a;this.d=b}
+function Q1b(a,b){this.e=a;this.d=b}
+function Tjc(a,b){this.a=a;this.b=b}
+function ulc(a,b){this.a=a;this.b=b}
+function Elc(a,b){this.a=a;this.b=b}
+function fqc(a,b){this.b=a;this.a=b}
+function smc(a,b){this.b=b;this.c=a}
+function fnc(a,b){qs.call(this,a,b)}
+function Cnc(a,b){qs.call(this,a,b)}
+function koc(a,b){qs.call(this,a,b)}
+function ktc(a,b){qs.call(this,a,b)}
+function ctc(a,b){qs.call(this,a,b)}
+function utc(a,b){qs.call(this,a,b)}
+function Ftc(a,b){qs.call(this,a,b)}
+function Rtc(a,b){qs.call(this,a,b)}
+function _tc(a,b){qs.call(this,a,b)}
+function iuc(a,b){qs.call(this,a,b)}
+function vuc(a,b){qs.call(this,a,b)}
+function Duc(a,b){qs.call(this,a,b)}
+function Puc(a,b){qs.call(this,a,b)}
+function _uc(a,b){qs.call(this,a,b)}
+function pvc(a,b){qs.call(this,a,b)}
+function yvc(a,b){qs.call(this,a,b)}
+function Hvc(a,b){qs.call(this,a,b)}
+function Pvc(a,b){qs.call(this,a,b)}
+function dxc(a,b){qs.call(this,a,b)}
+function bDc(a,b){qs.call(this,a,b)}
+function nDc(a,b){qs.call(this,a,b)}
+function yDc(a,b){qs.call(this,a,b)}
+function LDc(a,b){qs.call(this,a,b)}
+function bEc(a,b){qs.call(this,a,b)}
+function lEc(a,b){qs.call(this,a,b)}
+function tEc(a,b){qs.call(this,a,b)}
+function CEc(a,b){qs.call(this,a,b)}
+function LEc(a,b){qs.call(this,a,b)}
+function UEc(a,b){qs.call(this,a,b)}
+function mFc(a,b){qs.call(this,a,b)}
+function vFc(a,b){qs.call(this,a,b)}
+function EFc(a,b){qs.call(this,a,b)}
+function SKc(a,b){qs.call(this,a,b)}
+function cNc(a,b){this.b=a;this.a=b}
+function tNc(a,b){qs.call(this,a,b)}
+function QOc(a,b){this.a=a;this.b=b}
+function ePc(a,b){this.a=a;this.b=b}
+function LPc(a,b){this.a=a;this.b=b}
+function xQc(a,b){qs.call(this,a,b)}
+function FQc(a,b){qs.call(this,a,b)}
+function MQc(a,b){this.a=a;this.b=b}
+function FMc(a,b){dMc();return b!=a}
+function Uvb(a){sFb(a.a);return a.b}
+function qYb(a){rYb(a,a.c);return a}
+function Itb(){Etb();return new Dtb}
+function _ec(){Rec();this.a=new e6b}
+function lSc(){dSc();this.a=new _sb}
+function aRc(){WQc();this.b=new _sb}
+function xRc(a,b){this.b=a;this.d=b}
+function nVc(a,b){this.a=a;this.b=b}
+function pVc(a,b){this.a=a;this.b=b}
+function GWc(a,b){this.a=a;this.b=b}
+function IXc(a,b){this.b=a;this.a=b}
+function gTc(a,b){qs.call(this,a,b)}
+function eVc(a,b){qs.call(this,a,b)}
+function $Vc(a,b){qs.call(this,a,b)}
+function XYc(a,b){qs.call(this,a,b)}
+function MZc(a,b){qs.call(this,a,b)}
+function t_c(a,b){qs.call(this,a,b)}
+function B_c(a,b){qs.call(this,a,b)}
+function z2c(a,b){qs.call(this,a,b)}
+function h3c(a,b){qs.call(this,a,b)}
+function $3c(a,b){qs.call(this,a,b)}
+function i4c(a,b){qs.call(this,a,b)}
+function l5c(a,b){qs.call(this,a,b)}
+function v5c(a,b){qs.call(this,a,b)}
+function g6c(a,b){qs.call(this,a,b)}
+function A6c(a,b){qs.call(this,a,b)}
+function a7c(a,b){qs.call(this,a,b)}
+function B8c(a,b){qs.call(this,a,b)}
+function d9c(a,b){qs.call(this,a,b)}
+function D9c(a,b){qs.call(this,a,b)}
+function tad(a,b){qs.call(this,a,b)}
+function hbd(a,b){qs.call(this,a,b)}
+function Nbd(a,b){qs.call(this,a,b)}
+function Ybd(a,b){qs.call(this,a,b)}
+function ndd(a,b){qs.call(this,a,b)}
+function z1c(a,b){this.b=a;this.a=b}
+function B1c(a,b){this.b=a;this.a=b}
+function d2c(a,b){this.b=a;this.a=b}
+function f2c(a,b){this.b=a;this.a=b}
+function m9c(a,b){this.a=a;this.b=b}
+function xed(a,b){this.a=a;this.b=b}
+function ffd(a,b){this.a=a;this.b=b}
+function rjd(a,b){this.a=a;this.b=b}
+function Sjd(a,b){qs.call(this,a,b)}
+function Zhd(a,b){qs.call(this,a,b)}
+function lid(a,b){qs.call(this,a,b)}
+function vkd(a,b){qs.call(this,a,b)}
+function Gmd(a,b){qs.call(this,a,b)}
+function Pmd(a,b){qs.call(this,a,b)}
+function Zmd(a,b){qs.call(this,a,b)}
+function jnd(a,b){qs.call(this,a,b)}
+function Gnd(a,b){qs.call(this,a,b)}
+function Rnd(a,b){qs.call(this,a,b)}
+function eod(a,b){qs.call(this,a,b)}
+function qod(a,b){qs.call(this,a,b)}
+function Eod(a,b){qs.call(this,a,b)}
+function Qod(a,b){qs.call(this,a,b)}
+function upd(a,b){qs.call(this,a,b)}
+function Rpd(a,b){qs.call(this,a,b)}
+function eqd(a,b){qs.call(this,a,b)}
+function nqd(a,b){qs.call(this,a,b)}
+function vqd(a,b){qs.call(this,a,b)}
+function Hrd(a,b){qs.call(this,a,b)}
+function esd(a,b){this.a=a;this.b=b}
+function gsd(a,b){this.a=a;this.b=b}
+function isd(a,b){this.a=a;this.b=b}
+function Osd(a,b){this.a=a;this.b=b}
+function Qsd(a,b){this.a=a;this.b=b}
+function Ssd(a,b){this.a=a;this.b=b}
+function Ptd(a,b){this.a=a;this.b=b}
+function JEd(a,b){this.a=a;this.b=b}
+function KEd(a,b){this.a=a;this.b=b}
+function MEd(a,b){this.a=a;this.b=b}
+function NEd(a,b){this.a=a;this.b=b}
+function QEd(a,b){this.a=a;this.b=b}
+function REd(a,b){this.a=a;this.b=b}
+function SEd(a,b){this.b=a;this.a=b}
+function TEd(a,b){this.b=a;this.a=b}
+function bFd(a,b){this.b=a;this.a=b}
+function dFd(a,b){this.b=a;this.a=b}
+function fFd(a,b){this.a=a;this.b=b}
+function hFd(a,b){this.a=a;this.b=b}
+function utd(a,b){qs.call(this,a,b)}
+function sFd(a,b){this.a=a;this.b=b}
+function uFd(a,b){this.a=a;this.b=b}
+function bGd(a,b){qs.call(this,a,b)}
+function uId(a,b){this.f=a;this.c=b}
+function Ofd(a,b){return Csb(a.g,b)}
+function Tqc(a,b){return Csb(b.b,a)}
+function HPd(a,b){return QNd(a.a,b)}
+function Idd(a,b){return -a.b.af(b)}
+function IId(a,b){!!a&&Zjb(CId,a,b)}
+function yWd(a,b){a.i=null;zWd(a,b)}
+function kEd(a,b,c){pDd(b,KDd(a,c))}
+function lEd(a,b,c){pDd(b,KDd(a,c))}
+function mFd(a,b){vEd(a.a,RD(b,58))}
+function _Mc(a,b){GMc(a.a,RD(b,12))}
+function KTd(a,b){this.a=a;this.b=b}
+function NTd(a,b){this.a=a;this.b=b}
+function B5d(a,b){this.a=a;this.b=b}
+function Z6d(a,b){this.a=a;this.b=b}
+function Ble(a,b){this.a=a;this.b=b}
+function afe(a,b){this.d=a;this.b=b}
+function wfe(a,b){this.e=a;this.a=b}
+function Eke(a,b){this.b=a;this.c=b}
+function zNd(a,b){this.i=a;this.g=b}
+function kZd(a,b){this.d=a;this.e=b}
+function ave(a,b){eve(new dMd(a),b)}
+function Dke(a){return pge(a.c,a.b)}
+function Wd(a){return !a?null:a.md()}
+function dE(a){return a==null?null:a}
+function bE(a){return typeof a===jve}
+function $D(a){return typeof a===hve}
+function _D(a){return typeof a===ive}
+function Gdb(a,b){return Ddb(a,b)==0}
+function Jdb(a,b){return Ddb(a,b)>=0}
+function Pdb(a,b){return Ddb(a,b)!=0}
+function ar(a,b){return zr(a.Kc(),b)}
+function Qm(a,b){return a.Rd().Xb(b)}
+function kg(a){ig(a);return a.d.gc()}
+function fE(a){CFb(a==null);return a}
+function Mhb(a,b){a.a+=''+b;return a}
+function Nhb(a,b){a.a+=''+b;return a}
+function Whb(a,b){a.a+=''+b;return a}
+function Yhb(a,b){a.a+=''+b;return a}
+function Zhb(a,b){a.a+=''+b;return a}
+function Vhb(a,b){return a.a+=''+b,a}
+function Pfb(a){return ''+(uFb(a),a)}
+function Vsb(a){akb(this);Ld(this,a)}
+function YFc(){RFc();UFc.call(this)}
+function pxb(a,b){kxb.call(this,a,b)}
+function txb(a,b){kxb.call(this,a,b)}
+function xxb(a,b){kxb.call(this,a,b)}
+function Oub(a,b){Pub(a,b,a.c.b,a.c)}
+function Nub(a,b){Pub(a,b,a.a,a.a.a)}
+function Iob(a){tFb(a,0);return null}
+function Xvb(){this.b=0;this.a=false}
+function dwb(){this.b=0;this.a=false}
+function Et(){this.b=new Usb(Sv(12))}
+function pMb(){pMb=geb;oMb=ss(nMb())}
+function ncc(){ncc=geb;mcc=ss(lcc())}
+function aZc(){aZc=geb;_Yc=ss($Yc())}
+function WA(){WA=geb;vA();VA=new Tsb}
+function hjd(a){a.a=0;a.b=0;return a}
+function qfd(a,b){a.a=b.g+1;return a}
+function yNd(a,b){aMd.call(this,a,b)}
+function lGd(a,b){kGd.call(this,a,b)}
+function N$d(a,b){zNd.call(this,a,b)}
+function Whe(a,b){Q2d.call(this,a,b)}
+function She(a,b){Phe.call(this,a,b)}
+function RRd(a,b){PRd();Zjb(ORd,a,b)}
+function sB(a,b){a.q.setTime(Xdb(b))}
+function Xz(a){$wnd.clearTimeout(a)}
+function cr(a){return Qb(a),new Dl(a)}
+function mb(a,b){return dE(a)===dE(b)}
+function Mw(a,b){return a.a.a.a.cc(b)}
+function qeb(a,b){return zhb(a.a,0,b)}
+function SSb(a){return MSb(RD(a,74))}
+function Nfb(a){return eE((uFb(a),a))}
+function Ofb(a){return eE((uFb(a),a))}
+function gD(a){return hD(a.l,a.m,a.h)}
+function egb(a,b){return hgb(a.a,b.a)}
+function ygb(a,b){return Agb(a.a,b.a)}
+function Sfb(a,b){return Qfb(a.a,b.a)}
+function qhb(a,b){return a.indexOf(b)}
+function nOc(a,b){return a.j[b.p]==2}
+function cz(a,b){return a==b?0:a?1:-1}
+function AB(a){return a<10?'0'+a:''+a}
+function Kdb(a){return typeof a===ive}
+function oZb(a){return a==jZb||a==mZb}
+function pZb(a){return a==jZb||a==kZb}
+function ELb(a,b){return hgb(a.g,b.g)}
+function Q4b(a){return Wmb(a.b.b,a,0)}
+function Q2b(){J2b.call(this,0,0,0,0)}
+function Iub(){ctb.call(this,new gub)}
+function Znb(a,b){Wnb(a,0,a.length,b)}
+function Eyb(a,b){Rmb(a.a,b);return b}
+function Fkc(a,b){lkc();return b.a+=a}
+function Hkc(a,b){lkc();return b.a+=a}
+function Gkc(a,b){lkc();return b.c+=a}
+function ied(a,b){Rmb(a.c,b);return a}
+function Ped(a,b){ofd(a.a,b);return a}
+function ttb(a){this.a=Itb();this.b=a}
+function Ntb(a){this.a=Itb();this.b=a}
+function sjd(a){this.a=a.a;this.b=a.b}
+function Dl(a){this.a=a;zl.call(this)}
+function Gl(a){this.a=a;zl.call(this)}
+function Tid(){Uid.call(this,0,0,0,0)}
+function vfd(a){return ofd(new ufd,a)}
+function Ksd(a){return iyd(RD(a,123))}
+function Mvd(a){return a.vh()&&a.wh()}
+function Dod(a){return a!=zod&&a!=Aod}
+function Dmd(a){return a==ymd||a==zmd}
+function Emd(a){return a==Bmd||a==xmd}
+function xDc(a){return a==tDc||a==sDc}
+function yrc(a,b){return hgb(a.g,b.g)}
+function Yfe(a,b){return new Phe(b,a)}
+function Zfe(a,b){return new Phe(b,a)}
+function lr(a){return Dr(a.b.Kc(),a.a)}
+function IXd(a,b){yXd(a,b);zXd(a,a.D)}
+function Uxd(a,b,c){Vxd(a,b);Wxd(a,c)}
+function zyd(a,b,c){Cyd(a,b);Ayd(a,c)}
+function Byd(a,b,c){Dyd(a,b);Eyd(a,c)}
+function Gzd(a,b,c){Hzd(a,b);Izd(a,c)}
+function Nzd(a,b,c){Ozd(a,b);Pzd(a,c)}
+function eh(a,b,c){bh.call(this,a,b,c)}
+function zId(a){uId.call(this,a,true)}
+function nAb(){dAb.call(this,'Tail',3)}
+function iAb(){dAb.call(this,'Head',1)}
+function ejb(a){Pib();fjb.call(this,a)}
+function A3b(a){J2b.call(this,a,a,a,a)}
+function Pmb(a){a.c=$C(jJ,rve,1,0,5,1)}
+function yRb(a){a.b&&CRb(a);return a.a}
+function zRb(a){a.b&&CRb(a);return a.c}
+function mBb(a,b){if(dBb){return}a.b=b}
+function YCb(a,b){return a[a.length]=b}
+function _Cb(a,b){return a[a.length]=b}
+function l5b(a,b){return NGd(b,MCd(a))}
+function m5b(a,b){return NGd(b,MCd(a))}
+function DDd(a,b){return lp(Co(a.d),b)}
+function EDd(a,b){return lp(Co(a.g),b)}
+function FDd(a,b){return lp(Co(a.j),b)}
+function mGd(a,b){kGd.call(this,a.b,b)}
+function s0d(a,b){WGd(tYd(a.a),v0d(b))}
+function B4d(a,b){WGd(o4d(a.a),E4d(b))}
+function Asd(a,b,c){Byd(c,c.i+a,c.j+b)}
+function eFc(a,b,c){bD(a.c[b.g],b.g,c)}
+function zVd(a,b,c){RD(a.c,71).Gi(b,c)}
+function LMd(a,b,c){bD(a,b,c);return c}
+function DJb(a){Umb(a.Sf(),new HJb(a))}
+function Gvb(a){return a!=null?tb(a):0}
+function aOd(a){return a==null?0:tb(a)}
+function iue(a){Vse();Wse.call(this,a)}
+function Ug(a){this.a=a;Og.call(this,a)}
+function Zy(){Zy=geb;$wnd.Math.log(2)}
+function s7d(){s7d=geb;r7d=($Sd(),ZSd)}
+function FRc(){FRc=geb;ERc=new Zrb(u3)}
+function Hde(){Hde=geb;new Ide;new bnb}
+function Ide(){new Tsb;new Tsb;new Tsb}
+function yue(){throw Adb(new kib(bMe))}
+function Nue(){throw Adb(new kib(bMe))}
+function Bue(){throw Adb(new kib(cMe))}
+function Que(){throw Adb(new kib(cMe))}
+function Gp(a){this.a=a;rf.call(this,a)}
+function Np(a){this.a=a;rf.call(this,a)}
+function Sq(a,b){tm();this.a=a;this.b=b}
+function Jh(a,b){Qb(b);Ih(a).Jc(new jx)}
+function _mb(a,b){Ynb(a.c,a.c.length,b)}
+function xnb(a){return a.ab?1:0}
+function Kgb(a,b){return Ddb(a,b)>0?a:b}
+function hD(a,b,c){return {l:a,m:b,h:c}}
+function Mvb(a,b){a.a!=null&&_Mc(b,a.a)}
+function Lhc(a){Y0b(a,null);Z0b(a,null)}
+function xkc(a,b,c){return Zjb(a.g,c,b)}
+function bFc(a,b,c){return _Ec(b,c,a.c)}
+function jOc(a,b,c){return Zjb(a.k,c,b)}
+function pOc(a,b,c){qOc(a,b,c);return c}
+function FOc(a,b){dOc();return b.n.b+=a}
+function lUb(a){VTb.call(this);this.b=a}
+function y2b(a){v2b.call(this);this.a=a}
+function kAb(){dAb.call(this,'Range',2)}
+function $Fb(a){this.b=a;this.a=new bnb}
+function WQb(a){this.b=new gRb;this.a=a}
+function Lub(a){a.a=new svb;a.c=new svb}
+function nrc(a){a.a=new Tsb;a.d=new Tsb}
+function $Sc(a){_Sc(a,null);aTc(a,null)}
+function a2d(a,b){return xA(a.a,b,null)}
+function Cdd(a,b){return Zjb(a.a,b.a,b)}
+function ajd(a){return new rjd(a.a,a.b)}
+function Pid(a){return new rjd(a.c,a.d)}
+function Qid(a){return new rjd(a.c,a.d)}
+function Ake(a,b){return Tfe(a.c,a.b,b)}
+function ZD(a,b){return a!=null&&QD(a,b)}
+function br(a,b){return Jr(a.Kc(),b)!=-1}
+function Hr(a){return a.Ob()?a.Pb():null}
+function _p(a){this.b=(yob(),new uqb(a))}
+function zke(a){this.a=a;Tsb.call(this)}
+function Uhe(){Q2d.call(this,null,null)}
+function Yhe(){p3d.call(this,null,null)}
+function As(){qs.call(this,'INSTANCE',0)}
+function dXb(){_Wb();this.a=new Wed(UP)}
+function Hhb(a){return Ihb(a,0,a.length)}
+function Rv(a,b){return new ew(a.Kc(),b)}
+function $sb(a,b){return a.a.Bc(b)!=null}
+function hZd(a,b){sLd(a);a.Gc(RD(b,15))}
+function ONd(a,b,c){a.c.bd(b,RD(c,136))}
+function eOd(a,b,c){a.c.Ui(b,RD(c,136))}
+function eub(a,b){if(a.c){rub(b);qub(b)}}
+function oB(a,b){a.q.setHours(b);mB(a,b)}
+function vTb(a,b){Zid(b,a.a.a.a,a.a.a.b)}
+function tKb(a,b,c,d){bD(a.a[b.g],c.g,d)}
+function oKb(a,b,c){return a.a[b.g][c.g]}
+function AIc(a,b){return a.e[b.c.p][b.p]}
+function TIc(a,b){return a.c[b.c.p][b.p]}
+function pJc(a,b){return a.a[b.c.p][b.p]}
+function mOc(a,b){return a.j[b.p]=AOc(b)}
+function wAb(a,b){return a.a.Bc(b)!=null}
+function wXc(a,b){return Kfb(UD(b.a))<=a}
+function xXc(a,b){return Kfb(UD(b.a))>=a}
+function vhd(a,b){return jhb(a.f,b.Pg())}
+function cjd(a,b){return a.a*b.a+a.b*b.b}
+function Wsd(a,b){return a.a0?b/(a*a):b*100}
+function FUb(a,b){return a>0?b*b/a:b*b*100}
+function $5b(a,b){return RD(cub(a.a,b),34)}
+function doc(a,b){Mnc();return Rc(a,b.e,b)}
+function NCc(a,b,c){GCc();return c.Mg(a,b)}
+function L0c(a){B0c();return a.e.a+a.f.a/2}
+function N0c(a,b,c){B0c();return c.e.a-a*b}
+function V0c(a){B0c();return a.e.b+a.f.b/2}
+function X0c(a,b,c){B0c();return c.e.b-a*b}
+function _tb(a){a.d=new tub(a);a.e=new Tsb}
+function x3c(){this.a=new Tp;this.b=new Tp}
+function hmc(a){this.c=a;this.a=1;this.b=1}
+function C$b(a){z$b();A$b(this);this.Ff(a)}
+function Efd(a,b,c){Afd();a.pf(b)&&c.Cd(a)}
+function Red(a,b,c){return Rmb(b,Ted(a,c))}
+function Zid(a,b,c){a.a+=b;a.b+=c;return a}
+function jjd(a,b,c){a.a*=b;a.b*=c;return a}
+function mjd(a,b){a.a=b.a;a.b=b.b;return a}
+function fjd(a){a.a=-a.a;a.b=-a.b;return a}
+function njd(a,b,c){a.a-=b;a.b-=c;return a}
+function Gjd(a){Yub.call(this);zjd(this,a)}
+function Dbd(){qs.call(this,'GROW_TREE',0)}
+function WRb(){qs.call(this,'POLYOMINO',0)}
+function SVd(a,b,c){DVd.call(this,a,b,c,2)}
+function r0d(a,b,c){VGd(tYd(a.a),b,v0d(c))}
+function e3d(a,b){N2d();Q2d.call(this,a,b)}
+function D3d(a,b){j3d();p3d.call(this,a,b)}
+function F3d(a,b){j3d();D3d.call(this,a,b)}
+function H3d(a,b){j3d();p3d.call(this,a,b)}
+function PNd(a,b){return a.c.Fc(RD(b,136))}
+function A4d(a,b,c){VGd(o4d(a.a),b,E4d(c))}
+function Ard(a){this.c=a;Dyd(a,0);Eyd(a,0)}
+function Z8d(a,b){s7d();N8d.call(this,a,b)}
+function _8d(a,b){s7d();Z8d.call(this,a,b)}
+function b9d(a,b){s7d();Z8d.call(this,a,b)}
+function n9d(a,b){s7d();N8d.call(this,a,b)}
+function d9d(a,b){s7d();b9d.call(this,a,b)}
+function p9d(a,b){s7d();n9d.call(this,a,b)}
+function v9d(a,b){s7d();N8d.call(this,a,b)}
+function lge(a,b,c){return b.zl(a.e,a.c,c)}
+function nge(a,b,c){return b.Al(a.e,a.c,c)}
+function Wee(a,b,c){return tfe(Pee(a,b),c)}
+function Age(a,b){return Vvd(a.e,RD(b,54))}
+function _me(a){return a==null?null:Bqe(a)}
+function dne(a){return a==null?null:Iqe(a)}
+function gne(a){return a==null?null:jeb(a)}
+function hne(a){return a==null?null:jeb(a)}
+function TD(a){CFb(a==null||$D(a));return a}
+function UD(a){CFb(a==null||_D(a));return a}
+function WD(a){CFb(a==null||bE(a));return a}
+function lfb(a){if(a.o!=null){return}Bfb(a)}
+function lFb(a){if(!a){throw Adb(new _fb)}}
+function pFb(a){if(!a){throw Adb(new yeb)}}
+function sFb(a){if(!a){throw Adb(new Dvb)}}
+function yFb(a){if(!a){throw Adb(new cgb)}}
+function zmb(a){if(!a){throw Adb(new Jrb)}}
+function jQd(){jQd=geb;iQd=new LQd;new lRd}
+function u2c(){u2c=geb;t2c=new jGd('root')}
+function d6d(){HWd.call(this);this.Bb|=txe}
+function Pg(a,b){this.d=a;Lg(this);this.b=b}
+function WCb(a,b){NCb.call(this,a);this.a=b}
+function oDb(a,b){NCb.call(this,a);this.a=b}
+function bh(a,b,c){lg.call(this,a,b,c,null)}
+function fh(a,b,c){lg.call(this,a,b,c,null)}
+function Mf(a,b){this.c=a;ne.call(this,a,b)}
+function Uf(a,b){this.a=a;Mf.call(this,a,b)}
+function wB(a){this.q=new $wnd.Date(Xdb(a))}
+function OPb(a){if(a>8){return 0}return a+1}
+function iBb(a,b){if(dBb){return}Rmb(a.a,b)}
+function P5b(a,b){H5b();return n2b(b.d.i,a)}
+function qdc(a,b){Zcc();return new xdc(b,a)}
+function HAb(a,b,c){return a.Ne(b,c)<=0?c:b}
+function IAb(a,b,c){return a.Ne(b,c)<=0?b:c}
+function rgd(a,b){return RD(cub(a.b,b),143)}
+function tgd(a,b){return RD(cub(a.c,b),233)}
+function amc(a){return RD(Vmb(a.a,a.b),293)}
+function Mid(a){return new rjd(a.c,a.d+a.a)}
+function Jeb(a){return (uFb(a),a)?1231:1237}
+function EPc(a){return dOc(),xDc(RD(a,203))}
+function RMb(){RMb=geb;QMb=xsb((Qpd(),Ppd))}
+function YQb(a,b){b.a?ZQb(a,b):wAb(a.a,b.b)}
+function aJd(a,b,c){++a.j;a.tj();$Gd(a,b,c)}
+function $Id(a,b,c){++a.j;a.qj(b,a.Zi(b,c))}
+function B2d(a,b,c){var d;d=a.fd(b);d.Rb(c)}
+function Bzd(a,b,c){c=xvd(a,b,6,c);return c}
+function izd(a,b,c){c=xvd(a,b,3,c);return c}
+function KCd(a,b,c){c=xvd(a,b,9,c);return c}
+function SKb(a,b){Ivb(b,Pye);a.f=b;return a}
+function bOd(a,b){return (b&lve)%a.d.length}
+function Bke(a,b,c){return age(a.c,a.b,b,c)}
+function ZLd(a,b){this.c=a;ZHd.call(this,b)}
+function w0d(a,b){this.a=a;Q_d.call(this,b)}
+function F4d(a,b){this.a=a;Q_d.call(this,b)}
+function kGd(a,b){jGd.call(this,a);this.a=b}
+function U6d(a,b){L6d.call(this,a);this.a=b}
+function S9d(a,b){L6d.call(this,a);this.a=b}
+function jQb(a){gQb.call(this,0,0);this.f=a}
+function _hb(a,b,c){a.a+=Ihb(b,0,c);return a}
+function _A(a){!a.a&&(a.a=new jB);return a.a}
+function qlb(a,b){var c;c=a.e;a.e=b;return c}
+function Clb(a,b){var c;c=b;return !!a.Fe(c)}
+function Keb(a,b){Geb();return a==b?0:a?1:-1}
+function Ikb(a,b){a.a.bd(a.b,b);++a.b;a.c=-1}
+function hg(a){a.b?hg(a.b):a.f.c.zc(a.e,a.d)}
+function aub(a){akb(a.e);a.d.b=a.d;a.d.a=a.d}
+function VDb(a,b,c){xDb();HEb(a,b.Ve(a.a,c))}
+function Xrb(a,b,c){return Wrb(a,RD(b,22),c)}
+function WEb(a,b){return cFb(new Array(b),a)}
+function Fgb(a){return Ydb(Udb(a,32))^Ydb(a)}
+function XD(a){return String.fromCharCode(a)}
+function Dz(a){return a==null?null:a.message}
+function Rz(a,b,c){return a.apply(b,c);var d}
+function Btb(a,b){var c;c=a[Jxe];c.call(a,b)}
+function Ctb(a,b){var c;c=a[Jxe];c.call(a,b)}
+function O5b(a,b){H5b();return !n2b(b.d.i,a)}
+function R2b(a,b,c,d){J2b.call(this,a,b,c,d)}
+function TJb(){RJb.call(this);this.a=new pjd}
+function v2b(){this.n=new pjd;this.o=new pjd}
+function kGb(){this.b=new pjd;this.c=new bnb}
+function cUb(){this.a=new bnb;this.b=new bnb}
+function kWb(){this.a=new DTb;this.b=new vWb}
+function e6b(){this.b=new gub;this.a=new gub}
+function jIc(){this.b=new _sb;this.a=new _sb}
+function vYc(){this.b=new Tsb;this.a=new Tsb}
+function fWc(){this.b=new TVc;this.a=new IVc}
+function Yhc(){this.a=new yqc;this.b=new Sqc}
+function lNc(){this.a=new bnb;this.d=new bnb}
+function RJb(){this.n=new z3b;this.i=new Tid}
+function hq(a){this.a=(dk(a,iwe),new cnb(a))}
+function oq(a){this.a=(dk(a,iwe),new cnb(a))}
+function tLd(a){return a<100?null:new gLd(a)}
+function Lac(a,b){return a.n.a=(uFb(b),b)+10}
+function Mac(a,b){return a.n.a=(uFb(b),b)+10}
+function DYd(a,b){return b==a||PHd(sYd(b),a)}
+function nae(a,b){return Zjb(a.a,b,'')==null}
+function Hee(a,b){var c;c=b.qi(a.a);return c}
+function $id(a,b){a.a+=b.a;a.b+=b.b;return a}
+function ojd(a,b){a.a-=b.a;a.b-=b.b;return a}
+function sfd(a){aFb(a.j.c,0);a.a=-1;return a}
+function rCd(a,b,c){c=xvd(a,b,11,c);return c}
+function SDd(a,b,c){c!=null&&Kzd(b,uEd(a,c))}
+function TDd(a,b,c){c!=null&&Lzd(b,uEd(a,c))}
+function G5d(a,b,c,d){C5d.call(this,a,b,c,d)}
+function oie(a,b,c,d){C5d.call(this,a,b,c,d)}
+function sie(a,b,c,d){oie.call(this,a,b,c,d)}
+function Nie(a,b,c,d){Iie.call(this,a,b,c,d)}
+function Pie(a,b,c,d){Iie.call(this,a,b,c,d)}
+function Vie(a,b,c,d){Iie.call(this,a,b,c,d)}
+function Tie(a,b,c,d){Pie.call(this,a,b,c,d)}
+function $ie(a,b,c,d){Pie.call(this,a,b,c,d)}
+function Yie(a,b,c,d){Vie.call(this,a,b,c,d)}
+function bje(a,b,c,d){$ie.call(this,a,b,c,d)}
+function Dje(a,b,c,d){wje.call(this,a,b,c,d)}
+function aMd(a,b){veb.call(this,HJe+a+NIe+b)}
+function Hje(a,b){return a.jk().wi().ri(a,b)}
+function Ije(a,b){return a.jk().wi().ti(a,b)}
+function Lfb(a,b){return uFb(a),dE(a)===dE(b)}
+function lhb(a,b){return uFb(a),dE(a)===dE(b)}
+function mEb(a,b){return a.b.Bd(new pEb(a,b))}
+function sEb(a,b){return a.b.Bd(new vEb(a,b))}
+function yEb(a,b){return a.b.Bd(new BEb(a,b))}
+function Bk(a,b){return a.e=RD(a.d.Kb(b),159)}
+function uhb(a,b,c){return a.lastIndexOf(b,c)}
+function wWb(a,b,c){return Qfb(a[b.a],a[c.a])}
+function TWb(a,b){return pQb(b,(yCc(),gAc),a)}
+function Lpc(a,b){return hgb(b.a.d.p,a.a.d.p)}
+function Kpc(a,b){return hgb(a.a.d.p,b.a.d.p)}
+function zTc(a,b){return Qfb(a.c-a.s,b.c-b.s)}
+function qWc(a,b){return Qfb(a.b.e.a,b.b.e.a)}
+function sWc(a,b){return Qfb(a.c.e.a,b.c.e.a)}
+function $2b(a){return !a.c?-1:Wmb(a.c.a,a,0)}
+function Cod(a){return a==vod||a==xod||a==wod}
+function CMd(a,b){this.c=a;nMd.call(this,a,b)}
+function fq(a,b,c){this.a=a;qc.call(this,b,c)}
+function YDb(a){this.c=a;xxb.call(this,Sve,0)}
+function rk(a,b,c){this.c=b;this.b=c;this.a=a}
+function DMc(a){dMc();this.d=a;this.a=new wmb}
+function ho(a){_l();this.a=(yob(),new Lqb(a))}
+function Xmc(a,b){Dmd(a.f)?Ymc(a,b):Zmc(a,b)}
+function Lxb(a,b){Mxb.call(this,a,a.length,b)}
+function nBb(a,b){if(dBb){return}!!b&&(a.d=b)}
+function ZNd(a,b){return ZD(b,15)&&_Gd(a.c,b)}
+function AVd(a,b,c){return RD(a.c,71).Wk(b,c)}
+function BVd(a,b,c){return RD(a.c,71).Xk(b,c)}
+function mge(a,b,c){return lge(a,RD(b,343),c)}
+function oge(a,b,c){return nge(a,RD(b,343),c)}
+function Ige(a,b,c){return Hge(a,RD(b,343),c)}
+function Kge(a,b,c){return Jge(a,RD(b,343),c)}
+function Fn(a,b){return b==null?null:Xv(a.b,b)}
+function Qeb(a){return _D(a)?(uFb(a),a):a.ue()}
+function Rfb(a){return !isNaN(a)&&!isFinite(a)}
+function Zub(a){Lub(this);Xub(this);ye(this,a)}
+function dnb(a){Pmb(this);YEb(this.c,0,a.Pc())}
+function Fsb(a,b,c){this.a=a;this.b=b;this.c=c}
+function Vtb(a,b,c){this.a=a;this.b=b;this.c=c}
+function hvb(a,b,c){this.d=a;this.b=c;this.a=b}
+function aBb(a){this.a=a;gib();Hdb(Date.now())}
+function wzb(a){Ckb(a.a);Yyb(a.c,a.b);a.b=null}
+function wvb(){wvb=geb;uvb=new xvb;vvb=new zvb}
+function KMd(){KMd=geb;JMd=$C(jJ,rve,1,0,5,1)}
+function TTd(){TTd=geb;STd=$C(jJ,rve,1,0,5,1)}
+function yUd(){yUd=geb;xUd=$C(jJ,rve,1,0,5,1)}
+function _l(){_l=geb;new im((yob(),yob(),vob))}
+function gAb(a){cAb();return ws((qAb(),pAb),a)}
+function zBb(a){xBb();return ws((CBb(),BBb),a)}
+function FHb(a){DHb();return ws((IHb(),HHb),a)}
+function NHb(a){LHb();return ws((QHb(),PHb),a)}
+function kIb(a){iIb();return ws((nIb(),mIb),a)}
+function _Jb(a){ZJb();return ws((cKb(),bKb),a)}
+function GKb(a){EKb();return ws((JKb(),IKb),a)}
+function xLb(a){vLb();return ws((ALb(),zLb),a)}
+function mMb(a){hMb();return ws((pMb(),oMb),a)}
+function oOb(a){mOb();return ws((rOb(),qOb),a)}
+function KPb(a){IPb();return ws((NPb(),MPb),a)}
+function KRb(a){IRb();return ws((NRb(),MRb),a)}
+function XRb(a){VRb();return ws(($Rb(),ZRb),a)}
+function VUb(a){TUb();return ws((YUb(),XUb),a)}
+function CWb(a){AWb();return ws((FWb(),EWb),a)}
+function uXb(a){sXb();return ws((xXb(),wXb),a)}
+function tZb(a){nZb();return ws((wZb(),vZb),a)}
+function W$b(a){U$b();return ws((Z$b(),Y$b),a)}
+function Mb(a,b){if(!a){throw Adb(new agb(b))}}
+function Vb(a){if(!a){throw Adb(new dgb(tve))}}
+function rFb(a,b){if(a!=b){throw Adb(new Jrb)}}
+function KQb(a,b,c){this.a=a;this.b=b;this.c=c}
+function lRb(a,b,c){this.a=a;this.b=b;this.c=c}
+function h7b(a,b,c){this.a=a;this.b=b;this.c=c}
+function J0b(a,b,c){this.b=a;this.a=b;this.c=c}
+function dNb(a,b,c){this.b=a;this.c=b;this.a=c}
+function oac(a,b,c){this.a=a;this.b=b;this.c=c}
+function F1b(a,b,c){this.e=b;this.b=a;this.d=c}
+function Ecc(a,b,c){this.b=a;this.a=b;this.c=c}
+function UDb(a,b,c){xDb();a.a.Yd(b,c);return b}
+function CJb(a){var b;b=new BJb;b.e=a;return b}
+function _Nb(a){var b;b=new YNb;b.b=a;return b}
+function U9b(){U9b=geb;S9b=new bac;T9b=new eac}
+function Rec(){Rec=geb;Qec=new efc;Pec=new jfc}
+function lkc(){lkc=geb;jkc=new Mkc;kkc=new Okc}
+function loc(a){joc();return ws((ooc(),noc),a)}
+function kcc(a){hcc();return ws((ncc(),mcc),a)}
+function yec(a){vec();return ws((Bec(),Aec),a)}
+function gnc(a){enc();return ws((jnc(),inc),a)}
+function Enc(a){Bnc();return ws((Hnc(),Gnc),a)}
+function gpc(a){epc();return ws((jpc(),ipc),a)}
+function dtc(a){btc();return ws((gtc(),ftc),a)}
+function ltc(a){jtc();return ws((otc(),ntc),a)}
+function xtc(a){stc();return ws((Atc(),ztc),a)}
+function Gtc(a){Etc();return ws((Jtc(),Itc),a)}
+function Utc(a){Ptc();return ws((Xtc(),Wtc),a)}
+function auc(a){$tc();return ws((duc(),cuc),a)}
+function avc(a){$uc();return ws((dvc(),cvc),a)}
+function qvc(a){ovc();return ws((tvc(),svc),a)}
+function zvc(a){xvc();return ws((Cvc(),Bvc),a)}
+function Ivc(a){Gvc();return ws((Lvc(),Kvc),a)}
+function Qvc(a){Ovc();return ws((Tvc(),Svc),a)}
+function Quc(a){Ouc();return ws((Tuc(),Suc),a)}
+function juc(a){huc();return ws((muc(),luc),a)}
+function wuc(a){tuc();return ws((zuc(),yuc),a)}
+function Euc(a){Cuc();return ws((Huc(),Guc),a)}
+function exc(a){cxc();return ws((hxc(),gxc),a)}
+function eDc(a){_Cc();return ws((hDc(),gDc),a)}
+function oDc(a){lDc();return ws((rDc(),qDc),a)}
+function ADc(a){wDc();return ws((DDc(),CDc),a)}
+function ODc(a){JDc();return ws((RDc(),QDc),a)}
+function cEc(a){aEc();return ws((fEc(),eEc),a)}
+function mEc(a){kEc();return ws((pEc(),oEc),a)}
+function uEc(a){sEc();return ws((xEc(),wEc),a)}
+function DEc(a){BEc();return ws((GEc(),FEc),a)}
+function MEc(a){KEc();return ws((PEc(),OEc),a)}
+function VEc(a){TEc();return ws((YEc(),XEc),a)}
+function nFc(a){lFc();return ws((qFc(),pFc),a)}
+function wFc(a){uFc();return ws((zFc(),yFc),a)}
+function FFc(a){DFc();return ws((IFc(),HFc),a)}
+function TKc(a){RKc();return ws((WKc(),VKc),a)}
+function uNc(a){sNc();return ws((xNc(),wNc),a)}
+function yQc(a){wQc();return ws((BQc(),AQc),a)}
+function GQc(a){EQc();return ws((JQc(),IQc),a)}
+function hTc(a){fTc();return ws((kTc(),jTc),a)}
+function fVc(a){dVc();return ws((iVc(),hVc),a)}
+function bWc(a){YVc();return ws((eWc(),dWc),a)}
+function ZYc(a){WYc();return ws((aZc(),_Yc),a)}
+function NZc(a){LZc();return ws((QZc(),PZc),a)}
+function u_c(a){s_c();return ws((x_c(),w_c),a)}
+function C_c(a){A_c();return ws((F_c(),E_c),a)}
+function C2c(a){x2c();return ws((F2c(),E2c),a)}
+function j3c(a){g3c();return ws((m3c(),l3c),a)}
+function j4c(a){g4c();return ws((m4c(),l4c),a)}
+function _3c(a){Y3c();return ws((c4c(),b4c),a)}
+function m5c(a){j5c();return ws((p5c(),o5c),a)}
+function w5c(a){t5c();return ws((z5c(),y5c),a)}
+function h6c(a){f6c();return ws((k6c(),j6c),a)}
+function C6c(a){z6c();return ws((F6c(),E6c),a)}
+function b7c(a){_6c();return ws((e7c(),d7c),a)}
+function E8c(a){z8c();return ws((H8c(),G8c),a)}
+function R8b(a){P8b();return ws((U8b(),T8b),a)}
+function t3b(a){r3b();return ws((w3b(),v3b),a)}
+function g9c(a){b9c();return ws((j9c(),i9c),a)}
+function G9c(a){B9c();return ws((J9c(),I9c),a)}
+function uad(a){sad();return ws((xad(),wad),a)}
+function xbd(a){sbd();return ws((Abd(),zbd),a)}
+function ibd(a){gbd();return ws((lbd(),kbd),a)}
+function Gbd(a){Cbd();return ws((Jbd(),Ibd),a)}
+function Obd(a){Mbd();return ws((Rbd(),Qbd),a)}
+function Zbd(a){Xbd();return ws((acd(),_bd),a)}
+function fdd(a){_cd();return ws((idd(),hdd),a)}
+function qdd(a){ldd();return ws((tdd(),sdd),a)}
+function $hd(a){Yhd();return ws((bid(),aid),a)}
+function mid(a){kid();return ws((pid(),oid),a)}
+function Tjd(a){Rjd();return ws((Wjd(),Vjd),a)}
+function wkd(a){ukd();return ws((zkd(),ykd),a)}
+function Hmd(a){Cmd();return ws((Kmd(),Jmd),a)}
+function Qmd(a){Omd();return ws((Tmd(),Smd),a)}
+function $md(a){Ymd();return ws((bnd(),and),a)}
+function knd(a){ind();return ws((nnd(),mnd),a)}
+function Hnd(a){Fnd();return ws((Knd(),Jnd),a)}
+function Snd(a){Pnd();return ws((Vnd(),Und),a)}
+function god(a){dod();return ws((jod(),iod),a)}
+function rod(a){pod();return ws((uod(),tod),a)}
+function Fod(a){Bod();return ws((Iod(),Hod),a)}
+function Tod(a){Pod();return ws((Wod(),Vod),a)}
+function wpd(a){qpd();return ws((zpd(),ypd),a)}
+function Spd(a){Qpd();return ws((Vpd(),Upd),a)}
+function fqd(a){dqd();return ws((iqd(),hqd),a)}
+function oqd(a){mqd();return ws((rqd(),qqd),a)}
+function zsc(a,b){return (uFb(a),a)+(uFb(b),b)}
+function wqd(a){uqd();return ws((Eqd(),Dqd),a)}
+function Ird(a){Grd();return ws((Lrd(),Krd),a)}
+function vtd(a){ttd();return ws((ytd(),xtd),a)}
+function dMc(){dMc=geb;bMc=(qpd(),ppd);cMc=Xod}
+function uqd(){uqd=geb;sqd=new zqd;tqd=new Bqd}
+function wJc(a){!a.e&&(a.e=new bnb);return a.e}
+function BTc(a,b){this.c=a;this.a=b;this.b=b-a}
+function g8c(a,b,c){this.a=a;this.b=b;this.c=c}
+function gud(a,b,c){this.a=a;this.b=b;this.c=c}
+function Wdd(a,b,c){this.a=a;this.b=b;this.c=c}
+function ced(a,b,c){this.a=a;this.b=b;this.c=c}
+function pFd(a,b,c){this.a=a;this.b=b;this.c=c}
+function ZPd(a,b,c){this.a=a;this.b=b;this.c=c}
+function g7d(a,b,c){this.e=a;this.a=b;this.c=c}
+function K7d(a,b,c){s7d();C7d.call(this,a,b,c)}
+function f9d(a,b,c){s7d();O8d.call(this,a,b,c)}
+function r9d(a,b,c){s7d();O8d.call(this,a,b,c)}
+function x9d(a,b,c){s7d();O8d.call(this,a,b,c)}
+function h9d(a,b,c){s7d();f9d.call(this,a,b,c)}
+function j9d(a,b,c){s7d();f9d.call(this,a,b,c)}
+function l9d(a,b,c){s7d();j9d.call(this,a,b,c)}
+function t9d(a,b,c){s7d();r9d.call(this,a,b,c)}
+function z9d(a,b,c){s7d();x9d.call(this,a,b,c)}
+function S2b(a){J2b.call(this,a.d,a.c,a.a,a.b)}
+function B3b(a){J2b.call(this,a.d,a.c,a.a,a.b)}
+function Og(a){this.d=a;Lg(this);this.b=ed(a.d)}
+function cGd(a){aGd();return ws((fGd(),eGd),a)}
+function gk(a,b){Qb(a);Qb(b);return new hk(a,b)}
+function dr(a,b){Qb(a);Qb(b);return new mr(a,b)}
+function hr(a,b){Qb(a);Qb(b);return new sr(a,b)}
+function Dr(a,b){Qb(a);Qb(b);return new Rr(a,b)}
+function Uub(a){sFb(a.b!=0);return Wub(a,a.a.a)}
+function Vub(a){sFb(a.b!=0);return Wub(a,a.c.b)}
+function q$d(a){!a.c&&(a.c=new X9d);return a.c}
+function cv(a){var b;b=new bnb;xr(b,a);return b}
+function Vx(a){var b;b=new _sb;xr(b,a);return b}
+function Yx(a){var b;b=new xAb;_q(b,a);return b}
+function gv(a){var b;b=new Yub;_q(b,a);return b}
+function RD(a,b){CFb(a==null||QD(a,b));return a}
+function Mxb(a,b,c){Axb.call(this,b,c);this.a=a}
+function kB(a,b){this.c=a;this.b=b;this.a=false}
+function hCb(){this.a=';,;';this.b='';this.c=''}
+function $Cb(a,b,c){this.b=a;pxb.call(this,b,c)}
+function uub(a,b,c){this.c=a;xlb.call(this,b,c)}
+function fZb(a,b,c){eZb.call(this,a,b);this.b=c}
+function YEb(a,b,c){VEb(c,0,a,b,c.length,false)}
+function JYb(a,b,c,d,e){a.b=b;a.c=c;a.d=d;a.a=e}
+function D2b(a,b,c,d,e){a.d=b;a.c=c;a.a=d;a.b=e}
+function XDb(a,b){if(b){a.b=b;a.a=(LCb(b),b.a)}}
+function mFb(a,b){if(!a){throw Adb(new agb(b))}}
+function zFb(a,b){if(!a){throw Adb(new dgb(b))}}
+function qFb(a,b){if(!a){throw Adb(new zeb(b))}}
+function zqc(a,b){mqc();return hgb(a.d.p,b.d.p)}
+function T0c(a,b){B0c();return Qfb(a.e.b,b.e.b)}
+function U0c(a,b){B0c();return Qfb(a.e.a,b.e.a)}
+function Xoc(a,b){return hgb(N3b(a.d),N3b(b.d))}
+function Izb(a,b){return !!b&&Jzb(a,b.d)?b:null}
+function $lc(a,b){return b==(qpd(),ppd)?a.c:a.d}
+function Qdb(a){return Edb(yD(Kdb(a)?Wdb(a):a))}
+function Nid(a){return new rjd(a.c+a.b,a.d+a.a)}
+function GSd(a){return a!=null&&!mSd(a,aSd,bSd)}
+function DSd(a,b){return (JSd(a)<<4|JSd(b))&Bwe}
+function Rid(a,b,c,d,e){a.c=b;a.d=c;a.b=d;a.a=e}
+function y8b(a){var b,c;b=a.b;c=a.c;a.b=c;a.c=b}
+function B8b(a){var b,c;c=a.d;b=a.a;a.d=b;a.a=c}
+function u6d(a,b){var c;c=a.c;t6d(a,b);return c}
+function Nqd(a,b){b<0?(a.g=-1):(a.g=b);return a}
+function kjd(a,b){gjd(a);a.a*=b;a.b*=b;return a}
+function hrc(a,b,c){grc.call(this,b,c);this.d=a}
+function PZd(a,b,c){kZd.call(this,a,b);this.c=c}
+function Kfe(a,b,c){kZd.call(this,a,b);this.c=c}
+function zUd(a){yUd();kUd.call(this);this.ci(a)}
+function Yee(){ree();Zee.call(this,(YSd(),XSd))}
+function Yse(a){Vse();++Use;return new Hte(0,a)}
+function uke(){uke=geb;tke=(yob(),new mpb(eLe))}
+function ux(){ux=geb;new wx((kl(),jl),(Wk(),Vk))}
+function ugb(){ugb=geb;tgb=$C(bJ,Nve,17,256,0,1)}
+function zUb(){this.b=Kfb(UD(iGd((yVb(),sVb))))}
+function Pq(a){this.b=a;this.a=gn(this.b.a).Od()}
+function mr(a,b){this.b=a;this.a=b;zl.call(this)}
+function sr(a,b){this.a=a;this.b=b;zl.call(this)}
+function s_d(a,b,c){this.a=a;N$d.call(this,b,c)}
+function n_d(a,b,c){this.a=a;N$d.call(this,b,c)}
+function sDd(a,b,c){var d;d=new OC(c);sC(a,b,d)}
+function _Eb(a,b,c){var d;d=a[b];a[b]=c;return d}
+function UEb(a){var b;b=a.slice();return dD(b,a)}
+function SJb(a){var b;b=a.n;return a.a.b+b.d+b.a}
+function PKb(a){var b;b=a.n;return a.e.b+b.d+b.a}
+function QKb(a){var b;b=a.n;return a.e.a+b.b+b.c}
+function rub(a){a.a.b=a.b;a.b.a=a.a;a.a=a.b=null}
+function Mub(a,b){Pub(a,b,a.c.b,a.c);return true}
+function w2b(a){if(a.a){return a.a}return R0b(a)}
+function NSb(a){HSb();return JGd(a)==vCd(LGd(a))}
+function OSb(a){HSb();return LGd(a)==vCd(JGd(a))}
+function l_b(a,b){return k_b(a,new eZb(b.a,b.b))}
+function xn(a,b){return fn(),ck(a,b),new zy(a,b)}
+function fmc(a,b){return a.c=b){throw Adb(new web)}}
+function JDb(a,b){return MDb(a,(uFb(b),new JAb(b)))}
+function KDb(a,b){return MDb(a,(uFb(b),new LAb(b)))}
+function prc(a,b,c){return qrc(a,RD(b,12),RD(c,12))}
+function q4b(a){return J3b(),RD(a,12).g.c.length!=0}
+function v4b(a){return J3b(),RD(a,12).e.c.length!=0}
+function sdc(a,b){Zcc();return Qfb(b.a.o.a,a.a.o.a)}
+function d_d(a,b){(b.Bb&QHe)!=0&&!a.a.o&&(a.a.o=b)}
+function T3c(a,b){b.Ug("General 'Rotator",1);S3c(a)}
+function MCc(a,b,c){b.qf(c,Kfb(UD(Wjb(a.b,c)))*a.a)}
+function yid(a,b,c){tid();return xid(a,b)&&xid(a,c)}
+function Rod(a){Pod();return !a.Hc(Lod)&&!a.Hc(Nod)}
+function Nrc(a){if(a.e){return Src(a.e)}return null}
+function Zdb(a){if(Kdb(a)){return ''+a}return GD(a)}
+function XNc(a){var b;b=a;while(b.f){b=b.f}return b}
+function HBb(a,b,c){bD(b,0,tCb(b[0],c[0]));return b}
+function Gpc(a,b,c,d){var e;e=a.i;e.i=b;e.a=c;e.b=d}
+function C5d(a,b,c,d){XZd.call(this,a,b,c);this.b=d}
+function N3d(a,b,c,d,e){O3d.call(this,a,b,c,d,e,-1)}
+function b4d(a,b,c,d,e){c4d.call(this,a,b,c,d,e,-1)}
+function Iie(a,b,c,d){PZd.call(this,a,b,c);this.b=d}
+function Xde(a){uId.call(this,a,false);this.a=false}
+function Bqd(){vqd.call(this,'LOOKAHEAD_LAYOUT',1)}
+function nNd(a){this.b=a;mMd.call(this,a);mNd(this)}
+function vNd(a){this.b=a;BMd.call(this,a);uNd(this)}
+function J5d(a,b,c){this.a=a;G5d.call(this,b,c,5,6)}
+function wje(a,b,c,d){this.b=a;XZd.call(this,b,c,d)}
+function Tj(a,b){this.b=a;Aj.call(this,a.b);this.a=b}
+function NLc(a){this.a=LLc(a.a);this.b=new dnb(a.b)}
+function Fx(a,b){tm();Ex.call(this,a,Pm(new mob(b)))}
+function _se(a,b){Vse();++Use;return new aue(a,b,0)}
+function bte(a,b){Vse();++Use;return new aue(6,a,b)}
+function Ztb(a,b){uFb(b);while(a.Ob()){b.Cd(a.Pb())}}
+function Ujb(a,b){return bE(b)?Yjb(a,b):!!qtb(a.f,b)}
+function O_d(a,b){return b.Vh()?Vvd(a.b,RD(b,54)):b}
+function whb(a,b){return lhb(a.substr(0,b.length),b)}
+function Fl(a){return new is(new Il(a.a.length,a.a))}
+function Oid(a){return new rjd(a.c+a.b/2,a.d+a.a/2)}
+function yD(a){return hD(~a.l&dxe,~a.m&dxe,~a.h&exe)}
+function cE(a){return typeof a===gve||typeof a===kve}
+function akb(a){a.f=new ttb(a);a.i=new Ntb(a);++a.g}
+function Klb(a){if(!a){throw Adb(new Dvb)}return a.d}
+function smb(a){var b;b=omb(a);sFb(b!=null);return b}
+function tmb(a){var b;b=pmb(a);sFb(b!=null);return b}
+function tv(a,b){var c;c=a.a.gc();Sb(b,c);return c-b}
+function Ysb(a,b){var c;c=a.a.zc(b,a);return c==null}
+function rAb(a,b){return a.a.zc(b,(Geb(),Eeb))==null}
+function _nb(a){return new SDb(null,$nb(a,a.length))}
+function yPb(a,b,c){return zPb(a,RD(b,42),RD(c,176))}
+function Wrb(a,b,c){zsb(a.a,b);return _Eb(a.b,b.g,c)}
+function fyb(a,b,c){lyb(c,a.a.c.length);$mb(a.a,c,b)}
+function Knb(a,b,c,d){nFb(b,c,a.length);Onb(a,b,c,d)}
+function Onb(a,b,c,d){var e;for(e=b;e0?$wnd.Math.log(a/b):-100}
+function Agb(a,b){return Ddb(a,b)<0?-1:Ddb(a,b)>0?1:0}
+function Dge(a,b){hZd(a,ZD(b,160)?b:RD(b,2036).Rl())}
+function vFb(a,b){if(a==null){throw Adb(new Ogb(b))}}
+function $nb(a,b){return jxb(b,a.length),new Gxb(a,b)}
+function hsc(a,b){if(!b){return false}return ye(a,b)}
+function Gs(){zs();return cD(WC(RG,1),jwe,549,0,[ys])}
+function Xib(a){return a.e==0?a:new cjb(-a.e,a.d,a.a)}
+function $Nb(a,b){return Qfb(a.c.c+a.c.b,b.c.c+b.c.b)}
+function cvb(a,b){Pub(a.d,b,a.b.b,a.b);++a.a;a.c=null}
+function JCb(a,b){!a.c?Rmb(a.b,b):JCb(a.c,b);return a}
+function KB(a,b,c){var d;d=JB(a,b);LB(a,b,c);return d}
+function Rnb(a,b,c){var d;for(d=0;d=a.g}
+function bD(a,b,c){pFb(c==null||VC(a,c));return a[b]=c}
+function yhb(a,b){BFb(b,a.length+1);return a.substr(b)}
+function yxb(a,b){uFb(b);while(a.c=a){return new rDb}return iDb(a-1)}
+function Y2b(a){if(!a.a&&!!a.c){return a.c.b}return a.a}
+function Zx(a){if(ZD(a,616)){return a}return new sy(a)}
+function LCb(a){if(!a.c){MCb(a);a.d=true}else{LCb(a.c)}}
+function ICb(a){if(!a.c){a.d=true;KCb(a)}else{a.c.$e()}}
+function bHb(a){a.b=false;a.c=false;a.d=false;a.a=false}
+function uMc(a){var b,c;b=a.c.i.c;c=a.d.i.c;return b==c}
+function _vd(a,b){var c;c=a.Ih(b);c>=0?a.ki(c):Tvd(a,b)}
+function mtd(a,b){a.c<0||a.b.b0){a=a<<1|(a<0?1:0)}return a}
+function BGc(a,b){var c;c=new R4b(a);ZEb(b.c,c);return c}
+function FMb(a,b){a.u.Hc((Pod(),Lod))&&DMb(a,b);HMb(a,b)}
+function Fvb(a,b){return dE(a)===dE(b)||a!=null&&pb(a,b)}
+function Vrb(a,b){return Bsb(a.a,b)?a.b[RD(b,22).g]:null}
+function YRb(){VRb();return cD(WC($O,1),jwe,488,0,[URb])}
+function ybd(){sbd();return cD(WC(M1,1),jwe,489,0,[rbd])}
+function Hbd(){Cbd();return cD(WC(N1,1),jwe,558,0,[Bbd])}
+function gdd(){_cd();return cD(WC(V1,1),jwe,539,0,[$cd])}
+function iyd(a){!a.n&&(a.n=new C5d(I4,a,1,7));return a.n}
+function wCd(a){!a.c&&(a.c=new C5d(K4,a,9,9));return a.c}
+function mzd(a){!a.c&&(a.c=new Yie(E4,a,5,8));return a.c}
+function lzd(a){!a.b&&(a.b=new Yie(E4,a,4,7));return a.b}
+function Sed(a){a.j.c.length=0;Ae(a.c);sfd(a.a);return a}
+function Afe(a){a.e==fLe&&Gfe(a,Aee(a.g,a.b));return a.e}
+function Bfe(a){a.f==fLe&&Hfe(a,Bee(a.g,a.b));return a.f}
+function xBd(a,b,c,d){wBd(a,b,c,false);j1d(a,d);return a}
+function oNd(a,b){this.b=a;nMd.call(this,a,b);mNd(this)}
+function wNd(a,b){this.b=a;CMd.call(this,a,b);uNd(this)}
+function Kmb(a){this.d=a;this.a=this.d.b;this.b=this.d.c}
+function oy(a,b){this.b=a;this.c=b;this.a=new Osb(this.b)}
+function ihb(a,b){BFb(b,a.length);return a.charCodeAt(b)}
+function NDd(a,b){CGd(a,Kfb(vDd(b,'x')),Kfb(vDd(b,'y')))}
+function $Dd(a,b){CGd(a,Kfb(vDd(b,'x')),Kfb(vDd(b,'y')))}
+function CDb(a,b){MCb(a);return new SDb(a,new hEb(b,a.a))}
+function GDb(a,b){MCb(a);return new SDb(a,new zEb(b,a.a))}
+function HDb(a,b){MCb(a);return new WCb(a,new nEb(b,a.a))}
+function IDb(a,b){MCb(a);return new oDb(a,new tEb(b,a.a))}
+function Ty(a,b){return new Ry(RD(Qb(a),50),RD(Qb(b),50))}
+function nHb(a,b){return Qfb(a.d.c+a.d.b/2,b.d.c+b.d.b/2)}
+function gTb(a,b,c){c.a?Eyd(a,b.b-a.f/2):Dyd(a,b.a-a.g/2)}
+function WYb(a,b){return Qfb(a.g.c+a.g.b/2,b.g.c+b.g.b/2)}
+function RZb(a,b){NZb();return Qfb((uFb(a),a),(uFb(b),b))}
+function wSd(a){return a!=null&&tpb(eSd,a.toLowerCase())}
+function Ae(a){var b;for(b=a.Kc();b.Ob();){b.Pb();b.Qb()}}
+function Ih(a){var b;b=a.b;!b&&(a.b=b=new Xh(a));return b}
+function R0b(a){var b;b=Z5b(a);if(b){return b}return null}
+function BSb(a,b){var c,d;c=a/b;d=eE(c);c>d&&++d;return d}
+function Ck(a,b,c){var d;d=RD(a.d.Kb(c),159);!!d&&d.Nb(b)}
+function Vhc(a,b,c){tqc(a.a,c);Jpc(c);Kqc(a.b,c);bqc(b,c)}
+function oNc(a,b,c,d){this.a=a;this.c=b;this.b=c;this.d=d}
+function ROc(a,b,c,d){this.c=a;this.b=b;this.a=c;this.d=d}
+function uPc(a,b,c,d){this.c=a;this.b=b;this.d=c;this.a=d}
+function Uid(a,b,c,d){this.c=a;this.d=b;this.b=c;this.a=d}
+function GTc(a,b,c,d){this.a=a;this.d=b;this.c=c;this.b=d}
+function t1b(a,b,c,d){this.a=a;this.e=b;this.d=c;this.c=d}
+function $td(a,b,c,d){this.a=a;this.c=b;this.d=c;this.b=d}
+function ehb(a,b,c){this.a=ywe;this.d=a;this.b=b;this.c=c}
+function fpc(a,b,c,d){qs.call(this,a,b);this.a=c;this.b=d}
+function Uwb(a,b){this.d=(uFb(a),a);this.a=16449;this.c=b}
+function CIc(a){this.a=new bnb;this.e=$C(kE,Nve,53,a,0,2)}
+function ELc(a){a.Ug('No crossing minimization',1);a.Vg()}
+function Evb(){yz.call(this,'There is no more element.')}
+function OEd(a,b,c,d){this.a=a;this.b=b;this.c=c;this.d=d}
+function PEd(a,b,c,d){this.a=a;this.b=b;this.c=c;this.d=d}
+function h7d(a,b,c,d){this.e=a;this.a=b;this.c=c;this.d=d}
+function x7d(a,b,c,d){this.a=a;this.c=b;this.d=c;this.b=d}
+function C8d(a,b,c,d){s7d();M7d.call(this,b,c,d);this.a=a}
+function J8d(a,b,c,d){s7d();M7d.call(this,b,c,d);this.a=a}
+function lwd(a,b,c){var d,e;d=oSd(a);e=b.ti(c,d);return e}
+function lBd(a){var b,c;c=(b=new s2d,b);l2d(c,a);return c}
+function mBd(a){var b,c;c=(b=new s2d,b);p2d(c,a);return c}
+function HDd(a,b){var c;c=Wjb(a.f,b);wEd(b,c);return null}
+function uCd(a){!a.b&&(a.b=new C5d(G4,a,12,3));return a.b}
+function VD(a){CFb(a==null||cE(a)&&!(a.Tm===keb));return a}
+function gz(a){if(a.n){a.e!==rwe&&a.je();a.j=null}return a}
+function Ng(a){ig(a.d);if(a.d.d!=a.c){throw Adb(new Jrb)}}
+function Bkb(a){sFb(a.b0&&wPd(this)}
+function Vg(a,b){this.a=a;Pg.call(this,a,RD(a.d,15).fd(b))}
+function lrd(a,b){return Qfb(urd(a)*trd(a),urd(b)*trd(b))}
+function mrd(a,b){return Qfb(urd(a)*trd(a),urd(b)*trd(b))}
+function n5b(a){return ozd(a)&&Heb(TD(Gxd(a,(yCc(),OAc))))}
+function Sfc(a,b){return Rc(a,RD(mQb(b,(yCc(),tBc)),17),b)}
+function lic(a,b){RD(mQb(a,(Ywc(),qwc)),15).Fc(b);return b}
+function C2b(a,b){a.b=b.b;a.c=b.c;a.d=b.d;a.a=b.a;return a}
+function cEb(a,b,c,d){this.b=a;this.c=d;xxb.call(this,b,c)}
+function Ulc(a,b,c){a.i=0;a.e=0;if(b==c){return}Qlc(a,b,c)}
+function Vlc(a,b,c){a.i=0;a.e=0;if(b==c){return}Rlc(a,b,c)}
+function akc(a,b,c){Wjc();return _Gb(RD(Wjb(a.e,b),529),c)}
+function nd(a){var b;return b=a.f,!b?(a.f=new ne(a,a.c)):b}
+function nTc(a,b){return VTc(a.j,b.s,b.c)+VTc(b.e,a.s,a.c)}
+function Rrc(a,b){if(!!a.e&&!a.e.a){Prc(a.e,b);Rrc(a.e,b)}}
+function Qrc(a,b){if(!!a.d&&!a.d.a){Prc(a.d,b);Qrc(a.d,b)}}
+function krd(a,b){return -Qfb(urd(a)*trd(a),urd(b)*trd(b))}
+function gtd(a){return RD(a.ld(),149).Pg()+':'+jeb(a.md())}
+function EBd(){BBd(this,new yAd);this.wb=(lTd(),kTd);jTd()}
+function G7b(a){this.b=new bnb;Tmb(this.b,this.b);this.a=a}
+function WWc(a,b){new Yub;this.a=new Ejd;this.b=a;this.c=b}
+function urb(){urb=geb;rrb=new wrb;srb=new wrb;trb=new Brb}
+function yob(){yob=geb;vob=new Job;wob=new apb;xob=new ipb}
+function FGb(){FGb=geb;CGb=new AGb;EGb=new fHb;DGb=new YGb}
+function HSb(){HSb=geb;GSb=new bnb;FSb=new Tsb;ESb=new bnb}
+function Rb(a,b){if(a==null){throw Adb(new Ogb(b))}return a}
+function tCd(a){!a.a&&(a.a=new C5d(J4,a,10,11));return a.a}
+function uYd(a){!a.q&&(a.q=new C5d(s7,a,11,10));return a.q}
+function xYd(a){!a.s&&(a.s=new C5d(y7,a,21,17));return a.s}
+function er(a){Qb(a);return Er(new is(Mr(a.a.Kc(),new ir)))}
+function hfd(a,b){rb(a);rb(b);return ns(RD(a,22),RD(b,22))}
+function qDd(a,b,c){var d,e;d=Qeb(c);e=new hC(d);sC(a,b,e)}
+function d4d(a,b,c,d,e,f){c4d.call(this,a,b,c,d,e,f?-2:-1)}
+function sje(a,b,c,d){kZd.call(this,b,c);this.b=a;this.a=d}
+function Ry(a,b){wi.call(this,new ezb(a));this.a=a;this.b=b}
+function Gu(a){this.b=a;this.c=a;a.e=null;a.c=null;this.a=1}
+function Dkc(a){lkc();var b;b=RD(a.g,10);b.n.a=a.d.c+b.d.b}
+function fA(){fA=geb;var a,b;b=!lA();a=new tA;eA=b?new mA:a}
+function Hob(a){yob();return ZD(a,59)?new irb(a):new Upb(a)}
+function Ux(a){return ZD(a,16)?new btb(RD(a,16)):Vx(a.Kc())}
+function Vi(a){return new ij(a,a.e.Rd().gc()*a.c.Rd().gc())}
+function fj(a){return new sj(a,a.e.Rd().gc()*a.c.Rd().gc())}
+function Iz(a){return !!a&&!!a.hashCode?a.hashCode():kFb(a)}
+function Yjb(a,b){return b==null?!!qtb(a.f,null):Jtb(a.i,b)}
+function hYb(a,b){var c;c=$sb(a.a,b);c&&(b.d=null);return c}
+function MGb(a,b,c){if(a.f){return a.f.ef(b,c)}return false}
+function cFc(a,b,c,d){bD(a.c[b.g],c.g,d);bD(a.c[c.g],b.g,d)}
+function fFc(a,b,c,d){bD(a.c[b.g],b.g,c);bD(a.b[b.g],b.g,d)}
+function sXc(a,b,c){return Kfb(UD(c.a))<=a&&Kfb(UD(c.b))>=b}
+function yJc(a,b){this.g=a;this.d=cD(WC(jR,1),WAe,10,0,[b])}
+function lHb(a){this.c=a;this.b=new yAb(RD(Qb(new oHb),50))}
+function UYb(a){this.c=a;this.b=new yAb(RD(Qb(new XYb),50))}
+function $Qb(a){this.b=a;this.a=new yAb(RD(Qb(new bRb),50))}
+function tRc(){this.b=new _sb;this.d=new Yub;this.e=new Fyb}
+function VTb(){this.c=new pjd;this.d=new pjd;this.e=new pjd}
+function a1b(){this.a=new Ejd;this.b=(dk(3,iwe),new cnb(3))}
+function i7d(a,b){this.e=a;this.a=jJ;this.b=pje(b);this.c=b}
+function Vid(a){this.c=a.c;this.d=a.d;this.b=a.b;this.a=a.a}
+function VLd(a,b,c,d,e,f){this.a=a;NKd.call(this,b,c,d,e,f)}
+function aLd(a,b,c,d,e,f){this.a=a;NKd.call(this,b,c,d,e,f)}
+function fge(a,b,c,d,e,f,g){return new lle(a.e,b,c,d,e,f,g)}
+function xhb(a,b,c){return c>=0&&lhb(a.substr(c,b.length),b)}
+function hGd(a,b){return ZD(b,149)&&lhb(a.b,RD(b,149).Pg())}
+function Tde(a,b){return a.a?b.Gh().Kc():RD(b.Gh(),71).Ii()}
+function Qqb(a,b){var c;c=a.b.Qc(b);Rqb(c,a.b.gc());return c}
+function Ivb(a,b){if(a==null){throw Adb(new Ogb(b))}return a}
+function zYd(a){if(!a.u){yYd(a);a.u=new w0d(a,a)}return a.u}
+function Kx(a){this.a=(yob(),ZD(a,59)?new irb(a):new Upb(a))}
+function Uwd(a){var b;b=RD(Ywd(a,16),29);return !b?a.ii():b}
+function lz(a,b){var c;c=nfb(a.Rm);return b==null?c:c+': '+b}
+function zhb(a,b,c){AFb(b,c,a.length);return a.substr(b,c-b)}
+function VKb(a,b){RJb.call(this);KKb(this);this.a=a;this.c=b}
+function neb(a){!a?vve:lz(a,a.ie());String.fromCharCode(10)}
+function Wz(a){Qz();$wnd.setTimeout(function(){throw a},0)}
+function GHb(){DHb();return cD(WC(uN,1),jwe,436,0,[CHb,BHb])}
+function OHb(){LHb();return cD(WC(vN,1),jwe,435,0,[JHb,KHb])}
+function WUb(){TUb();return cD(WC(BP,1),jwe,432,0,[RUb,SUb])}
+function S8b(){P8b();return cD(WC(vS,1),jwe,517,0,[O8b,N8b])}
+function Fuc(){Cuc();return cD(WC(fX,1),jwe,487,0,[Buc,Auc])}
+function buc(){$tc();return cD(WC(cX,1),jwe,428,0,[Ytc,Ztc])}
+function mtc(){jtc();return cD(WC($W,1),jwe,431,0,[htc,itc])}
+function vEc(){sEc();return cD(WC(xX,1),jwe,430,0,[qEc,rEc])}
+function vNc(){sNc();return cD(WC(MY,1),jwe,531,0,[rNc,qNc])}
+function zQc(){wQc();return cD(WC(FZ,1),jwe,523,0,[vQc,uQc])}
+function HQc(){EQc();return cD(WC(GZ,1),jwe,522,0,[CQc,DQc])}
+function iTc(){fTc();return cD(WC(b$,1),jwe,528,0,[eTc,dTc])}
+function Rvc(){Ovc();return cD(WC(lX,1),jwe,429,0,[Mvc,Nvc])}
+function F8c(){z8c();return cD(WC(l1,1),jwe,490,0,[x8c,y8c])}
+function H9c(){B9c();return cD(WC(t1,1),jwe,491,0,[z9c,A9c])}
+function D_c(){A_c();return cD(WC(K_,1),jwe,433,0,[z_c,y_c])}
+function a4c(){Y3c();return cD(WC(H0,1),jwe,434,0,[W3c,X3c])}
+function gVc(){dVc();return cD(WC(w$,1),jwe,464,0,[bVc,cVc])}
+function D2c(){x2c();return cD(WC(s0,1),jwe,500,0,[v2c,w2c])}
+function Pbd(){Mbd();return cD(WC(O1,1),jwe,438,0,[Lbd,Kbd])}
+function rdd(){ldd();return cD(WC(W1,1),jwe,437,0,[kdd,jdd])}
+function xqd(){uqd();return cD(WC(M3,1),jwe,347,0,[sqd,tqd])}
+function Jvd(a,b,c,d){return c>=0?a.Uh(b,c,d):a.Ch(null,c,d)}
+function ltd(a){if(a.b.b==0){return a.a.sf()}return Uub(a.b)}
+function vKd(a){if(a.p!=5)throw Adb(new cgb);return Ydb(a.f)}
+function EKd(a){if(a.p!=5)throw Adb(new cgb);return Ydb(a.k)}
+function P$d(a){dE(a.a)===dE((lYd(),kYd))&&Q$d(a);return a.a}
+function iad(a,b){a.b=b;a.c>0&&a.b>0&&(a.g=Aad(a.c,a.b,a.a))}
+function jad(a,b){a.c=b;a.c>0&&a.b>0&&(a.g=Aad(a.c,a.b,a.a))}
+function BUc(a,b){yUc(this,new rjd(a.a,a.b));zUc(this,gv(b))}
+function Tp(){Sp.call(this,new Usb(Sv(12)));Lb(true);this.a=2}
+function eue(a,b,c){Vse();Wse.call(this,a);this.b=b;this.a=c}
+function C7d(a,b,c){s7d();t7d.call(this,b);this.a=a;this.b=c}
+function qub(a){var b;b=a.c.d.b;a.b=b;a.a=a.c.d;b.a=a.c.d.b=a}
+function Tub(a){return a.b==0?null:(sFb(a.b!=0),Wub(a,a.a.a))}
+function Xjb(a,b){return b==null?Wd(qtb(a.f,null)):Ktb(a.i,b)}
+function bzb(a,b,c,d,e){return new Kzb(a,(cAb(),aAb),b,c,d,e)}
+function Fnb(a,b){oFb(b);return Hnb(a,$C(kE,Pwe,28,b,15,1),b)}
+function Tx(a,b){Rb(a,'set1');Rb(b,'set2');return new ey(a,b)}
+function Kz(a,b){var c=Jz[a.charCodeAt(0)];return c==null?a:c}
+function Xyb(a,b){var c,d;c=b;d=new Gzb;Zyb(a,c,d);return d.d}
+function EMb(a,b,c,d){var e;e=new TJb;b.a[c.g]=e;Wrb(a.b,d,e)}
+function SXb(a,b){var c;c=BXb(a.f,b);return $id(fjd(c),a.f.d)}
+function RFb(a){var b;EJb(a.a);DJb(a.a);b=new PJb(a.a);LJb(b)}
+function _Mb(a,b){$Mb(a,true);Umb(a.e.Rf(),new dNb(a,true,b))}
+function PSb(a,b){HSb();return a==vCd(JGd(b))||a==vCd(LGd(b))}
+function R0c(a,b){B0c();return RD(mQb(b,(h_c(),f_c)),17).a==a}
+function eE(a){return Math.max(Math.min(a,lve),-2147483648)|0}
+function sy(a){this.a=RD(Qb(a),277);this.b=(yob(),new jrb(a))}
+function qbd(a,b,c){this.i=new bnb;this.b=a;this.g=b;this.a=c}
+function had(a,b,c){this.a=new bnb;this.e=a;this.f=b;this.c=c}
+function _9c(a,b,c){this.c=new bnb;this.e=a;this.f=b;this.b=c}
+function TKb(a){RJb.call(this);KKb(this);this.a=a;this.c=true}
+function ieb(a){function b(){}
+;b.prototype=a||{};return new b}
+function zfb(a){if(a.Ae()){return null}var b=a.n;return eeb[b]}
+function kzd(a){if(a.Db>>16!=3)return null;return RD(a.Cb,27)}
+function MCd(a){if(a.Db>>16!=9)return null;return RD(a.Cb,27)}
+function Fzd(a){if(a.Db>>16!=6)return null;return RD(a.Cb,74)}
+function dVc(){dVc=geb;bVc=new eVc(Nye,0);cVc=new eVc(Oye,1)}
+function wQc(){wQc=geb;vQc=new xQc(Oye,0);uQc=new xQc(Nye,1)}
+function EQc(){EQc=geb;CQc=new FQc(Zye,0);DQc=new FQc('UP',1)}
+function Is(){Is=geb;Hs=ss((zs(),cD(WC(RG,1),jwe,549,0,[ys])))}
+function Wx(a){var b;b=new atb(Sv(a.length));zob(b,a);return b}
+function B2b(a,b){a.b+=b.b;a.c+=b.c;a.d+=b.d;a.a+=b.a;return a}
+function qmb(a,b){if(kmb(a,b)){Jmb(a);return true}return false}
+function qC(a,b){if(b==null){throw Adb(new Ngb)}return rC(a,b)}
+function nB(a,b){var c;c=a.q.getHours();a.q.setDate(b);mB(a,c)}
+function Xvd(a,b,c){var d;d=a.Ih(b);d>=0?a.bi(d,c):Svd(a,b,c)}
+function Lvd(a,b){var c;c=a.Ih(b);return c>=0?a.Wh(c):Rvd(a,b)}
+function zo(a,b){var c;Qb(b);for(c=a.a;c;c=c.c){b.Yd(c.g,c.i)}}
+function pMc(a,b,c){var d;d=qMc(a,b,c);a.b=new _Lc(d.c.length)}
+function HId(a,b,c){EId();!!a&&Zjb(DId,a,b);!!a&&Zjb(CId,a,c)}
+function bfc(a,b){Rec();return Geb(),RD(b.a,17).a0}
+function sId(a){var b;b=a.d;b=a.bj(a.f);WGd(a,b);return b.Ob()}
+function bHd(a,b){var c;c=new Kub(b);Ve(c,a);return new dnb(c)}
+function qKd(a){if(a.p!=0)throw Adb(new cgb);return Pdb(a.f,0)}
+function zKd(a){if(a.p!=0)throw Adb(new cgb);return Pdb(a.k,0)}
+function gBd(a){if(a.Db>>16!=7)return null;return RD(a.Cb,241)}
+function xXd(a){if(a.Db>>16!=6)return null;return RD(a.Cb,241)}
+function dCd(a){if(a.Db>>16!=7)return null;return RD(a.Cb,167)}
+function vCd(a){if(a.Db>>16!=11)return null;return RD(a.Cb,27)}
+function uWd(a){if(a.Db>>16!=17)return null;return RD(a.Cb,29)}
+function kVd(a){if(a.Db>>16!=3)return null;return RD(a.Cb,155)}
+function BDb(a){var b;MCb(a);b=new _sb;return CDb(a,new aEb(b))}
+function xfb(a,b){var c=a.a=a.a||[];return c[b]||(c[b]=a.ve(b))}
+function qB(a,b){var c;c=a.q.getHours();a.q.setMonth(b);mB(a,c)}
+function oz(a,b){ez(this);this.f=b;this.g=a;gz(this);this.je()}
+function TQb(a,b){this.a=a;this.c=ajd(this.a);this.b=new Vid(b)}
+function aGb(a,b,c){this.a=b;this.c=a;this.b=(Qb(c),new dnb(c))}
+function s$b(a,b,c){this.a=b;this.c=a;this.b=(Qb(c),new dnb(c))}
+function _Kc(a){this.a=a;this.b=$C(qY,Nve,2043,a.e.length,0,2)}
+function fGb(){this.a=new Iub;this.e=new _sb;this.g=0;this.i=0}
+function EId(){EId=geb;DId=new Tsb;CId=new Tsb;IId(zK,new JId)}
+function KFc(){KFc=geb;JFc=nfd(new ufd,(sXb(),rXb),(hcc(),$bc))}
+function RFc(){RFc=geb;QFc=nfd(new ufd,(sXb(),rXb),(hcc(),$bc))}
+function gGc(){gGc=geb;fGc=nfd(new ufd,(sXb(),rXb),(hcc(),$bc))}
+function ANc(){ANc=geb;zNc=pfd(new ufd,(sXb(),rXb),(hcc(),ybc))}
+function dOc(){dOc=geb;cOc=pfd(new ufd,(sXb(),rXb),(hcc(),ybc))}
+function gQc(){gQc=geb;fQc=pfd(new ufd,(sXb(),rXb),(hcc(),ybc))}
+function WQc(){WQc=geb;VQc=pfd(new ufd,(sXb(),rXb),(hcc(),ybc))}
+function dZd(a,b,c,d,e,f){return new P3d(a.e,b,a.Lj(),c,d,e,f)}
+function $jb(a,b,c){return b==null?rtb(a.f,null,c):Ltb(a.i,b,c)}
+function Y0b(a,b){!!a.c&&Ymb(a.c.g,a);a.c=b;!!a.c&&Rmb(a.c.g,a)}
+function g3b(a,b){!!a.c&&Ymb(a.c.a,a);a.c=b;!!a.c&&Rmb(a.c.a,a)}
+function P3b(a,b){!!a.i&&Ymb(a.i.j,a);a.i=b;!!a.i&&Rmb(a.i.j,a)}
+function Z0b(a,b){!!a.d&&Ymb(a.d.e,a);a.d=b;!!a.d&&Rmb(a.d.e,a)}
+function _Sc(a,b){!!a.a&&Ymb(a.a.k,a);a.a=b;!!a.a&&Rmb(a.a.k,a)}
+function aTc(a,b){!!a.b&&Ymb(a.b.f,a);a.b=b;!!a.b&&Rmb(a.b.f,a)}
+function Odd(a,b){Pdd(a,a.b,a.c);RD(a.b.b,68);!!b&&RD(b.b,68).b}
+function j2c(a,b){return Qfb(RD(a.c,65).c.e.b,RD(b.c,65).c.e.b)}
+function k2c(a,b){return Qfb(RD(a.c,65).c.e.a,RD(b.c,65).c.e.a)}
+function YXb(a){NXb();return Geb(),RD(a.a,86).d.e!=0?true:false}
+function LXd(a,b){ZD(a.Cb,184)&&(RD(a.Cb,184).tb=null);PAd(a,b)}
+function CWd(a,b){ZD(a.Cb,90)&&v$d(yYd(RD(a.Cb,90)),4);PAd(a,b)}
+function _5d(a,b){a6d(a,b);ZD(a.Cb,90)&&v$d(yYd(RD(a.Cb,90)),2)}
+function JFd(a,b){var c,d;c=b.c;d=c!=null;d&&oDd(a,new OC(b.c))}
+function v0d(a){var b,c;c=(jTd(),b=new s2d,b);l2d(c,a);return c}
+function E4d(a){var b,c;c=(jTd(),b=new s2d,b);l2d(c,a);return c}
+function Fr(a){var b;while(true){b=a.Pb();if(!a.Ob()){return b}}}
+function nq(a,b,c){Rmb(a.a,(fn(),ck(b,c),new gp(b,c)));return a}
+function rge(a,b){return nke(),wWd(b)?new ole(b,a):new Eke(b,a)}
+function ojb(a){Pib();return Ddb(a,0)>=0?jjb(a):Xib(jjb(Odb(a)))}
+function Asb(a){var b;b=RD(UEb(a.b),9);return new Fsb(a.a,b,a.c)}
+function Qw(a,b){var c;c=RD(Xv(nd(a.a),b),16);return !c?0:c.gc()}
+function Zmb(a,b,c){var d;xFb(b,c,a.c.length);d=c-b;$Eb(a.c,b,d)}
+function Rkb(a,b,c){xFb(b,c,a.gc());this.c=a;this.a=b;this.b=c-b}
+function fgd(a){this.c=new Yub;this.b=a.b;this.d=a.c;this.a=a.a}
+function qjd(a){this.a=$wnd.Math.cos(a);this.b=$wnd.Math.sin(a)}
+function bTc(a,b,c,d){this.c=a;this.d=d;_Sc(this,b);aTc(this,c)}
+function Si(a,b){Qi.call(this,new Usb(Sv(a)));dk(b,Mve);this.a=b}
+function Ryb(a,b,c){return new Kzb(a,(cAb(),_zb),null,false,b,c)}
+function czb(a,b,c){return new Kzb(a,(cAb(),bAb),b,c,null,false)}
+function ABb(){xBb();return cD(WC(QL,1),jwe,108,0,[uBb,vBb,wBb])}
+function yLb(){vLb();return cD(WC(TN,1),jwe,471,0,[uLb,tLb,sLb])}
+function HKb(){EKb();return cD(WC(MN,1),jwe,470,0,[CKb,BKb,DKb])}
+function aKb(){ZJb();return cD(WC(JN,1),jwe,237,0,[WJb,XJb,YJb])}
+function DWb(){AWb();return cD(WC(JP,1),jwe,391,0,[yWb,xWb,zWb])}
+function moc(){joc();return cD(WC(UV,1),jwe,372,0,[ioc,hoc,goc])}
+function ytc(){stc();return cD(WC(_W,1),jwe,322,0,[qtc,ptc,rtc])}
+function Htc(){Etc();return cD(WC(aX,1),jwe,351,0,[Btc,Dtc,Ctc])}
+function kuc(){huc();return cD(WC(dX,1),jwe,459,0,[fuc,euc,guc])}
+function Avc(){xvc();return cD(WC(jX,1),jwe,298,0,[vvc,wvc,uvc])}
+function Jvc(){Gvc();return cD(WC(kX,1),jwe,311,0,[Evc,Fvc,Dvc])}
+function pDc(){lDc();return cD(WC(sX,1),jwe,390,0,[iDc,jDc,kDc])}
+function EEc(){BEc();return cD(WC(yX,1),jwe,462,0,[AEc,yEc,zEc])}
+function NEc(){KEc();return cD(WC(zX,1),jwe,387,0,[HEc,IEc,JEc])}
+function WEc(){TEc();return cD(WC(AX,1),jwe,349,0,[SEc,QEc,REc])}
+function oFc(){lFc();return cD(WC(CX,1),jwe,350,0,[iFc,jFc,kFc])}
+function xFc(){uFc();return cD(WC(DX,1),jwe,352,0,[tFc,rFc,sFc])}
+function GFc(){DFc();return cD(WC(EX,1),jwe,388,0,[BFc,CFc,AFc])}
+function UKc(){RKc();return cD(WC(nY,1),jwe,463,0,[OKc,PKc,QKc])}
+function K3b(a){return xjd(cD(WC(l3,1),Nve,8,0,[a.i.n,a.n,a.a]))}
+function OZc(){LZc();return cD(WC(F_,1),jwe,392,0,[KZc,JZc,IZc])}
+function H_c(){H_c=geb;G_c=nfd(new ufd,(YVc(),WVc),(WYc(),MYc))}
+function A_c(){A_c=geb;z_c=new B_c('DFS',0);y_c=new B_c('BFS',1)}
+function TQc(a,b,c){var d;d=new SQc;d.b=b;d.a=c;++b.b;Rmb(a.d,d)}
+function NTb(a,b,c){var d;d=new sjd(c.d);$id(d,a);CGd(b,d.a,d.b)}
+function Nwb(a,b){Mwb(a,Ydb(Cdb(Tdb(b,24),Pxe)),Ydb(Cdb(b,Pxe)))}
+function wFb(a,b){if(a<0||a>b){throw Adb(new veb(cye+a+dye+b))}}
+function tFb(a,b){if(a<0||a>=b){throw Adb(new veb(cye+a+dye+b))}}
+function BFb(a,b){if(a<0||a>=b){throw Adb(new eib(cye+a+dye+b))}}
+function Swb(a,b){this.b=(uFb(a),a);this.a=(b&qxe)==0?b|64|Ove:b}
+function ODb(a){var b;MCb(a);b=(urb(),urb(),srb);return PDb(a,b)}
+function R9c(a,b,c){var d;d=S9c(a,b,false);return d.b<=b&&d.a<=c}
+function h9c(){b9c();return cD(WC(o1,1),jwe,439,0,[$8c,a9c,_8c])}
+function c7c(){_6c();return cD(WC(a1,1),jwe,394,0,[Z6c,$6c,Y6c])}
+function i6c(){f6c();return cD(WC(V0,1),jwe,445,0,[c6c,d6c,e6c])}
+function D6c(){z6c();return cD(WC(Z0,1),jwe,455,0,[w6c,y6c,x6c])}
+function k4c(){g4c();return cD(WC(I0,1),jwe,393,0,[d4c,e4c,f4c])}
+function x5c(){t5c();return cD(WC(N0,1),jwe,299,0,[r5c,s5c,q5c])}
+function Rmd(){Omd();return cD(WC(t3,1),jwe,278,0,[Lmd,Mmd,Nmd])}
+function pqd(){mqd();return cD(WC(J3,1),jwe,280,0,[kqd,jqd,lqd])}
+function Ind(){Fnd();return cD(WC(y3,1),jwe,346,0,[Dnd,Cnd,End])}
+function jbd(){gbd();return cD(WC(I1,1),jwe,444,0,[dbd,ebd,fbd])}
+function bv(a){Qb(a);return ZD(a,16)?new dnb(RD(a,16)):cv(a.Kc())}
+function Hz(a,b){return !!a&&!!a.equals?a.equals(b):dE(a)===dE(b)}
+function Cdb(a,b){return Edb(tD(Kdb(a)?Wdb(a):a,Kdb(b)?Wdb(b):b))}
+function Rdb(a,b){return Edb(zD(Kdb(a)?Wdb(a):a,Kdb(b)?Wdb(b):b))}
+function $db(a,b){return Edb(HD(Kdb(a)?Wdb(a):a,Kdb(b)?Wdb(b):b))}
+function xs(a,b){var c;c=(uFb(a),a).g;lFb(!!c);uFb(b);return c(b)}
+function rv(a,b){var c,d;d=tv(a,b);c=a.a.fd(d);return new Gv(a,c)}
+function CXd(a){if(a.Db>>16!=6)return null;return RD(yvd(a),241)}
+function sKd(a){if(a.p!=2)throw Adb(new cgb);return Ydb(a.f)&Bwe}
+function BKd(a){if(a.p!=2)throw Adb(new cgb);return Ydb(a.k)&Bwe}
+function ynb(a){sFb(a.ad?1:0}
+function Hmc(a,b){var c,d;c=Gmc(b);d=c;return RD(Wjb(a.c,d),17).a}
+function CMc(a,b,c){var d;d=a.d[b.p];a.d[b.p]=a.d[c.p];a.d[c.p]=d}
+function Jqd(a,b,c){var d;if(a.n&&!!b&&!!c){d=new otd;Rmb(a.e,d)}}
+function gYb(a,b){Ysb(a.a,b);if(b.d){throw Adb(new yz(jye))}b.d=a}
+function Had(a,b){this.a=new bnb;this.d=new bnb;this.f=a;this.c=b}
+function RWb(){this.c=new dXb;this.a=new I_b;this.b=new E0b;g0b()}
+function med(){hed();this.b=new Tsb;this.a=new Tsb;this.c=new bnb}
+function KKd(a,b,c){this.d=a;this.j=b;this.e=c;this.o=-1;this.p=3}
+function LKd(a,b,c){this.d=a;this.k=b;this.f=c;this.o=-1;this.p=5}
+function S3d(a,b,c,d,e,f){R3d.call(this,a,b,c,d,e);f&&(this.o=-2)}
+function U3d(a,b,c,d,e,f){T3d.call(this,a,b,c,d,e);f&&(this.o=-2)}
+function W3d(a,b,c,d,e,f){V3d.call(this,a,b,c,d,e);f&&(this.o=-2)}
+function Y3d(a,b,c,d,e,f){X3d.call(this,a,b,c,d,e);f&&(this.o=-2)}
+function $3d(a,b,c,d,e,f){Z3d.call(this,a,b,c,d,e);f&&(this.o=-2)}
+function a4d(a,b,c,d,e,f){_3d.call(this,a,b,c,d,e);f&&(this.o=-2)}
+function f4d(a,b,c,d,e,f){e4d.call(this,a,b,c,d,e);f&&(this.o=-2)}
+function h4d(a,b,c,d,e,f){g4d.call(this,a,b,c,d,e);f&&(this.o=-2)}
+function N7d(a,b,c,d){t7d.call(this,c);this.b=a;this.c=b;this.d=d}
+function mfe(a,b){this.f=a;this.a=(ree(),pee);this.c=pee;this.b=b}
+function Jfe(a,b){this.g=a;this.d=(ree(),qee);this.a=qee;this.b=b}
+function Gme(a,b){!a.c&&(a.c=new Uge(a,0));Fge(a.c,(nme(),fme),b)}
+function Oge(a,b){return Pge(a,b,ZD(b,102)&&(RD(b,19).Bb&txe)!=0)}
+function lB(a,b){return Agb(Hdb(a.q.getTime()),Hdb(b.q.getTime()))}
+function gj(a){return fk(a.e.Rd().gc()*a.c.Rd().gc(),16,new qj(a))}
+function CYd(a){return !!a.u&&tYd(a.u.a).i!=0&&!(!!a.n&&d$d(a.n))}
+function p4d(a){return !!a.a&&o4d(a.a.a).i!=0&&!(!!a.b&&o5d(a.b))}
+function Cxd(a,b){if(b==0){return !!a.o&&a.o.f!=0}return Kvd(a,b)}
+function Cc(a,b,c){var d;d=RD(a.Zb().xc(b),16);return !!d&&d.Hc(c)}
+function Gc(a,b,c){var d;d=RD(a.Zb().xc(b),16);return !!d&&d.Mc(c)}
+function _yb(a,b){var c;c=1-b;a.a[c]=azb(a.a[c],c);return azb(a,b)}
+function DFb(a,b){var c,d;d=Cdb(a,yxe);c=Sdb(b,32);return Rdb(c,d)}
+function bGb(a,b,c){var d;d=(Qb(a),new dnb(a));_Fb(new aGb(d,b,c))}
+function t$b(a,b,c){var d;d=(Qb(a),new dnb(a));r$b(new s$b(d,b,c))}
+function vBd(a,b,c,d,e,f){wBd(a,b,c,f);EYd(a,d);FYd(a,e);return a}
+function Xhb(a,b,c,d){a.a+=''+zhb(b==null?vve:jeb(b),c,d);return a}
+function Jkb(a,b){this.a=a;Dkb.call(this,a);wFb(b,a.gc());this.b=b}
+function xmb(a){this.a=$C(jJ,rve,1,mgb($wnd.Math.max(8,a))<<1,5,1)}
+function t2b(a){return RD(anb(a,$C(jR,WAe,10,a.c.length,0,1)),199)}
+function s2b(a){return RD(anb(a,$C(WQ,VAe,18,a.c.length,0,1)),482)}
+function Iyb(a){return !a.a?a.c:a.e.length==0?a.a.a:a.a.a+(''+a.e)}
+function Rib(a){while(a.d>0&&a.a[--a.d]==0);a.a[a.d++]==0&&(a.e=0)}
+function fvb(a){sFb(a.b.b!=a.d.a);a.c=a.b=a.b.b;--a.a;return a.c.c}
+function sRc(a,b,c){a.a=b;a.c=c;a.b.a.$b();Xub(a.d);aFb(a.e.a.c,0)}
+function Z5c(a,b){var c;a.e=new R5c;c=Q2c(b);_mb(c,a.c);$5c(a,c,0)}
+function zgd(a,b,c,d){var e;e=new Hgd;e.a=b;e.b=c;e.c=d;Mub(a.a,e)}
+function Agd(a,b,c,d){var e;e=new Hgd;e.a=b;e.b=c;e.c=d;Mub(a.b,e)}
+function Tb(a,b,c){if(a<0||bc){throw Adb(new veb(Kb(a,b,c)))}}
+function Pb(a,b){if(a<0||a>=b){throw Adb(new veb(Ib(a,b)))}return a}
+function qz(b){if(!('stack' in b)){try{throw b}catch(a){}}return b}
+function Zjc(a){Wjc();if(ZD(a.g,10)){return RD(a.g,10)}return null}
+function nx(a){if(Ih(a).dc()){return false}Jh(a,new rx);return true}
+function Xdb(a){var b;if(Kdb(a)){b=a;return b==-0.?0:b}return ED(a)}
+function lkb(a,b){if(ZD(b,44)){return Jd(a.a,RD(b,44))}return false}
+function gsb(a,b){if(ZD(b,44)){return Jd(a.a,RD(b,44))}return false}
+function vub(a,b){if(ZD(b,44)){return Jd(a.a,RD(b,44))}return false}
+function RCb(a){var b;LCb(a);b=new Prb;ixb(a.a,new fDb(b));return b}
+function Vae(){var a,b,c;b=(c=(a=new s2d,a),c);Rmb(Rae,b);return b}
+function mDb(a){var b;LCb(a);b=new ltb;ixb(a.a,new uDb(b));return b}
+function jDb(a,b){if(a.a<=a.b){b.Dd(a.a++);return true}return false}
+function xzb(a){yzb.call(this,a,(cAb(),$zb),null,false,null,false)}
+function $Rb(){$Rb=geb;ZRb=ss((VRb(),cD(WC($O,1),jwe,488,0,[URb])))}
+function CHc(){CHc=geb;BHc=yx(sgb(1),sgb(4));AHc=yx(sgb(1),sgb(2))}
+function yXc(a,b){return new gud(b,njd(ajd(b.e),a,a),(Geb(),true))}
+function fv(a){return new cnb((dk(a,lwe),dz(Bdb(Bdb(5,a),a/10|0))))}
+function Wi(a){return fk(a.e.Rd().gc()*a.c.Rd().gc(),273,new kj(a))}
+function u2b(a){return RD(anb(a,$C(xR,XAe,12,a.c.length,0,1)),2042)}
+function COc(a){dOc();return !W0b(a)&&!(!W0b(a)&&a.c.i.c==a.d.i.c)}
+function Y_c(a,b){R_c();return RD(mQb(b,(h_c(),W$c)),17).a>=a.gc()}
+function q8b(a,b){w8b(b,a);y8b(a.d);y8b(RD(mQb(a,(yCc(),cBc)),214))}
+function r8b(a,b){z8b(b,a);B8b(a.d);B8b(RD(mQb(a,(yCc(),cBc)),214))}
+function $0b(a,b,c){!!a.d&&Ymb(a.d.e,a);a.d=b;!!a.d&&Qmb(a.d.e,c,a)}
+function jPb(a,b,c){return c.f.c.length>0?yPb(a.a,b,c):yPb(a.b,b,c)}
+function Uz(a,b,c){var d;d=Sz();try{return Rz(a,b,c)}finally{Vz(d)}}
+function wDd(a,b){var c,d;c=qC(a,b);d=null;!!c&&(d=c.pe());return d}
+function yDd(a,b){var c,d;c=qC(a,b);d=null;!!c&&(d=c.se());return d}
+function xDd(a,b){var c,d;c=JB(a,b);d=null;!!c&&(d=c.se());return d}
+function zDd(a,b){var c,d;c=qC(a,b);d=null;!!c&&(d=ADd(c));return d}
+function rEd(a,b,c){var d;d=uDd(c);Do(a.g,d,b);Do(a.i,b,c);return b}
+function UIc(a,b,c){this.d=new fJc(this);this.e=a;this.i=b;this.f=c}
+function Mk(a,b,c,d){this.e=null;this.c=a;this.d=b;this.a=c;this.b=d}
+function urc(a,b,c,d){nrc(this);this.c=a;this.e=b;this.f=c;this.b=d}
+function MKd(a,b,c,d){this.d=a;this.n=b;this.g=c;this.o=d;this.p=-1}
+function Vc(a,b,c,d){return ZD(c,59)?new Kg(a,b,c,d):new yg(a,b,c,d)}
+function gr(a){if(ZD(a,16)){return RD(a,16).dc()}return !a.Kc().Ob()}
+function Wo(a){if(a.e.g!=a.b){throw Adb(new Jrb)}return !!a.c&&a.d>0}
+function evb(a){sFb(a.b!=a.d.c);a.c=a.b;a.b=a.b.a;++a.a;return a.c.c}
+function imb(a,b){uFb(b);bD(a.a,a.c,b);a.c=a.c+1&a.a.length-1;mmb(a)}
+function hmb(a,b){uFb(b);a.b=a.b-1&a.a.length-1;bD(a.a,a.b,b);mmb(a)}
+function _je(a){var b;b=a.Gh();this.a=ZD(b,71)?RD(b,71).Ii():b.Kc()}
+function px(a){return new Swb(Dob(RD(a.a.md(),16).gc(),a.a.ld()),16)}
+function Abd(){Abd=geb;zbd=ss((sbd(),cD(WC(M1,1),jwe,489,0,[rbd])))}
+function Jbd(){Jbd=geb;Ibd=ss((Cbd(),cD(WC(N1,1),jwe,558,0,[Bbd])))}
+function idd(){idd=geb;hdd=ss((_cd(),cD(WC(V1,1),jwe,539,0,[$cd])))}
+function X$b(){U$b();return cD(WC(CQ,1),jwe,389,0,[T$b,R$b,Q$b,S$b])}
+function hAb(){cAb();return cD(WC(AL,1),jwe,303,0,[$zb,_zb,aAb,bAb])}
+function LPb(){IPb();return cD(WC(DO,1),jwe,332,0,[FPb,EPb,GPb,HPb])}
+function LRb(){IRb();return cD(WC(WO,1),jwe,406,0,[FRb,ERb,GRb,HRb])}
+function pOb(){mOb();return cD(WC(hO,1),jwe,417,0,[lOb,iOb,jOb,kOb])}
+function uZb(){nZb();return cD(WC(lQ,1),jwe,416,0,[jZb,mZb,kZb,lZb])}
+function hnc(){enc();return cD(WC(LV,1),jwe,421,0,[anc,bnc,cnc,dnc])}
+function zec(){vec();return cD(WC(qT,1),jwe,371,0,[uec,sec,tec,rec])}
+function BDc(){wDc();return cD(WC(tX,1),jwe,203,0,[uDc,vDc,tDc,sDc])}
+function nEc(){kEc();return cD(WC(wX,1),jwe,284,0,[hEc,gEc,iEc,jEc])}
+function Unc(a){var b;return a.j==(qpd(),npd)&&(b=Vnc(a),Csb(b,Xod))}
+function qhc(a,b){var c;c=b.a;Y0b(c,b.c.d);Z0b(c,b.d.d);Cjd(c.a,a.n)}
+function _5b(a,b){var c;c=RD(cub(a.b,b),67);!c&&(c=new Yub);return c}
+function $jc(a){Wjc();if(ZD(a.g,154)){return RD(a.g,154)}return null}
+function gRc(a){a.a=null;a.e=null;aFb(a.b.c,0);aFb(a.f.c,0);a.c=null}
+function Ovc(){Ovc=geb;Mvc=new Pvc(Kye,0);Nvc=new Pvc('TOP_LEFT',1)}
+function sNc(){sNc=geb;rNc=new tNc('UPPER',0);qNc=new tNc('LOWER',1)}
+function nWc(a,b){return cjd(new rjd(b.e.a+b.f.a/2,b.e.b+b.f.b/2),a)}
+function wqc(a,b){return RD(Lvb(JDb(RD(Qc(a.k,b),15).Oc(),lqc)),113)}
+function xqc(a,b){return RD(Lvb(KDb(RD(Qc(a.k,b),15).Oc(),lqc)),113)}
+function cWc(){YVc();return cD(WC(H$,1),jwe,405,0,[UVc,VVc,WVc,XVc])}
+function v_c(){s_c();return cD(WC(J_,1),jwe,353,0,[r_c,p_c,q_c,o_c])}
+function n5c(){j5c();return cD(WC(M0,1),jwe,354,0,[i5c,g5c,h5c,f5c])}
+function Tpd(){Qpd();return cD(WC(H3,1),jwe,386,0,[Opd,Ppd,Npd,Mpd])}
+function Tnd(){Pnd();return cD(WC(z3,1),jwe,290,0,[Ond,Lnd,Mnd,Nnd])}
+function _md(){Ymd();return cD(WC(u3,1),jwe,223,0,[Xmd,Vmd,Umd,Wmd])}
+function Jrd(){Grd();return cD(WC(R3,1),jwe,320,0,[Frd,Crd,Erd,Drd])}
+function wtd(){ttd();return cD(WC(n4,1),jwe,415,0,[qtd,rtd,ptd,std])}
+function GId(a){EId();return Ujb(DId,a)?RD(Wjb(DId,a),341).Qg():null}
+function Avd(a,b,c){return b<0?Rvd(a,c):RD(c,69).wk().Bk(a,a.hi(),b)}
+function sEd(a,b,c){var d;d=uDd(c);Do(a.j,d,b);Zjb(a.k,b,c);return b}
+function qEd(a,b,c){var d;d=uDd(c);Do(a.d,d,b);Zjb(a.e,b,c);return b}
+function DGd(a){var b,c;b=(bvd(),c=new rzd,c);!!a&&pzd(b,a);return b}
+function WHd(a){var b;b=a.aj(a.i);a.i>0&&hib(a.g,0,b,0,a.i);return b}
+function Led(a,b){var c;for(c=a.j.c.length;c>24}
+function AKd(a){if(a.p!=1)throw Adb(new cgb);return Ydb(a.k)<<24>>24}
+function GKd(a){if(a.p!=7)throw Adb(new cgb);return Ydb(a.k)<<16>>16}
+function xKd(a){if(a.p!=7)throw Adb(new cgb);return Ydb(a.f)<<16>>16}
+function Wib(a,b){if(b.e==0||a.e==0){return Oib}return Ljb(),Mjb(a,b)}
+function Nd(a,b){return dE(b)===dE(a)?'(this Map)':b==null?vve:jeb(b)}
+function MFb(a,b,c){return Jfb(UD(Wd(qtb(a.f,b))),UD(Wd(qtb(a.f,c))))}
+function wkc(a,b,c){var d;d=RD(Wjb(a.g,c),60);Rmb(a.a.c,new Ptd(b,d))}
+function Slc(a,b,c){a.i=0;a.e=0;if(b==c){return}Rlc(a,b,c);Qlc(a,b,c)}
+function rTc(a,b,c,d,e){var f;f=mTc(e,c,d);Rmb(b,TSc(e,f));vTc(a,e,b)}
+function Jrc(a,b,c,d,e){this.i=a;this.a=b;this.e=c;this.j=d;this.f=e}
+function iUb(a,b){VTb.call(this);this.a=a;this.b=b;Rmb(this.a.b,this)}
+function rTb(a){this.b=new Tsb;this.c=new Tsb;this.d=new Tsb;this.a=a}
+function Dx(a,b){var c;c=new cib;a.Gd(c);c.a+='..';b.Hd(c);return c.a}
+function Fsd(a,b){var c;c=b;while(c){Zid(a,c.i,c.j);c=vCd(c)}return a}
+function pEd(a,b,c){var d;d=uDd(c);Zjb(a.b,d,b);Zjb(a.c,b,c);return b}
+function Kr(a){var b;b=0;while(a.Ob()){a.Pb();b=Bdb(b,1)}return dz(b)}
+function oke(a,b){nke();var c;c=RD(a,69).vk();K6d(c,b);return c.xl(b)}
+function tC(d,a,b){if(b){var c=b.oe();d.a[a]=c(b)}else{delete d.a[a]}}
+function tB(a,b){var c;c=a.q.getHours();a.q.setFullYear(b+Owe);mB(a,c)}
+function KSd(a,b){return RD(b==null?Wd(qtb(a.f,null)):Ktb(a.i,b),288)}
+function hOc(a,b){return a==(r3b(),p3b)&&b==p3b?4:a==p3b||b==p3b?8:32}
+function cge(a,b,c){return dge(a,b,c,ZD(b,102)&&(RD(b,19).Bb&txe)!=0)}
+function jge(a,b,c){return kge(a,b,c,ZD(b,102)&&(RD(b,19).Bb&txe)!=0)}
+function Qge(a,b,c){return Rge(a,b,c,ZD(b,102)&&(RD(b,19).Bb&txe)!=0)}
+function jmb(a){if(a.b==a.c){return}a.a=$C(jJ,rve,1,8,5,1);a.b=0;a.c=0}
+function Nsb(a){sFb(a.a=0&&a.a[c]===b[c];c--);return c<0}
+function Xx(a){var b;if(a){return new Kub(a)}b=new Iub;_q(b,a);return b}
+function nmc(a,b){var c,d;d=false;do{c=qmc(a,b);d=d|c}while(c);return d}
+function Vz(a){a&&aA(($z(),Zz));--Nz;if(a){if(Pz!=-1){Xz(Pz);Pz=-1}}}
+function Pwb(a){Hwb();Mwb(this,Ydb(Cdb(Tdb(a,24),Pxe)),Ydb(Cdb(a,Pxe)))}
+function IHb(){IHb=geb;HHb=ss((DHb(),cD(WC(uN,1),jwe,436,0,[CHb,BHb])))}
+function QHb(){QHb=geb;PHb=ss((LHb(),cD(WC(vN,1),jwe,435,0,[JHb,KHb])))}
+function YUb(){YUb=geb;XUb=ss((TUb(),cD(WC(BP,1),jwe,432,0,[RUb,SUb])))}
+function U8b(){U8b=geb;T8b=ss((P8b(),cD(WC(vS,1),jwe,517,0,[O8b,N8b])))}
+function Huc(){Huc=geb;Guc=ss((Cuc(),cD(WC(fX,1),jwe,487,0,[Buc,Auc])))}
+function duc(){duc=geb;cuc=ss(($tc(),cD(WC(cX,1),jwe,428,0,[Ytc,Ztc])))}
+function otc(){otc=geb;ntc=ss((jtc(),cD(WC($W,1),jwe,431,0,[htc,itc])))}
+function xEc(){xEc=geb;wEc=ss((sEc(),cD(WC(xX,1),jwe,430,0,[qEc,rEc])))}
+function xNc(){xNc=geb;wNc=ss((sNc(),cD(WC(MY,1),jwe,531,0,[rNc,qNc])))}
+function BQc(){BQc=geb;AQc=ss((wQc(),cD(WC(FZ,1),jwe,523,0,[vQc,uQc])))}
+function JQc(){JQc=geb;IQc=ss((EQc(),cD(WC(GZ,1),jwe,522,0,[CQc,DQc])))}
+function kTc(){kTc=geb;jTc=ss((fTc(),cD(WC(b$,1),jwe,528,0,[eTc,dTc])))}
+function Tvc(){Tvc=geb;Svc=ss((Ovc(),cD(WC(lX,1),jwe,429,0,[Mvc,Nvc])))}
+function iVc(){iVc=geb;hVc=ss((dVc(),cD(WC(w$,1),jwe,464,0,[bVc,cVc])))}
+function c4c(){c4c=geb;b4c=ss((Y3c(),cD(WC(H0,1),jwe,434,0,[W3c,X3c])))}
+function F_c(){F_c=geb;E_c=ss((A_c(),cD(WC(K_,1),jwe,433,0,[z_c,y_c])))}
+function F2c(){F2c=geb;E2c=ss((x2c(),cD(WC(s0,1),jwe,500,0,[v2c,w2c])))}
+function H8c(){H8c=geb;G8c=ss((z8c(),cD(WC(l1,1),jwe,490,0,[x8c,y8c])))}
+function J9c(){J9c=geb;I9c=ss((B9c(),cD(WC(t1,1),jwe,491,0,[z9c,A9c])))}
+function Rbd(){Rbd=geb;Qbd=ss((Mbd(),cD(WC(O1,1),jwe,438,0,[Lbd,Kbd])))}
+function tdd(){tdd=geb;sdd=ss((ldd(),cD(WC(W1,1),jwe,437,0,[kdd,jdd])))}
+function Eqd(){Eqd=geb;Dqd=ss((uqd(),cD(WC(M3,1),jwe,347,0,[sqd,tqd])))}
+function Imd(){Cmd();return cD(WC(s3,1),jwe,88,0,[Amd,zmd,ymd,xmd,Bmd])}
+function xpd(){qpd();return cD(WC(E3,1),NAe,64,0,[opd,Yod,Xod,npd,ppd])}
+function LSd(a,b,c){return RD(b==null?rtb(a.f,null,c):Ltb(a.i,b,c),288)}
+function L6b(a){return (a.k==(r3b(),p3b)||a.k==m3b)&&nQb(a,(Ywc(),cwc))}
+function bUb(a){return !!a.c&&!!a.d?kUb(a.c)+'->'+kUb(a.d):'e_'+kFb(a)}
+function xgb(a,b){var c,d;uFb(b);for(d=a.Kc();d.Ob();){c=d.Pb();b.Cd(c)}}
+function jEd(a,b){var c;c=new uC;qDd(c,'x',b.a);qDd(c,'y',b.b);oDd(a,c)}
+function mEd(a,b){var c;c=new uC;qDd(c,'x',b.a);qDd(c,'y',b.b);oDd(a,c)}
+function Gsd(a,b){var c;c=b;while(c){Zid(a,-c.i,-c.j);c=vCd(c)}return a}
+function ZLc(a,b){var c,d;c=b;d=0;while(c>0){d+=a.a[c];c-=c&-c}return d}
+function $mb(a,b,c){var d;d=(tFb(b,a.c.length),a.c[b]);a.c[b]=c;return d}
+function uIc(a,b,c){a.a.c.length=0;yIc(a,b,c);a.a.c.length==0||rIc(a,b)}
+function wo(a){a.i=0;Mnb(a.b,null);Mnb(a.c,null);a.a=null;a.e=null;++a.g}
+function gBb(){gBb=geb;dBb=true;bBb=false;cBb=false;fBb=false;eBb=false}
+function oBb(a){gBb();if(dBb){return}this.c=a;this.e=true;this.a=new bnb}
+function kDb(a,b){this.c=0;this.b=b;txb.call(this,a,17493);this.a=this.c}
+function S_b(a){P_b();A$b(this);this.a=new Yub;Q_b(this,a);Mub(this.a,a)}
+function m_b(){Pmb(this);this.b=new rjd(oxe,oxe);this.a=new rjd(pxe,pxe)}
+function z8c(){z8c=geb;x8c=new B8c(CBe,0);y8c=new B8c('TARGET_WIDTH',1)}
+function yDb(a,b){return (MCb(a),QDb(new SDb(a,new hEb(b,a.a)))).Bd(wDb)}
+function vXb(){sXb();return cD(WC(UP,1),jwe,367,0,[nXb,oXb,pXb,qXb,rXb])}
+function Fnc(){Bnc();return cD(WC(TV,1),jwe,375,0,[xnc,znc,Anc,ync,wnc])}
+function Vtc(){Ptc();return cD(WC(bX,1),jwe,348,0,[Ltc,Ktc,Ntc,Otc,Mtc])}
+function PDc(){JDc();return cD(WC(uX,1),jwe,323,0,[IDc,FDc,GDc,EDc,HDc])}
+function fxc(){cxc();return cD(WC(mX,1),jwe,171,0,[bxc,Zwc,$wc,_wc,axc])}
+function k3c(){g3c();return cD(WC(x0,1),jwe,368,0,[e3c,b3c,f3c,c3c,d3c])}
+function vad(){sad();return cD(WC(x1,1),jwe,373,0,[oad,nad,qad,pad,rad])}
+function $bd(){Xbd();return cD(WC(P1,1),jwe,324,0,[Sbd,Tbd,Wbd,Ubd,Vbd])}
+function _hd(){Yhd();return cD(WC(d3,1),jwe,170,0,[Whd,Vhd,Thd,Xhd,Uhd])}
+function sod(){pod();return cD(WC(B3,1),jwe,256,0,[mod,ood,kod,lod,nod])}
+function Tz(b){Qz();return function(){return Uz(b,this,arguments);var a}}
+function W0b(a){if(!a.c||!a.d){return false}return !!a.c.i&&a.c.i==a.d.i}
+function Nfd(a,b){if(ZD(b,143)){return lhb(a.c,RD(b,143).c)}return false}
+function yYd(a){if(!a.t){a.t=new w$d(a);VGd(new Cde(a),0,a.t)}return a.t}
+function jNd(a){this.b=a;dMd.call(this,a);this.a=RD(Ywd(this.b.a,4),129)}
+function sNd(a){this.b=a;yMd.call(this,a);this.a=RD(Ywd(this.b.a,4),129)}
+function Q3d(a,b,c,d,e){OKd.call(this,b,d,e);J3d(this);this.c=a;this.b=c}
+function V3d(a,b,c,d,e){KKd.call(this,b,d,e);J3d(this);this.c=a;this.a=c}
+function Z3d(a,b,c,d,e){LKd.call(this,b,d,e);J3d(this);this.c=a;this.a=c}
+function g4d(a,b,c,d,e){OKd.call(this,b,d,e);J3d(this);this.c=a;this.a=c}
+function ugd(a,b){var c;c=RD(cub(a.d,b),23);return c?c:RD(cub(a.e,b),23)}
+function Blb(a,b){var c,d;c=b.ld();d=a.Fe(c);return !!d&&Fvb(d.e,b.md())}
+function me(a,b){var c;c=b.ld();return new gp(c,a.e.pc(c,RD(b.md(),16)))}
+function ptb(a,b){var c;c=a.a.get(b);return c==null?$C(jJ,rve,1,0,5,1):c}
+function khb(a){var b;b=a.length;return lhb(sxe.substr(sxe.length-b,b),a)}
+function hs(a){if(gs(a)){a.c=a.a;return a.a.Pb()}else{throw Adb(new Dvb)}}
+function $ib(a,b){if(b==0||a.e==0){return a}return b>0?tjb(a,b):qjb(a,-b)}
+function Zib(a,b){if(b==0||a.e==0){return a}return b>0?qjb(a,b):tjb(a,-b)}
+function Deb(a){Beb.call(this,a==null?vve:jeb(a),ZD(a,82)?RD(a,82):null)}
+function Y5d(a){var b;if(!a.c){b=a.r;ZD(b,90)&&(a.c=RD(b,29))}return a.c}
+function s0b(a){var b;b=new a1b;kQb(b,a);pQb(b,(yCc(),RAc),null);return b}
+function lec(a){var b,c;b=a.c.i;c=a.d.i;return b.k==(r3b(),m3b)&&c.k==m3b}
+function fD(a){var b,c,d;b=a&dxe;c=a>>22&dxe;d=a<0?exe:0;return hD(b,c,d)}
+function Ky(a){var b,c,d,e;for(c=a,d=0,e=c.length;d=0?a.Lh(d,c,true):Qvd(a,b,c)}
+function AXc(a,b,c){return Qfb(cjd(jWc(a),ajd(b.b)),cjd(jWc(a),ajd(c.b)))}
+function BXc(a,b,c){return Qfb(cjd(jWc(a),ajd(b.e)),cjd(jWc(a),ajd(c.e)))}
+function Kad(a,b){return $wnd.Math.min(bjd(b.a,a.d.d.c),bjd(b.b,a.d.d.c))}
+function LHd(a,b){a._i(a.i+1);MHd(a,a.i,a.Zi(a.i,b));a.Mi(a.i++,b);a.Ni()}
+function OHd(a){var b,c;++a.j;b=a.g;c=a.i;a.g=null;a.i=0;a.Oi(c,b);a.Ni()}
+function yke(a,b,c){var d;d=new zke(a.a);Ld(d,a.a.a);rtb(d.f,b,c);a.a.a=d}
+function mKb(a,b,c,d){var e;for(e=0;eb){throw Adb(new veb(Jb(a,b,'index')))}return a}
+function Xmb(a,b){var c;c=(tFb(b,a.c.length),a.c[b]);$Eb(a.c,b,1);return c}
+function jhb(a,b){var c,d;c=(uFb(a),a);d=(uFb(b),b);return c==d?0:cb.p){return -1}return 0}
+function hXd(a){var b;if(!a.a){b=a.r;ZD(b,156)&&(a.a=RD(b,156))}return a.a}
+function iOd(a,b,c){var d;++a.e;--a.f;d=RD(a.d[b].gd(c),136);return d.md()}
+function fd(a){var b,c;b=a.ld();c=RD(a.md(),16);return gk(c.Nc(),new jh(b))}
+function oae(a,b){if(Ujb(a.a,b)){_jb(a.a,b);return true}else{return false}}
+function Ui(a,b,c){Pb(b,a.e.Rd().gc());Pb(c,a.c.Rd().gc());return a.a[b][c]}
+function _Uc(a,b,c){this.a=a;this.b=b;this.c=c;Rmb(a.t,this);Rmb(b.i,this)}
+function lg(a,b,c,d){this.f=a;this.e=b;this.d=c;this.b=d;this.c=!d?null:d.d}
+function YWc(){this.b=new Yub;this.a=new Yub;this.b=new Yub;this.a=new Yub}
+function ree(){ree=geb;var a,b;pee=(jTd(),b=new k1d,b);qee=(a=new mXd,a)}
+function UCb(a){var b;MCb(a);b=new $Cb(a,a.a.e,a.a.d|4);return new WCb(a,b)}
+function ADb(a){var b;LCb(a);b=0;while(a.a.Bd(new MEb)){b=Bdb(b,1)}return b}
+function zxb(a,b){uFb(b);if(a.c=0,'Initial capacity must not be negative')}
+function rid(){rid=geb;qid=new jGd('org.eclipse.elk.labels.labelManager')}
+function iec(){iec=geb;hec=new kGd('separateLayerConnections',(vec(),uec))}
+function fTc(){fTc=geb;eTc=new gTc('REGULAR',0);dTc=new gTc('CRITICAL',1)}
+function Mbd(){Mbd=geb;Lbd=new Nbd('FIXED',0);Kbd=new Nbd('CENTER_NODE',1)}
+function jtc(){jtc=geb;htc=new ktc('QUADRATIC',0);itc=new ktc('SCANLINE',1)}
+function Atc(){Atc=geb;ztc=ss((stc(),cD(WC(_W,1),jwe,322,0,[qtc,ptc,rtc])))}
+function Jtc(){Jtc=geb;Itc=ss((Etc(),cD(WC(aX,1),jwe,351,0,[Btc,Dtc,Ctc])))}
+function muc(){muc=geb;luc=ss((huc(),cD(WC(dX,1),jwe,459,0,[fuc,euc,guc])))}
+function ooc(){ooc=geb;noc=ss((joc(),cD(WC(UV,1),jwe,372,0,[ioc,hoc,goc])))}
+function Lvc(){Lvc=geb;Kvc=ss((Gvc(),cD(WC(kX,1),jwe,311,0,[Evc,Fvc,Dvc])))}
+function Cvc(){Cvc=geb;Bvc=ss((xvc(),cD(WC(jX,1),jwe,298,0,[vvc,wvc,uvc])))}
+function rDc(){rDc=geb;qDc=ss((lDc(),cD(WC(sX,1),jwe,390,0,[iDc,jDc,kDc])))}
+function PEc(){PEc=geb;OEc=ss((KEc(),cD(WC(zX,1),jwe,387,0,[HEc,IEc,JEc])))}
+function YEc(){YEc=geb;XEc=ss((TEc(),cD(WC(AX,1),jwe,349,0,[SEc,QEc,REc])))}
+function GEc(){GEc=geb;FEc=ss((BEc(),cD(WC(yX,1),jwe,462,0,[AEc,yEc,zEc])))}
+function zFc(){zFc=geb;yFc=ss((uFc(),cD(WC(DX,1),jwe,352,0,[tFc,rFc,sFc])))}
+function qFc(){qFc=geb;pFc=ss((lFc(),cD(WC(CX,1),jwe,350,0,[iFc,jFc,kFc])))}
+function IFc(){IFc=geb;HFc=ss((DFc(),cD(WC(EX,1),jwe,388,0,[BFc,CFc,AFc])))}
+function QZc(){QZc=geb;PZc=ss((LZc(),cD(WC(F_,1),jwe,392,0,[KZc,JZc,IZc])))}
+function m4c(){m4c=geb;l4c=ss((g4c(),cD(WC(I0,1),jwe,393,0,[d4c,e4c,f4c])))}
+function z5c(){z5c=geb;y5c=ss((t5c(),cD(WC(N0,1),jwe,299,0,[r5c,s5c,q5c])))}
+function k6c(){k6c=geb;j6c=ss((f6c(),cD(WC(V0,1),jwe,445,0,[c6c,d6c,e6c])))}
+function F6c(){F6c=geb;E6c=ss((z6c(),cD(WC(Z0,1),jwe,455,0,[w6c,y6c,x6c])))}
+function e7c(){e7c=geb;d7c=ss((_6c(),cD(WC(a1,1),jwe,394,0,[Z6c,$6c,Y6c])))}
+function j9c(){j9c=geb;i9c=ss((b9c(),cD(WC(o1,1),jwe,439,0,[$8c,a9c,_8c])))}
+function WKc(){WKc=geb;VKc=ss((RKc(),cD(WC(nY,1),jwe,463,0,[OKc,PKc,QKc])))}
+function JKb(){JKb=geb;IKb=ss((EKb(),cD(WC(MN,1),jwe,470,0,[CKb,BKb,DKb])))}
+function cKb(){cKb=geb;bKb=ss((ZJb(),cD(WC(JN,1),jwe,237,0,[WJb,XJb,YJb])))}
+function ALb(){ALb=geb;zLb=ss((vLb(),cD(WC(TN,1),jwe,471,0,[uLb,tLb,sLb])))}
+function CBb(){CBb=geb;BBb=ss((xBb(),cD(WC(QL,1),jwe,108,0,[uBb,vBb,wBb])))}
+function FWb(){FWb=geb;EWb=ss((AWb(),cD(WC(JP,1),jwe,391,0,[yWb,xWb,zWb])))}
+function Knd(){Knd=geb;Jnd=ss((Fnd(),cD(WC(y3,1),jwe,346,0,[Dnd,Cnd,End])))}
+function lbd(){lbd=geb;kbd=ss((gbd(),cD(WC(I1,1),jwe,444,0,[dbd,ebd,fbd])))}
+function Tmd(){Tmd=geb;Smd=ss((Omd(),cD(WC(t3,1),jwe,278,0,[Lmd,Mmd,Nmd])))}
+function rqd(){rqd=geb;qqd=ss((mqd(),cD(WC(J3,1),jwe,280,0,[kqd,jqd,lqd])))}
+function Hxd(a,b){return !a.o&&(a.o=new DVd((pvd(),mvd),X4,a,0)),QNd(a.o,b)}
+function HMb(a,b){var c;if(a.C){c=RD(Vrb(a.b,b),127).n;c.d=a.C.d;c.a=a.C.a}}
+function F8b(a){var b,c,d,e;e=a.d;b=a.a;c=a.b;d=a.c;a.d=c;a.a=d;a.b=e;a.c=b}
+function cOd(a){!a.g&&(a.g=new hQd);!a.g.b&&(a.g.b=new ePd(a));return a.g.b}
+function dOd(a){!a.g&&(a.g=new hQd);!a.g.c&&(a.g.c=new IPd(a));return a.g.c}
+function lOd(a){!a.g&&(a.g=new hQd);!a.g.d&&(a.g.d=new kPd(a));return a.g.d}
+function YNd(a){!a.g&&(a.g=new hQd);!a.g.a&&(a.g.a=new qPd(a));return a.g.a}
+function B9d(a,b,c,d){!!c&&(d=c.Rh(b,BYd(c.Dh(),a.c.uk()),null,d));return d}
+function C9d(a,b,c,d){!!c&&(d=c.Th(b,BYd(c.Dh(),a.c.uk()),null,d));return d}
+function Cjb(a,b,c,d){var e;e=$C(kE,Pwe,28,b+1,15,1);Djb(e,a,b,c,d);return e}
+function $C(a,b,c,d,e,f){var g;g=_C(e,d);e!=10&&cD(WC(a,f),b,c,e,g);return g}
+function $fe(a,b,c){var d,e;e=new Phe(b,a);for(d=0;dc||b=0?a.Lh(c,true,true):Qvd(a,b,true)}
+function gMc(a,b,c){var d;d=qMc(a,b,c);a.b=new _Lc(d.c.length);return iMc(a,d)}
+function Pue(a){if(a.b<=0)throw Adb(new Dvb);--a.b;a.a-=a.c.c;return sgb(a.a)}
+function PGd(a){var b;if(!a.a){throw Adb(new Evb)}b=a.a;a.a=vCd(a.a);return b}
+function WDb(a){while(!a.a){if(!yEb(a.c,new $Db(a))){return false}}return true}
+function Nr(a){var b;Qb(a);if(ZD(a,204)){b=RD(a,204);return b}return new Or(a)}
+function Cfd(a){Afd();RD(a.of((umd(),Lld)),181).Fc((Pod(),Mod));a.qf(Kld,null)}
+function Afd(){Afd=geb;xfd=new Gfd;zfd=new Ifd;yfd=yn((umd(),Kld),xfd,pld,zfd)}
+function Y3c(){Y3c=geb;W3c=new $3c('LEAF_NUMBER',0);X3c=new $3c('NODE_SIZE',1)}
+function YLc(a){a.a=$C(kE,Pwe,28,a.b+1,15,1);a.c=$C(kE,Pwe,28,a.b,15,1);a.d=0}
+function OZb(a,b){if(a.a.Ne(b.d,a.b)>0){Rmb(a.c,new fZb(b.c,b.d,a.d));a.b=b.d}}
+function NHd(a,b){if(a.g==null||b>=a.i)throw Adb(new yNd(b,a.i));return a.g[b]}
+function P_d(a,b,c){gHd(a,c);if(c!=null&&!a.fk(c)){throw Adb(new yeb)}return c}
+function dD(a,b){XC(b)!=10&&cD(rb(b),b.Sm,b.__elementTypeId$,XC(b),a);return a}
+function Wnb(a,b,c,d){var e;d=(urb(),!d?rrb:d);e=a.slice(b,c);Xnb(e,a,b,c,-b,d)}
+function zvd(a,b,c,d,e){return b<0?Qvd(a,c,d):RD(c,69).wk().yk(a,a.hi(),b,d,e)}
+function J9b(a,b){return Qfb(Kfb(UD(mQb(a,(Ywc(),Jwc)))),Kfb(UD(mQb(b,Jwc))))}
+function qAb(){qAb=geb;pAb=ss((cAb(),cD(WC(AL,1),jwe,303,0,[$zb,_zb,aAb,bAb])))}
+function cAb(){cAb=geb;$zb=new dAb('All',0);_zb=new iAb;aAb=new kAb;bAb=new nAb}
+function EKb(){EKb=geb;CKb=new FKb(Nye,0);BKb=new FKb(Kye,1);DKb=new FKb(Oye,2)}
+function Zme(){Zme=geb;qAd();Wme=oxe;Vme=pxe;Yme=new Tfb(oxe);Xme=new Tfb(pxe)}
+function rOb(){rOb=geb;qOb=ss((mOb(),cD(WC(hO,1),jwe,417,0,[lOb,iOb,jOb,kOb])))}
+function NRb(){NRb=geb;MRb=ss((IRb(),cD(WC(WO,1),jwe,406,0,[FRb,ERb,GRb,HRb])))}
+function NPb(){NPb=geb;MPb=ss((IPb(),cD(WC(DO,1),jwe,332,0,[FPb,EPb,GPb,HPb])))}
+function Z$b(){Z$b=geb;Y$b=ss((U$b(),cD(WC(CQ,1),jwe,389,0,[T$b,R$b,Q$b,S$b])))}
+function wZb(){wZb=geb;vZb=ss((nZb(),cD(WC(lQ,1),jwe,416,0,[jZb,mZb,kZb,lZb])))}
+function jnc(){jnc=geb;inc=ss((enc(),cD(WC(LV,1),jwe,421,0,[anc,bnc,cnc,dnc])))}
+function Bec(){Bec=geb;Aec=ss((vec(),cD(WC(qT,1),jwe,371,0,[uec,sec,tec,rec])))}
+function DDc(){DDc=geb;CDc=ss((wDc(),cD(WC(tX,1),jwe,203,0,[uDc,vDc,tDc,sDc])))}
+function pEc(){pEc=geb;oEc=ss((kEc(),cD(WC(wX,1),jwe,284,0,[hEc,gEc,iEc,jEc])))}
+function Cuc(){Cuc=geb;Buc=new Duc(LAe,0);Auc=new Duc('IMPROVE_STRAIGHTNESS',1)}
+function _i(a,b){var c,d;d=b/a.c.Rd().gc()|0;c=b%a.c.Rd().gc();return Ui(a,d,c)}
+function iZd(a){var b;if(a.nl()){for(b=a.i-1;b>=0;--b){QHd(a,b)}}return WHd(a)}
+function Nyb(a){var b,c;if(!a.b){return null}c=a.b;while(b=c.a[0]){c=b}return c}
+function Oyb(a){var b,c;if(!a.b){return null}c=a.b;while(b=c.a[1]){c=b}return c}
+function Hae(a){if(ZD(a,180)){return ''+RD(a,180).a}return a==null?null:jeb(a)}
+function Iae(a){if(ZD(a,180)){return ''+RD(a,180).a}return a==null?null:jeb(a)}
+function eGb(a,b){if(b.a){throw Adb(new yz(jye))}Ysb(a.a,b);b.a=a;!a.j&&(a.j=b)}
+function hEb(a,b){xxb.call(this,b.zd(),b.yd()&-16449);uFb(a);this.a=a;this.c=b}
+function zXc(a,b){return new gud(b,Zid(ajd(b.e),b.f.a+a,b.f.b+a),(Geb(),false))}
+function EMc(a,b){dMc();return Rmb(a,new Ptd(b,sgb(b.e.c.length+b.g.c.length)))}
+function GMc(a,b){dMc();return Rmb(a,new Ptd(b,sgb(b.e.c.length+b.g.c.length)))}
+function p5c(){p5c=geb;o5c=ss((j5c(),cD(WC(M0,1),jwe,354,0,[i5c,g5c,h5c,f5c])))}
+function x_c(){x_c=geb;w_c=ss((s_c(),cD(WC(J_,1),jwe,353,0,[r_c,p_c,q_c,o_c])))}
+function eWc(){eWc=geb;dWc=ss((YVc(),cD(WC(H$,1),jwe,405,0,[UVc,VVc,WVc,XVc])))}
+function bnd(){bnd=geb;and=ss((Ymd(),cD(WC(u3,1),jwe,223,0,[Xmd,Vmd,Umd,Wmd])))}
+function Vnd(){Vnd=geb;Und=ss((Pnd(),cD(WC(z3,1),jwe,290,0,[Ond,Lnd,Mnd,Nnd])))}
+function Vpd(){Vpd=geb;Upd=ss((Qpd(),cD(WC(H3,1),jwe,386,0,[Opd,Ppd,Npd,Mpd])))}
+function Lrd(){Lrd=geb;Krd=ss((Grd(),cD(WC(R3,1),jwe,320,0,[Frd,Crd,Erd,Drd])))}
+function ytd(){ytd=geb;xtd=ss((ttd(),cD(WC(n4,1),jwe,415,0,[qtd,rtd,ptd,std])))}
+function b9c(){b9c=geb;$8c=new d9c(iFe,0);a9c=new d9c(mEe,1);_8c=new d9c(LAe,2)}
+function sBb(a,b,c,d,e){uFb(a);uFb(b);uFb(c);uFb(d);uFb(e);return new DBb(a,b,d)}
+function fub(a,b){var c;c=RD(_jb(a.e,b),400);if(c){rub(c);return c.e}return null}
+function Ymb(a,b){var c;c=Wmb(a,b,0);if(c==-1){return false}Xmb(a,c);return true}
+function LDb(a,b,c){var d;LCb(a);d=new IEb;d.a=b;a.a.Nb(new QEb(d,c));return d.a}
+function VCb(a){var b;LCb(a);b=$C(iE,vxe,28,0,15,1);ixb(a.a,new dDb(b));return b}
+function yc(a){var b;if(!xc(a)){throw Adb(new Dvb)}a.e=1;b=a.d;a.d=null;return b}
+function Odb(a){var b;if(Kdb(a)){b=0-a;if(!isNaN(b)){return b}}return Edb(xD(a))}
+function Wmb(a,b,c){for(;c=0?Dvd(a,c,true,true):Qvd(a,b,true)}
+function Vwd(a){var b;b=SD(Ywd(a,32));if(b==null){Wwd(a);b=SD(Ywd(a,32))}return b}
+function Yvd(a){var b;if(!a.Oh()){b=AYd(a.Dh())-a.ji();a.$h().Mk(b)}return a.zh()}
+function zQb(a,b){yQb=new kRb;wQb=b;xQb=a;RD(xQb.b,68);BQb(xQb,yQb,null);AQb(xQb)}
+function AWb(){AWb=geb;yWb=new BWb('XY',0);xWb=new BWb('X',1);zWb=new BWb('Y',2)}
+function vLb(){vLb=geb;uLb=new wLb('TOP',0);tLb=new wLb(Kye,1);sLb=new wLb(Qye,2)}
+function Gvc(){Gvc=geb;Evc=new Hvc(LAe,0);Fvc=new Hvc('TOP',1);Dvc=new Hvc(Qye,2)}
+function sEc(){sEc=geb;qEc=new tEc('INPUT_ORDER',0);rEc=new tEc('PORT_DEGREE',1)}
+function MD(){MD=geb;ID=hD(dxe,dxe,524287);JD=hD(0,0,fxe);KD=fD(1);fD(2);LD=fD(0)}
+function wWd(a){var b;if(a.d!=a.r){b=WVd(a);a.e=!!b&&b.lk()==aKe;a.d=b}return a.e}
+function UHd(a,b,c){var d;d=a.g[b];MHd(a,b,a.Zi(b,c));a.Ri(b,c,d);a.Ni();return d}
+function dHd(a,b){var c;c=a.dd(b);if(c>=0){a.gd(c);return true}else{return false}}
+function xr(a,b){var c;Qb(a);Qb(b);c=false;while(b.Ob()){c=c|a.Fc(b.Pb())}return c}
+function cub(a,b){var c;c=RD(Wjb(a.e,b),400);if(c){eub(a,c);return c.e}return null}
+function iB(a){var b,c;b=a/60|0;c=a%60;if(c==0){return ''+b}return ''+b+':'+(''+c)}
+function JB(d,a){var b=d.a[a];var c=(HC(),GC)[typeof b];return c?c(b):NC(typeof b)}
+function EDb(a,b){var c,d;MCb(a);d=new zEb(b,a.a);c=new YDb(d);return new SDb(a,c)}
+function mwb(a){var b;b=a.b.c.length==0?null:Vmb(a.b,0);b!=null&&owb(a,0);return b}
+function ukc(a,b){var c,d,e;e=b.c.i;c=RD(Wjb(a.f,e),60);d=c.d.c-c.e.c;Bjd(b.a,d,0)}
+function XLc(a,b){var c;++a.d;++a.c[b];c=b+1;while(c=0){++b[0]}}
+function eEd(a,b){Dyd(a,b==null||Rfb((uFb(b),b))||isNaN((uFb(b),b))?0:(uFb(b),b))}
+function fEd(a,b){Eyd(a,b==null||Rfb((uFb(b),b))||isNaN((uFb(b),b))?0:(uFb(b),b))}
+function gEd(a,b){Cyd(a,b==null||Rfb((uFb(b),b))||isNaN((uFb(b),b))?0:(uFb(b),b))}
+function hEd(a,b){Ayd(a,b==null||Rfb((uFb(b),b))||isNaN((uFb(b),b))?0:(uFb(b),b))}
+function oWc(a,b,c){return cjd(new rjd(c.e.a+c.f.a/2,c.e.b+c.f.b/2),a)==(uFb(b),b)}
+function qge(a,b){return ZD(b,102)&&(RD(b,19).Bb&txe)!=0?new She(b,a):new Phe(b,a)}
+function sge(a,b){return ZD(b,102)&&(RD(b,19).Bb&txe)!=0?new She(b,a):new Phe(b,a)}
+function XC(a){return a.__elementTypeCategory$==null?10:a.__elementTypeCategory$}
+function Bhb(a,b){return b==(wvb(),wvb(),vvb)?a.toLocaleLowerCase():a.toLowerCase()}
+function Mu(a){if(!a.e){throw Adb(new Dvb)}a.c=a.a=a.e;a.e=a.e.e;--a.d;return a.a.f}
+function Lu(a){if(!a.c){throw Adb(new Dvb)}a.e=a.a=a.c;a.c=a.c.c;++a.d;return a.a.f}
+function Lsb(a){var b;++a.a;for(b=a.c.a.length;a.aa.a[d]&&(d=c)}return d}
+function Krc(a){var b;b=RD(mQb(a,(Ywc(),Wvc)),313);if(b){return b.a==a}return false}
+function Lrc(a){var b;b=RD(mQb(a,(Ywc(),Wvc)),313);if(b){return b.i==a}return false}
+function xXb(){xXb=geb;wXb=ss((sXb(),cD(WC(UP,1),jwe,367,0,[nXb,oXb,pXb,qXb,rXb])))}
+function Hnc(){Hnc=geb;Gnc=ss((Bnc(),cD(WC(TV,1),jwe,375,0,[xnc,znc,Anc,ync,wnc])))}
+function Xtc(){Xtc=geb;Wtc=ss((Ptc(),cD(WC(bX,1),jwe,348,0,[Ltc,Ktc,Ntc,Otc,Mtc])))}
+function RDc(){RDc=geb;QDc=ss((JDc(),cD(WC(uX,1),jwe,323,0,[IDc,FDc,GDc,EDc,HDc])))}
+function hxc(){hxc=geb;gxc=ss((cxc(),cD(WC(mX,1),jwe,171,0,[bxc,Zwc,$wc,_wc,axc])))}
+function m3c(){m3c=geb;l3c=ss((g3c(),cD(WC(x0,1),jwe,368,0,[e3c,b3c,f3c,c3c,d3c])))}
+function xad(){xad=geb;wad=ss((sad(),cD(WC(x1,1),jwe,373,0,[oad,nad,qad,pad,rad])))}
+function acd(){acd=geb;_bd=ss((Xbd(),cD(WC(P1,1),jwe,324,0,[Sbd,Tbd,Wbd,Ubd,Vbd])))}
+function Kmd(){Kmd=geb;Jmd=ss((Cmd(),cD(WC(s3,1),jwe,88,0,[Amd,zmd,ymd,xmd,Bmd])))}
+function bid(){bid=geb;aid=ss((Yhd(),cD(WC(d3,1),jwe,170,0,[Whd,Vhd,Thd,Xhd,Uhd])))}
+function uod(){uod=geb;tod=ss((pod(),cD(WC(B3,1),jwe,256,0,[mod,ood,kod,lod,nod])))}
+function zpd(){zpd=geb;ypd=ss((qpd(),cD(WC(E3,1),NAe,64,0,[opd,Yod,Xod,npd,ppd])))}
+function LHb(){LHb=geb;JHb=new MHb('BY_SIZE',0);KHb=new MHb('BY_SIZE_AND_SHAPE',1)}
+function TUb(){TUb=geb;RUb=new UUb('EADES',0);SUb=new UUb('FRUCHTERMAN_REINGOLD',1)}
+function $tc(){$tc=geb;Ytc=new _tc('READING_DIRECTION',0);Ztc=new _tc('ROTATION',1)}
+function CZb(){CZb=geb;zZb=new ZZb;AZb=new b$b;xZb=new f$b;yZb=new j$b;BZb=new n$b}
+function dGb(a){this.b=new bnb;this.a=new bnb;this.c=new bnb;this.d=new bnb;this.e=a}
+function XZb(a){this.g=a;this.f=new bnb;this.a=$wnd.Math.min(this.g.c.c,this.g.d.c)}
+function UKb(a,b,c){RJb.call(this);KKb(this);this.a=a;this.c=c;this.b=b.d;this.f=b.e}
+function d6b(a,b,c){var d,e;for(e=new Anb(c);e.a=0&&b0?b-1:b;return Kqd(Lqd(Mqd(Nqd(new Oqd,c),a.n),a.j),a.k)}
+function nBd(a){var b,c;c=(b=new q4d,b);WGd((!a.q&&(a.q=new C5d(s7,a,11,10)),a.q),c)}
+function ofb(a){return ((a.i&2)!=0?'interface ':(a.i&1)!=0?'':'class ')+(lfb(a),a.o)}
+function dz(a){if(Ddb(a,lve)>0){return lve}if(Ddb(a,qwe)<0){return qwe}return Ydb(a)}
+function Sv(a){if(a<3){dk(a,fwe);return a+1}if(a=-0.01&&a.a<=Tye&&(a.a=0);a.b>=-0.01&&a.b<=Tye&&(a.b=0);return a}
+function Hid(a){tid();var b,c;c=KEe;for(b=0;bc&&(c=a[b])}return c}
+function Zvd(a,b){var c;c=wYd(a.Dh(),b);if(!c){throw Adb(new agb(KHe+b+NHe))}return c}
+function NGd(a,b){var c;c=a;while(vCd(c)){c=vCd(c);if(c==b){return true}}return false}
+function ix(a,b){var c,d,e;d=b.a.ld();c=RD(b.a.md(),16).gc();for(e=0;ea||a>b){throw Adb(new xeb('fromIndex: 0, toIndex: '+a+Qxe+b))}}
+function ZHd(a){if(a<0){throw Adb(new agb('Illegal Capacity: '+a))}this.g=this.aj(a)}
+function _y(a,b){Zy();bz(pwe);return $wnd.Math.abs(a-b)<=pwe||a==b||isNaN(a)&&isNaN(b)}
+function xJc(a,b){var c,d,e,f;for(d=a.d,e=0,f=d.length;e0){a.a/=b;a.b/=b}return a}
+function BXd(a){var b;if(a.w){return a.w}else{b=CXd(a);!!b&&!b.Vh()&&(a.w=b);return b}}
+function l2d(a,b){var c,d;d=a.a;c=m2d(a,b,null);d!=b&&!a.e&&(c=o2d(a,b,c));!!c&&c.oj()}
+function rQc(a,b,c){var d,e;d=b;do{e=Kfb(a.p[d.p])+c;a.p[d.p]=e;d=a.a[d.p]}while(d!=b)}
+function heb(a,b,c){var d=function(){return a.apply(d,arguments)};b.apply(d,c);return d}
+function Gae(a){var b;if(a==null){return null}else{b=RD(a,195);return sAd(b,b.length)}}
+function QHd(a,b){if(a.g==null||b>=a.i)throw Adb(new yNd(b,a.i));return a.Wi(b,a.g[b])}
+function Dob(a,b){yob();var c,d;d=new bnb;for(c=0;c=14&&b<=16)));return a}
+function ws(a,b){var c;uFb(b);c=a[':'+b];mFb(!!c,'Enum constant undefined: '+b);return c}
+function tfb(a,b,c,d,e,f){var g;g=rfb(a,b);Ffb(c,g);g.i=e?8:0;g.f=d;g.e=e;g.g=f;return g}
+function R3d(a,b,c,d,e){this.d=b;this.k=d;this.f=e;this.o=-1;this.p=1;this.c=a;this.a=c}
+function T3d(a,b,c,d,e){this.d=b;this.k=d;this.f=e;this.o=-1;this.p=2;this.c=a;this.a=c}
+function _3d(a,b,c,d,e){this.d=b;this.k=d;this.f=e;this.o=-1;this.p=6;this.c=a;this.a=c}
+function e4d(a,b,c,d,e){this.d=b;this.k=d;this.f=e;this.o=-1;this.p=7;this.c=a;this.a=c}
+function X3d(a,b,c,d,e){this.d=b;this.j=d;this.e=e;this.o=-1;this.p=4;this.c=a;this.a=c}
+function iGb(a,b){var c,d,e,f;for(d=b,e=0,f=d.length;e=0)){throw Adb(new agb('tolerance ('+a+') must be >= 0'))}return a}
+function hOd(a,b){var c;if(ZD(b,44)){return a.c.Mc(b)}else{c=QNd(a,b);jOd(a,b);return c}}
+function yBd(a,b,c){YVd(a,b);PAd(a,c);$Vd(a,0);bWd(a,1);aWd(a,true);_Vd(a,true);return a}
+function ZGd(a,b){var c;c=a.gc();if(b<0||b>c)throw Adb(new aMd(b,c));return new CMd(a,b)}
+function Cad(a,b){a.b=$wnd.Math.max(a.b,b.d);a.e+=b.r+(a.a.c.length==0?0:a.c);Rmb(a.a,b)}
+function Jmb(a){yFb(a.c>=0);if(rmb(a.d,a.c)<0){a.a=a.a-1&a.d.a.length-1;a.b=a.d.c}a.c=-1}
+function Nc(a){var b,c;for(c=a.c.Cc().Kc();c.Ob();){b=RD(c.Pb(),16);b.$b()}a.c.$b();a.d=0}
+function Zi(a){var b,c,d,e;for(c=a.a,d=0,e=c.length;d=0}
+function Iqd(a,b){if(a.r>0&&a.c0&&a.g!=0&&Iqd(a.i,b/a.r*a.i.d)}}
+function $Cd(a,b){var c;c=a.c;a.c=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,1,c,a.c))}
+function P1d(a,b){var c;c=a.c;a.c=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,4,c,a.c))}
+function jyd(a,b){var c;c=a.k;a.k=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,2,c,a.k))}
+function JXd(a,b){var c;c=a.D;a.D=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,2,c,a.D))}
+function Kzd(a,b){var c;c=a.f;a.f=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,8,c,a.f))}
+function Lzd(a,b){var c;c=a.i;a.i=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,7,c,a.i))}
+function fCd(a,b){var c;c=a.a;a.a=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,8,c,a.a))}
+function ZCd(a,b){var c;c=a.b;a.b=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,0,c,a.b))}
+function s6d(a,b){var c;c=a.b;a.b=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,0,c,a.b))}
+function t6d(a,b){var c;c=a.c;a.c=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,1,c,a.c))}
+function nVd(a,b){var c;c=a.d;a.d=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,1,c,a.d))}
+function Cte(a,b,c){var d;a.b=b;a.a=c;d=(a.a&512)==512?new Gre:new Tqe;a.c=Nqe(d,a.b,a.a)}
+function Gge(a,b){return qke(a.e,b)?(nke(),wWd(b)?new ole(b,a):new Eke(b,a)):new Ble(b,a)}
+function iDb(a){var b,c;if(0>a){return new rDb}b=a+1;c=new kDb(b,a);return new oDb(null,c)}
+function Gob(a,b){yob();var c;c=new Usb(1);bE(a)?$jb(c,a,b):rtb(c.f,a,b);return new uqb(c)}
+function pQc(a,b){var c,d;c=a.c;d=b.e[a.p];if(d>0){return RD(Vmb(c.a,d-1),10)}return null}
+function TOb(a,b){var c,d;c=a.o+a.p;d=b.o+b.p;if(cb){b<<=1;return b>0?b:hwe}return b}
+function xc(a){Ub(a.e!=3);switch(a.e){case 2:return false;case 0:return true;}return zc(a)}
+function djd(a,b){var c;if(ZD(b,8)){c=RD(b,8);return a.a==c.a&&a.b==c.b}else{return false}}
+function Ydd(a,b){var c;c=new kRb;RD(b.b,68);RD(b.b,68);RD(b.b,68);Umb(b.a,new ced(a,c,b))}
+function gOd(a,b){var c,d;for(d=b.vc().Kc();d.Ob();){c=RD(d.Pb(),44);fOd(a,c.ld(),c.md())}}
+function Jzd(a,b){var c;c=a.d;a.d=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,11,c,a.d))}
+function zWd(a,b){var c;c=a.j;a.j=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,13,c,a.j))}
+function b6d(a,b){var c;c=a.b;a.b=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,21,c,a.b))}
+function YAb(a,b){((gBb(),dBb)?null:b.c).length==0&&iBb(b,new rBb);$jb(a.a,dBb?null:b.c,b)}
+function b9b(a,b){b.Ug('Hierarchical port constraint processing',1);c9b(a);e9b(a);b.Vg()}
+function joc(){joc=geb;ioc=new koc('START',0);hoc=new koc('MIDDLE',1);goc=new koc('END',2)}
+function x2c(){x2c=geb;v2c=new z2c('P1_NODE_PLACEMENT',0);w2c=new z2c('P2_EDGE_ROUTING',1)}
+function JVb(){JVb=geb;HVb=new jGd(rAe);IVb=new jGd(sAe);GVb=new jGd(tAe);FVb=new jGd(uAe)}
+function tkb(a){var b;rFb(a.f.g,a.d);sFb(a.b);a.c=a.a;b=RD(a.a.Pb(),44);a.b=skb(a);return b}
+function P2d(a){var b;if(a.b==null){return j3d(),j3d(),i3d}b=a.ul()?a.tl():a.sl();return b}
+function nwb(a,b){var c;c=b==null?-1:Wmb(a.b,b,0);if(c<0){return false}owb(a,c);return true}
+function zsb(a,b){var c;uFb(b);c=b.g;if(!a.b[c]){bD(a.b,c,b);++a.c;return true}return false}
+function azb(a,b){var c,d;c=1-b;d=a.a[c];a.a[c]=d.a[b];d.a[b]=a;a.b=true;d.b=false;return d}
+function xRb(a,b){var c,d;for(d=b.Kc();d.Ob();){c=RD(d.Pb(),272);a.b=true;Ysb(a.e,c);c.b=a}}
+function kic(a,b){var c,d;c=RD(mQb(a,(yCc(),IBc)),8);d=RD(mQb(b,IBc),8);return Qfb(c.b,d.b)}
+function SPb(a,b,c){var d,e,f;f=b>>5;e=b&31;d=Cdb(Udb(a.n[c][f],Ydb(Sdb(e,1))),3);return d}
+function lmb(a,b,c){var d,e,f;f=a.a.length-1;for(e=a.b,d=0;d0?1:0}return (!a.c&&(a.c=ojb(Hdb(a.f))),a.c).e}
+function GXd(a,b){if(b){if(a.B==null){a.B=a.D;a.D=null}}else if(a.B!=null){a.D=a.B;a.B=null}}
+function rZb(a,b){nZb();return a==jZb&&b==mZb||a==mZb&&b==jZb||a==lZb&&b==kZb||a==kZb&&b==lZb}
+function sZb(a,b){nZb();return a==jZb&&b==kZb||a==jZb&&b==lZb||a==mZb&&b==lZb||a==mZb&&b==kZb}
+function zMb(a,b){return Zy(),bz(Tye),$wnd.Math.abs(0-b)<=Tye||0==b||isNaN(0)&&isNaN(b)?0:a/b}
+function qsc(a,b){return Kfb(UD(Lvb(MDb(GDb(new SDb(null,new Swb(a.c.b,16)),new Isc(a)),b))))}
+function tsc(a,b){return Kfb(UD(Lvb(MDb(GDb(new SDb(null,new Swb(a.c.b,16)),new Gsc(a)),b))))}
+function rvc(){ovc();return cD(WC(iX,1),jwe,259,0,[fvc,hvc,ivc,jvc,kvc,lvc,nvc,evc,gvc,mvc])}
+function dEc(){aEc();return cD(WC(vX,1),jwe,243,0,[$Dc,VDc,YDc,WDc,XDc,SDc,ZDc,_Dc,TDc,UDc])}
+function z3c(a,b){var c;b.Ug('General Compactor',1);c=h4c(RD(Gxd(a,($4c(),I4c)),393));c.Cg(a)}
+function T5c(a,b){var c,d;c=RD(Gxd(a,($4c(),P4c)),17);d=RD(Gxd(b,P4c),17);return hgb(c.a,d.a)}
+function Bjd(a,b,c){var d,e;for(e=Sub(a,0);e.b!=e.d.c;){d=RD(evb(e),8);d.a+=b;d.b+=c}return a}
+function Go(a,b,c){var d;for(d=a.b[c&a.f];d;d=d.b){if(c==d.a&&Hb(b,d.g)){return d}}return null}
+function Ho(a,b,c){var d;for(d=a.c[c&a.f];d;d=d.d){if(c==d.f&&Hb(b,d.i)){return d}}return null}
+function sjb(a,b,c){var d,e,f;d=0;for(e=0;e>>31}d!=0&&(a[c]=d)}
+function yzb(a,b,c,d,e,f){var g;this.c=a;g=new bnb;Syb(a,g,b,a.b,c,d,e,f);this.a=new Jkb(g,0)}
+function _5c(){this.c=new T2c(0);this.b=new T2c(FEe);this.d=new T2c(EEe);this.a=new T2c(Gze)}
+function kMb(a,b,c,d,e,f,g){qs.call(this,a,b);this.d=c;this.e=d;this.c=e;this.b=f;this.a=dv(g)}
+function tBd(a,b,c,d,e,f,g,h,i,j,k,l,m){ABd(a,b,c,d,e,f,g,h,i,j,k,l,m);kXd(a,false);return a}
+function H0b(a){if(a.b.c.i.k==(r3b(),m3b)){return RD(mQb(a.b.c.i,(Ywc(),Awc)),12)}return a.b.c}
+function I0b(a){if(a.b.d.i.k==(r3b(),m3b)){return RD(mQb(a.b.d.i,(Ywc(),Awc)),12)}return a.b.d}
+function nDb(a){var b;b=mDb(a);if(Gdb(b.a,0)){return bwb(),bwb(),awb}return bwb(),new ewb(b.b)}
+function SCb(a){var b;b=RCb(a);if(Gdb(b.a,0)){return Tvb(),Tvb(),Svb}return Tvb(),new Yvb(b.b)}
+function TCb(a){var b;b=RCb(a);if(Gdb(b.a,0)){return Tvb(),Tvb(),Svb}return Tvb(),new Yvb(b.c)}
+function o8b(a){switch(a.g){case 2:return qpd(),ppd;case 4:return qpd(),Xod;default:return a;}}
+function p8b(a){switch(a.g){case 1:return qpd(),npd;case 3:return qpd(),Yod;default:return a;}}
+function C9c(a){switch(a.g){case 0:return new s9c;case 1:return new x9c;default:return null;}}
+function Zcc(){Zcc=geb;Ycc=new kGd('edgelabelcenterednessanalysis.includelabel',(Geb(),Eeb))}
+function jKc(){jKc=geb;iKc=mfd(qfd(pfd(pfd(new ufd,(sXb(),pXb),(hcc(),Qbc)),qXb,Gbc),rXb),Pbc)}
+function DLc(){DLc=geb;CLc=mfd(qfd(pfd(pfd(new ufd,(sXb(),pXb),(hcc(),Qbc)),qXb,Gbc),rXb),Pbc)}
+function lYd(){lYd=geb;iYd=new i1d;kYd=cD(WC(y7,1),lKe,179,0,[]);jYd=cD(WC(s7,1),mKe,62,0,[])}
+function P8b(){P8b=geb;O8b=new Q8b('TO_INTERNAL_LTR',0);N8b=new Q8b('TO_INPUT_DIRECTION',1)}
+function J3b(){J3b=geb;G3b=new r4b;E3b=new w4b;F3b=new A4b;D3b=new E4b;H3b=new I4b;I3b=new M4b}
+function Cac(a,b){b.Ug(iBe,1);LJb(KJb(new PJb((i1b(),new t1b(a,false,false,new _1b)))));b.Vg()}
+function M_c(a,b,c){c.Ug('DFS Treeifying phase',1);L_c(a,b);J_c(a,b);a.a=null;a.b=null;c.Vg()}
+function Leb(a,b){Geb();return bE(a)?jhb(a,WD(b)):_D(a)?Jfb(a,UD(b)):$D(a)?Ieb(a,TD(b)):a.Fd(b)}
+function Ld(a,b){var c,d;uFb(b);for(d=b.vc().Kc();d.Ob();){c=RD(d.Pb(),44);a.zc(c.ld(),c.md())}}
+function ege(a,b,c){var d;for(d=c.Kc();d.Ob();){if(!cge(a,b,d.Pb())){return false}}return true}
+function S6d(a,b,c,d,e){var f;if(c){f=BYd(b.Dh(),a.c);e=c.Rh(b,-1-(f==-1?d:f),null,e)}return e}
+function T6d(a,b,c,d,e){var f;if(c){f=BYd(b.Dh(),a.c);e=c.Th(b,-1-(f==-1?d:f),null,e)}return e}
+function Uib(a){var b;if(a.b==-2){if(a.e==0){b=-1}else{for(b=0;a.a[b]==0;b++);}a.b=b}return a.b}
+function fjb(a){uFb(a);if(a.length==0){throw Adb(new Vgb('Zero length BigInteger'))}mjb(this,a)}
+function $Hd(a){this.i=a.gc();if(this.i>0){this.g=this.aj(this.i+(this.i/8|0)+1);a.Qc(this.g)}}
+function dmc(a,b,c){this.g=a;this.d=b;this.e=c;this.a=new bnb;bmc(this);yob();_mb(this.a,null)}
+function aad(a,b){b.q=a;a.d=$wnd.Math.max(a.d,b.r);a.b+=b.d+(a.a.c.length==0?0:a.c);Rmb(a.a,b)}
+function xid(a,b){var c,d,e,f;e=a.c;c=a.c+a.b;f=a.d;d=a.d+a.a;return b.a>e&&b.af&&b.be?(c=e):BFb(b,c+1);a.a=zhb(a.a,0,b)+(''+d)+yhb(a.a,c)}
+function ktb(a,b){a.a=Bdb(a.a,1);a.c=$wnd.Math.min(a.c,b);a.b=$wnd.Math.max(a.b,b);a.d=Bdb(a.d,b)}
+function wdc(a,b){return b1||a.Ob()){++a.a;a.g=0;b=a.i;a.Ob();return b}else{throw Adb(new Dvb)}}
+function GRc(a){switch(a.a.g){case 1:return new lSc;case 3:return new VUc;default:return new WRc;}}
+function fyd(a,b){switch(b){case 1:return !!a.n&&a.n.i!=0;case 2:return a.k!=null;}return Cxd(a,b)}
+function Hdb(a){if(jxe>22);e=a.h+b.h+(d>>22);return hD(c&dxe,d&dxe,e&exe)}
+function DD(a,b){var c,d,e;c=a.l-b.l;d=a.m-b.m+(c>>22);e=a.h-b.h+(d>>22);return hD(c&dxe,d&dxe,e&exe)}
+function Jpc(a){var b,c;Hpc(a);for(c=new Anb(a.d);c.ad)throw Adb(new aMd(b,d));a.Si()&&(c=bHd(a,c));return a.Ei(b,c)}
+function eQb(a,b,c,d,e){var f,g;for(g=c;g<=e;g++){for(f=b;f<=d;f++){PPb(a,f,g)||TPb(a,f,g,true,false)}}}
+function uid(a){tid();var b,c,d;c=$C(l3,Nve,8,2,0,1);d=0;for(b=0;b<2;b++){d+=0.5;c[b]=Cid(d,a)}return c}
+function xD(a){var b,c,d;b=~a.l+1&dxe;c=~a.m+(b==0?1:0)&dxe;d=~a.h+(b==0&&c==0?1:0)&exe;return hD(b,c,d)}
+function mgb(a){var b;if(a<0){return qwe}else if(a==0){return 0}else{for(b=hwe;(b&a)==0;b>>=1);return b}}
+function zSd(a,b,c){if(a>=128)return false;return a<64?Pdb(Cdb(Sdb(1,a),c),0):Pdb(Cdb(Sdb(1,a-64),b),0)}
+function oQb(a,b,c){return c==null?(!a.q&&(a.q=new Tsb),_jb(a.q,b)):(!a.q&&(a.q=new Tsb),Zjb(a.q,b,c)),a}
+function pQb(a,b,c){c==null?(!a.q&&(a.q=new Tsb),_jb(a.q,b)):(!a.q&&(a.q=new Tsb),Zjb(a.q,b,c));return a}
+function KTb(a){var b,c;c=new gUb;kQb(c,a);pQb(c,(JVb(),HVb),a);b=new Tsb;MTb(a,c,b);LTb(a,c,b);return c}
+function cIc(a){var b,c;b=a.t-a.k[a.o.p]*a.d+a.j[a.o.p]>a.f;c=a.u+a.e[a.o.p]*a.d>a.f*a.s*a.d;return b||c}
+function qmc(a,b){var c,d,e,f;c=false;d=a.a[b].length;for(f=0;f=0,'Negative initial capacity');mFb(b>=0,'Non-positive load factor');akb(this)}
+function iib(a,b,c,d,e){var f,g;g=a.length;f=c.length;if(b<0||d<0||e<0||b+e>g||d+e>f){throw Adb(new ueb)}}
+function zob(a,b){yob();var c,d,e,f,g;g=false;for(d=b,e=0,f=d.length;e1||b>=0&&a.b<3}
+function nD(a){var b,c,d;b=~a.l+1&dxe;c=~a.m+(b==0?1:0)&dxe;d=~a.h+(b==0&&c==0?1:0)&exe;a.l=b;a.m=c;a.h=d}
+function Cob(a){yob();var b,c,d;d=1;for(c=a.Kc();c.Ob();){b=c.Pb();d=31*d+(b!=null?tb(b):0);d=d|0}return d}
+function kD(a,b,c,d,e){var f;f=BD(a,b);c&&nD(f);if(e){a=mD(a,b);d?(eD=xD(a)):(eD=hD(a.l,a.m,a.h))}return f}
+function Qlc(a,b,c){a.g=Wlc(a,b,(qpd(),Xod),a.b);a.d=Wlc(a,c,Xod,a.b);if(a.g.c==0||a.d.c==0){return}Tlc(a)}
+function Rlc(a,b,c){a.g=Wlc(a,b,(qpd(),ppd),a.j);a.d=Wlc(a,c,ppd,a.j);if(a.g.c==0||a.d.c==0){return}Tlc(a)}
+function Xyd(a,b){switch(b){case 7:return !!a.e&&a.e.i!=0;case 8:return !!a.d&&a.d.i!=0;}return wyd(a,b)}
+function STb(a,b){switch(b.g){case 0:ZD(a.b,641)||(a.b=new tUb);break;case 1:ZD(a.b,642)||(a.b=new zUb);}}
+function tbd(a){switch(a.g){case 0:return new _dd;default:throw Adb(new agb(eGe+(a.f!=null?a.f:''+a.g)));}}
+function bdd(a){switch(a.g){case 0:return new vdd;default:throw Adb(new agb(eGe+(a.f!=null?a.f:''+a.g)));}}
+function LCc(a,b,c){return !QDb(CDb(new SDb(null,new Swb(a.c,16)),new PAb(new gsd(b,c)))).Bd((xDb(),wDb))}
+function mWc(a,b){return cjd(jWc(RD(mQb(b,(h_c(),H$c)),88)),new rjd(a.c.e.a-a.b.e.a,a.c.e.b-a.b.e.b))<=0}
+function dve(a,b){while(a.g==null&&!a.c?sId(a):a.g==null||a.i!=0&&RD(a.g[a.i-1],51).Ob()){mFd(b,tId(a))}}
+function sYb(a){var b,c;for(c=new Anb(a.a.b);c.ad?1:0}
+function ICc(a){Rmb(a.c,(hed(),fed));if(_y(a.a,Kfb(UD(iGd((QCc(),OCc)))))){return new asd}return new csd(a)}
+function fs(a){while(!a.d||!a.d.Ob()){if(!!a.b&&!nmb(a.b)){a.d=RD(smb(a.b),51)}else{return null}}return a.d}
+function BVc(a){switch(a.g){case 1:return EEe;default:case 2:return 0;case 3:return Gze;case 4:return FEe;}}
+function fte(){Vse();var a;if(Cse)return Cse;a=Zse(hte('M',true));a=$se(hte('M',false),a);Cse=a;return Cse}
+function ttd(){ttd=geb;qtd=new utd('ELK',0);rtd=new utd('JSON',1);ptd=new utd('DOT',2);std=new utd('SVG',3)}
+function TEc(){TEc=geb;SEc=new UEc('STACKED',0);QEc=new UEc('REVERSE_STACKED',1);REc=new UEc('SEQUENCED',2)}
+function LZc(){LZc=geb;KZc=new MZc(LAe,0);JZc=new MZc('MIDDLE_TO_MIDDLE',1);IZc=new MZc('AVOID_OVERLAP',2)}
+function sgc(){sgc=geb;qgc=new Lgc;rgc=new Ngc;pgc=new Dgc;ogc=new Pgc;ngc=new Hgc;mgc=(uFb(ngc),new nrb)}
+function vnd(){vnd=geb;tnd=new A3b(15);snd=new mGd((umd(),tld),tnd);und=Qld;ond=Ekd;pnd=kld;rnd=nld;qnd=mld}
+function wgd(a,b){var c,d,e,f,g;for(d=b,e=0,f=d.length;e=a.b.c.length){return}jwb(a,2*b+1);c=2*b+2;c0){b.Cd(c);c.i&&zKc(c)}}}
+function Ejb(a,b,c){var d;for(d=c-1;d>=0&&a[d]===b[d];d--);return d<0?0:Ldb(Cdb(a[d],yxe),Cdb(b[d],yxe))?-1:1}
+function it(a,b,c){var d,e;this.g=a;this.c=b;this.a=this;this.d=this;e=Wp(c);d=$C(UG,ewe,227,e,0,1);this.b=d}
+function fQb(a,b,c,d,e){var f,g;for(g=c;g<=e;g++){for(f=b;f<=d;f++){if(PPb(a,f,g)){return true}}}return false}
+function Dc(a,b){var c,d;for(d=a.Zb().Cc().Kc();d.Ob();){c=RD(d.Pb(),16);if(c.Hc(b)){return true}}return false}
+function iu(a,b,c){var d,e,f,g;uFb(c);g=false;f=a.fd(b);for(e=c.Kc();e.Ob();){d=e.Pb();f.Rb(d);g=true}return g}
+function NMd(a,b){var c,d;d=RD(Ywd(a.a,4),129);c=$C(d6,IJe,424,b,0,1);d!=null&&hib(d,0,c,0,d.length);return c}
+function hSd(a,b){var c;c=new lSd((a.f&256)!=0,a.i,a.a,a.d,(a.f&16)!=0,a.j,a.g,b);a.e!=null||(c.c=a);return c}
+function Tv(a,b){var c;if(a===b){return true}else if(ZD(b,85)){c=RD(b,85);return Rx(gn(a),c.vc())}return false}
+function Vjb(a,b,c){var d,e;for(e=c.Kc();e.Ob();){d=RD(e.Pb(),44);if(a.Be(b,d.md())){return true}}return false}
+function lmc(a,b,c){if(!a.d[b.p][c.p]){kmc(a,b,c);a.d[b.p][c.p]=true;a.d[c.p][b.p]=true}return a.a[b.p][c.p]}
+function vMc(a,b){var c;if(!a||a==b||!nQb(b,(Ywc(),pwc))){return false}c=RD(mQb(b,(Ywc(),pwc)),10);return c!=a}
+function Bhe(a){switch(a.i){case 2:{return true}case 1:{return false}case -1:{++a.c}default:{return a.$l()}}}
+function Che(a){switch(a.i){case -2:{return true}case -1:{return false}case 1:{--a.c}default:{return a._l()}}}
+function bgb(a){oz.call(this,'The given string does not match the expected format for individual spacings.',a)}
+function J6c(a,b){var c;b.Ug('Min Size Preprocessing',1);c=vsd(a);Ixd(a,(X6c(),U6c),c.a);Ixd(a,R6c,c.b);b.Vg()}
+function Djd(a){var b,c,d;b=0;d=$C(l3,Nve,8,a.b,0,1);c=Sub(a,0);while(c.b!=c.d.c){d[b++]=RD(evb(c),8)}return d}
+function Ajd(a,b,c){var d,e,f;d=new Yub;for(f=Sub(c,0);f.b!=f.d.c;){e=RD(evb(f),8);Mub(d,new sjd(e))}iu(a,b,d)}
+function az(a,b){var c;c=Bdb(a,b);if(Ldb($db(a,b),0)|Jdb($db(a,c),0)){return c}return Bdb(Sve,$db(Udb(c,63),1))}
+function le(a,b){var c,d;c=RD(a.d.Bc(b),16);if(!c){return null}d=a.e.hc();d.Gc(c);a.e.d-=c.gc();c.$b();return d}
+function Dyb(a){var b;b=a.a.c.length;if(b>0){return lyb(b-1,a.a.c.length),Xmb(a.a,b-1)}else{throw Adb(new Srb)}}
+function nFb(a,b,c){if(a>b){throw Adb(new agb(_xe+a+aye+b))}if(a<0||b>c){throw Adb(new xeb(_xe+a+bye+b+Qxe+c))}}
+function yXd(a,b){if(a.D==null&&a.B!=null){a.D=a.B;a.B=null}JXd(a,b==null?null:(uFb(b),b));!!a.C&&a.hl(null)}
+function JCc(a,b){var c;c=iGd((QCc(),OCc))!=null&&b.Sg()!=null?Kfb(UD(b.Sg()))/Kfb(UD(iGd(OCc))):1;Zjb(a.b,b,c)}
+function $Lc(a,b){var c,d;d=a.c[b];if(d==0){return}a.c[b]=0;a.d-=d;c=b+1;while(cDEe?a-c>DEe:c-a>DEe}
+function vjd(a,b){var c;for(c=0;ce){ead(b.q,e);d=c!=b.q.d}}return d}
+function C3c(a,b){var c,d,e,f,g,h,i,j;i=b.i;j=b.j;d=a.f;e=d.i;f=d.j;g=i-e;h=j-f;c=$wnd.Math.sqrt(g*g+h*h);return c}
+function pBd(a,b){var c,d;d=Hvd(a);if(!d){!$Ad&&($Ad=new L5d);c=(gSd(),nSd(b));d=new Sde(c);WGd(d.El(),a)}return d}
+function Sc(a,b){var c,d;c=RD(a.c.Bc(b),16);if(!c){return a.jc()}d=a.hc();d.Gc(c);a.d-=c.gc();c.$b();return a.mc(d)}
+function tKc(a,b){var c,d;d=Kwb(a.d,1)!=0;c=true;while(c){c=false;c=b.c.mg(b.e,d);c=c|DKc(a,b,d,false);d=!d}yKc(a)}
+function omc(a,b,c,d){var e,f;a.a=b;f=d?0:1;a.f=(e=new mmc(a.c,a.a,c,f),new Pmc(c,a.a,e,a.e,a.b,a.c==(RKc(),PKc)))}
+function Imb(a){var b;sFb(a.a!=a.b);b=a.d.a[a.a];zmb(a.b==a.d.c&&b!=null);a.c=a.a;a.a=a.a+1&a.d.a.length-1;return b}
+function Vib(a){var b;if(a.c!=0){return a.c}for(b=0;b=a.c.b:a.a<=a.c.b)){throw Adb(new Dvb)}b=a.a;a.a+=a.c.c;++a.b;return sgb(b)}
+function h5b(a){var b;b=new y2b(a.a);kQb(b,a);pQb(b,(Ywc(),Awc),a);b.o.a=a.g;b.o.b=a.f;b.n.a=a.i;b.n.b=a.j;return b}
+function tVc(a){return (qpd(),hpd).Hc(a.j)?Kfb(UD(mQb(a,(Ywc(),Swc)))):xjd(cD(WC(l3,1),Nve,8,0,[a.i.n,a.n,a.a])).b}
+function ZJc(a){var b;b=vfd(XJc);RD(mQb(a,(Ywc(),kwc)),21).Hc((ovc(),kvc))&&pfd(b,(sXb(),pXb),(hcc(),Ybc));return b}
+function M2c(a){var b,c,d,e;e=new _sb;for(d=new Anb(a);d.a=0?b:-b;while(d>0){if(d%2==0){c*=c;d=d/2|0}else{e*=c;d-=1}}return b<0?1/e:e}
+function Jid(a,b){var c,d,e;e=1;c=a;d=b>=0?b:-b;while(d>0){if(d%2==0){c*=c;d=d/2|0}else{e*=c;d-=1}}return b<0?1/e:e}
+function Vvd(a,b){var c,d,e,f;f=(e=a?Hvd(a):null,Pje((d=b,e?e.Gl():null,d)));if(f==b){c=Hvd(a);!!c&&c.Gl()}return f}
+function g2d(a,b,c){var d,e;e=a.f;a.f=b;if((a.Db&4)!=0&&(a.Db&1)==0){d=new N3d(a,1,0,e,b);!c?(c=d):c.nj(d)}return c}
+function e2d(a,b,c){var d,e;e=a.b;a.b=b;if((a.Db&4)!=0&&(a.Db&1)==0){d=new N3d(a,1,3,e,b);!c?(c=d):c.nj(d)}return c}
+function rAd(a,b,c){var d,e;e=a.a;a.a=b;if((a.Db&4)!=0&&(a.Db&1)==0){d=new N3d(a,1,1,e,b);!c?(c=d):c.nj(d)}return c}
+function SNd(a){var b,c,d,e;if(a!=null){for(c=0;c=d||b-129&&a<128){return ugb(),b=a+128,c=tgb[b],!c&&(c=tgb[b]=new fgb(a)),c}return new fgb(a)}
+function bhb(a){var b,c;if(a>-129&&a<128){return dhb(),b=a+128,c=chb[b],!c&&(c=chb[b]=new Xgb(a)),c}return new Xgb(a)}
+function M$b(a,b){var c;if(a.a.c.length>0){c=RD(Vmb(a.a,a.a.c.length-1),579);if(Q_b(c,b)){return}}Rmb(a.a,new S_b(b))}
+function Ekc(a){lkc();var b,c;b=a.d.c-a.e.c;c=RD(a.g,154);Umb(c.b,new Ykc(b));Umb(c.c,new $kc(b));xgb(c.i,new alc(b))}
+function Mlc(a){var b;b=new bib;b.a+='VerticalSegment ';Yhb(b,a.e);b.a+=' ';Zhb(b,Eb(new Gb,new Anb(a.k)));return b.a}
+function Fmc(a,b){var c,d,e;c=0;for(e=b3b(a,b).Kc();e.Ob();){d=RD(e.Pb(),12);c+=mQb(d,(Ywc(),Iwc))!=null?1:0}return c}
+function VTc(a,b,c){var d,e,f;d=0;for(f=Sub(a,0);f.b!=f.d.c;){e=Kfb(UD(evb(f)));if(e>c){break}else e>=b&&++d}return d}
+function Wv(b,c){Qb(b);try{return b._b(c)}catch(a){a=zdb(a);if(ZD(a,212)||ZD(a,169)){return false}else throw Adb(a)}}
+function Nk(b,c){Qb(b);try{return b.Hc(c)}catch(a){a=zdb(a);if(ZD(a,212)||ZD(a,169)){return false}else throw Adb(a)}}
+function Ok(b,c){Qb(b);try{return b.Mc(c)}catch(a){a=zdb(a);if(ZD(a,212)||ZD(a,169)){return false}else throw Adb(a)}}
+function Xv(b,c){Qb(b);try{return b.xc(c)}catch(a){a=zdb(a);if(ZD(a,212)||ZD(a,169)){return null}else throw Adb(a)}}
+function Yv(b,c){Qb(b);try{return b.Bc(c)}catch(a){a=zdb(a);if(ZD(a,212)||ZD(a,169)){return null}else throw Adb(a)}}
+function aMc(a,b){switch(b.g){case 2:case 1:return b3b(a,b);case 3:case 4:return hv(b3b(a,b));}return yob(),yob(),vob}
+function QAd(a){var b;if((a.Db&64)!=0)return awd(a);b=new Shb(awd(a));b.a+=' (name: ';Nhb(b,a.zb);b.a+=')';return b.a}
+function Fgd(a){var b;b=RD(cub(a.c.c,''),233);if(!b){b=new fgd(ogd(ngd(new pgd,''),'Other'));dub(a.c.c,'',b)}return b}
+function hBd(a,b,c){var d,e;e=a.sb;a.sb=b;if((a.Db&4)!=0&&(a.Db&1)==0){d=new N3d(a,1,4,e,b);!c?(c=d):c.nj(d)}return c}
+function ZVd(a,b,c){var d,e;e=a.r;a.r=b;if((a.Db&4)!=0&&(a.Db&1)==0){d=new N3d(a,1,8,e,a.r);!c?(c=d):c.nj(d)}return c}
+function q5d(a,b,c){var d,e;d=new P3d(a.e,4,13,(e=b.c,e?e:(JTd(),wTd)),null,fZd(a,b),false);!c?(c=d):c.nj(d);return c}
+function p5d(a,b,c){var d,e;d=new P3d(a.e,3,13,null,(e=b.c,e?e:(JTd(),wTd)),fZd(a,b),false);!c?(c=d):c.nj(d);return c}
+function Oee(a,b){var c,d;c=RD(b,691);d=c.el();!d&&c.fl(d=ZD(b,90)?new afe(a,RD(b,29)):new mfe(a,RD(b,156)));return d}
+function KHd(a,b,c){var d;a._i(a.i+1);d=a.Zi(b,c);b!=a.i&&hib(a.g,b,a.g,b+1,a.i-b);bD(a.g,b,d);++a.i;a.Mi(b,c);a.Ni()}
+function Hyb(a,b){var c;if(b.a){c=b.a.a.length;!a.a?(a.a=new dib(a.d)):Zhb(a.a,a.b);Xhb(a.a,b.a,b.d.length,c)}return a}
+function wib(a,b){var c;a.c=b;a.a=pjb(b);a.a<54&&(a.f=(c=b.d>1?DFb(b.a[0],b.a[1]):DFb(b.a[0],0),Xdb(b.e>0?c:Odb(c))))}
+function MDb(a,b){var c;c=new IEb;if(!a.a.Bd(c)){LCb(a);return Kvb(),Kvb(),Jvb}return Kvb(),new Ovb(uFb(LDb(a,c.a,b)))}
+function t9b(a,b){var c;if(a.c.length==0){return}c=RD(anb(a,$C(jR,WAe,10,a.c.length,0,1)),199);Znb(c,new F9b);q9b(c,b)}
+function z9b(a,b){var c;if(a.c.length==0){return}c=RD(anb(a,$C(jR,WAe,10,a.c.length,0,1)),199);Znb(c,new K9b);q9b(c,b)}
+function pb(a,b){return bE(a)?lhb(a,b):_D(a)?Lfb(a,b):$D(a)?(uFb(a),dE(a)===dE(b)):YD(a)?a.Fb(b):aD(a)?mb(a,b):Hz(a,b)}
+function Cvd(a,b,c){if(b<0){Tvd(a,c)}else{if(!c.rk()){throw Adb(new agb(KHe+c.xe()+LHe))}RD(c,69).wk().Ek(a,a.hi(),b)}}
+function xFb(a,b,c){if(a<0||b>c){throw Adb(new veb(_xe+a+bye+b+', size: '+c))}if(a>b){throw Adb(new agb(_xe+a+aye+b))}}
+function oVd(a){var b;if((a.Db&64)!=0)return awd(a);b=new Shb(awd(a));b.a+=' (source: ';Nhb(b,a.d);b.a+=')';return b.a}
+function JSd(a){if(a>=65&&a<=70){return a-65+10}if(a>=97&&a<=102){return a-97+10}if(a>=48&&a<=57){return a-48}return 0}
+function lMb(a){hMb();var b,c,d,e;for(c=nMb(),d=0,e=c.length;d=0?jjb(a):Xib(jjb(Odb(a)))))}
+function G0b(a,b,c,d,e,f){this.e=new bnb;this.f=(BEc(),AEc);Rmb(this.e,a);this.d=b;this.a=c;this.b=d;this.f=e;this.c=f}
+function bQb(a,b,c){a.n=YC(lE,[Nve,rxe],[376,28],14,[c,eE($wnd.Math.ceil(b/32))],2);a.o=b;a.p=c;a.j=b-1>>1;a.k=c-1>>1}
+function ggb(a){a-=a>>1&1431655765;a=(a>>2&858993459)+(a&858993459);a=(a>>4)+a&252645135;a+=a>>8;a+=a>>16;return a&63}
+function C4d(a,b){var c,d;for(d=new dMd(a);d.e!=d.i.gc();){c=RD(bMd(d),142);if(dE(b)===dE(c)){return true}}return false}
+function Iee(a,b,c){var d,e,f;f=(e=N5d(a.b,b),e);if(f){d=RD(tfe(Pee(a,f),''),29);if(d){return Ree(a,d,b,c)}}return null}
+function Lee(a,b,c){var d,e,f;f=(e=N5d(a.b,b),e);if(f){d=RD(tfe(Pee(a,f),''),29);if(d){return See(a,d,b,c)}}return null}
+function IDd(a,b){var c;c=Ao(a.i,b);if(c==null){throw Adb(new CDd('Node did not exist in input.'))}wEd(b,c);return null}
+function wvd(a,b){var c;c=wYd(a,b);if(ZD(c,331)){return RD(c,35)}throw Adb(new agb(KHe+b+"' is not a valid attribute"))}
+function VGd(a,b,c){var d;d=a.gc();if(b>d)throw Adb(new aMd(b,d));if(a.Si()&&a.Hc(c)){throw Adb(new agb(LIe))}a.Gi(b,c)}
+function w7b(a,b){b.Ug('Sort end labels',1);FDb(CDb(EDb(new SDb(null,new Swb(a.b,16)),new H7b),new J7b),new L7b);b.Vg()}
+function Cmd(){Cmd=geb;Amd=new Gmd(Sye,0);zmd=new Gmd(Oye,1);ymd=new Gmd(Nye,2);xmd=new Gmd(Zye,3);Bmd=new Gmd('UP',4)}
+function gbd(){gbd=geb;dbd=new hbd('P1_STRUCTURE',0);ebd=new hbd('P2_PROCESSING_ORDER',1);fbd=new hbd('P3_EXECUTION',2)}
+function r0c(){r0c=geb;q0c=mfd(mfd(rfd(mfd(mfd(rfd(pfd(new ufd,(YVc(),VVc),(WYc(),VYc)),WVc),RYc),TYc),XVc),NYc),UYc)}
+function s8b(a){switch(RD(mQb(a,(Ywc(),owc)),311).g){case 1:pQb(a,owc,(Gvc(),Dvc));break;case 2:pQb(a,owc,(Gvc(),Fvc));}}
+function bUc(a){switch(a){case 0:return new mUc;case 1:return new cUc;case 2:return new hUc;default:throw Adb(new _fb);}}
+function Fmd(a){switch(a.g){case 2:return zmd;case 1:return ymd;case 4:return xmd;case 3:return Bmd;default:return Amd;}}
+function UNb(a,b){switch(a.b.g){case 0:case 1:return b;case 2:case 3:return new Uid(b.d,0,b.a,b.b);default:return null;}}
+function rpd(a){switch(a.g){case 1:return ppd;case 2:return Yod;case 3:return Xod;case 4:return npd;default:return opd;}}
+function spd(a){switch(a.g){case 1:return npd;case 2:return ppd;case 3:return Yod;case 4:return Xod;default:return opd;}}
+function tpd(a){switch(a.g){case 1:return Xod;case 2:return npd;case 3:return ppd;case 4:return Yod;default:return opd;}}
+function cyd(a,b,c,d){switch(b){case 1:return !a.n&&(a.n=new C5d(I4,a,1,7)),a.n;case 2:return a.k;}return Axd(a,b,c,d)}
+function uLd(a,b,c){var d,e;if(a.Pj()){e=a.Qj();d=SHd(a,b,c);a.Jj(a.Ij(7,sgb(c),d,b,e));return d}else{return SHd(a,b,c)}}
+function VNd(a,b){var c,d,e;if(a.d==null){++a.e;--a.f}else{e=b.ld();c=b.Bi();d=(c&lve)%a.d.length;iOd(a,d,XNd(a,d,c,e))}}
+function xWd(a,b){var c;c=(a.Bb&gwe)!=0;b?(a.Bb|=gwe):(a.Bb&=-1025);(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new Q3d(a,1,10,c,b))}
+function DWd(a,b){var c;c=(a.Bb&qxe)!=0;b?(a.Bb|=qxe):(a.Bb&=-4097);(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new Q3d(a,1,12,c,b))}
+function EWd(a,b){var c;c=(a.Bb&bKe)!=0;b?(a.Bb|=bKe):(a.Bb&=-8193);(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new Q3d(a,1,15,c,b))}
+function FWd(a,b){var c;c=(a.Bb&cKe)!=0;b?(a.Bb|=cKe):(a.Bb&=-2049);(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new Q3d(a,1,11,c,b))}
+function zKc(a){var b;if(a.g){b=a.c.kg()?a.f:a.a;BKc(b.a,a.o,true);BKc(b.a,a.o,false);pQb(a.o,(yCc(),BBc),(Bod(),vod))}}
+function Orc(a){var b;if(!a.a){throw Adb(new dgb('Cannot offset an unassigned cut.'))}b=a.c-a.b;a.b+=b;Qrc(a,b);Rrc(a,b)}
+function JDd(a,b){var c;c=Wjb(a.k,b);if(c==null){throw Adb(new CDd('Port did not exist in input.'))}wEd(b,c);return null}
+function Jje(a){var b,c;for(c=Kje(BXd(a)).Kc();c.Ob();){b=WD(c.Pb());if(bAd(a,b)){return USd((TSd(),SSd),b)}}return null}
+function qJb(a){var b,c;for(c=a.p.a.ec().Kc();c.Ob();){b=RD(c.Pb(),218);if(b.f&&a.b[b.c]<-1.0E-10){return b}}return null}
+function Lr(a){var b,c;c=Thb(new bib,91);b=true;while(a.Ob()){b||(c.a+=pve,c);b=false;Yhb(c,a.Pb())}return (c.a+=']',c).a}
+function o_b(a){var b,c,d;b=new bnb;for(d=new Anb(a.b);d.ab){return 1}if(a==b){return a==0?Qfb(1/a,1/b):0}return isNaN(a)?isNaN(b)?0:1:-1}
+function pmb(a){var b;b=a.a[a.c-1&a.a.length-1];if(b==null){return null}a.c=a.c-1&a.a.length-1;bD(a.a,a.c,null);return b}
+function Dqe(a){var b,c,d;d=0;c=a.length;for(b=0;b=1?zmd:xmd}return c}
+function Xhc(a){switch(RD(mQb(a,(yCc(),yAc)),223).g){case 1:return new jqc;case 3:return new arc;default:return new dqc;}}
+function MCb(a){if(a.c){MCb(a.c)}else if(a.d){throw Adb(new dgb("Stream already terminated, can't be modified or used"))}}
+function Ltb(a,b,c){var d;d=a.a.get(b);a.a.set(b,c===undefined?null:c);if(d===undefined){++a.c;++a.b.g}else{++a.d}return d}
+function HHc(a,b,c){var d,e;for(e=a.a.ec().Kc();e.Ob();){d=RD(e.Pb(),10);if(Be(c,RD(Vmb(b,d.p),16))){return d}}return null}
+function u0c(a,b,c){var d;d=0;!!b&&(Emd(a.a)?(d+=b.f.a/2):(d+=b.f.b/2));!!c&&(Emd(a.a)?(d+=c.f.a/2):(d+=c.f.b/2));return d}
+function LWb(a,b,c){var d;d=c;!d&&(d=Nqd(new Oqd,0));d.Ug(EAe,2);y0b(a.b,b,d.eh(1));NWb(a,b,d.eh(1));h0b(b,d.eh(1));d.Vg()}
+function CGd(a,b,c){var d,e;d=(bvd(),e=new Xxd,e);Vxd(d,b);Wxd(d,c);!!a&&WGd((!a.a&&(a.a=new XZd(D4,a,5)),a.a),d);return d}
+function kyd(a){var b;if((a.Db&64)!=0)return awd(a);b=new Shb(awd(a));b.a+=' (identifier: ';Nhb(b,a.k);b.a+=')';return b.a}
+function kXd(a,b){var c;c=(a.Bb&QHe)!=0;b?(a.Bb|=QHe):(a.Bb&=-32769);(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new Q3d(a,1,18,c,b))}
+function a6d(a,b){var c;c=(a.Bb&QHe)!=0;b?(a.Bb|=QHe):(a.Bb&=-32769);(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new Q3d(a,1,18,c,b))}
+function AWd(a,b){var c;c=(a.Bb&Ove)!=0;b?(a.Bb|=Ove):(a.Bb&=-16385);(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new Q3d(a,1,16,c,b))}
+function c6d(a,b){var c;c=(a.Bb&txe)!=0;b?(a.Bb|=txe):(a.Bb&=-65537);(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new Q3d(a,1,20,c,b))}
+function qse(a){var b;b=$C(hE,zwe,28,2,15,1);a-=txe;b[0]=(a>>10)+uxe&Bwe;b[1]=(a&1023)+56320&Bwe;return Ihb(b,0,b.length)}
+function Zfb(a){var b;b=Neb(a);if(b>3.4028234663852886E38){return oxe}else if(b<-3.4028234663852886E38){return pxe}return b}
+function Bdb(a,b){var c;if(Kdb(a)&&Kdb(b)){c=a+b;if(jxe'+aXc(b.c):'e_'+tb(b),!!a.b&&!!a.c?aXc(a.b)+'->'+aXc(a.c):'e_'+tb(a))}
+function rWc(a,b){return lhb(!!b.b&&!!b.c?aXc(b.b)+'->'+aXc(b.c):'e_'+tb(b),!!a.b&&!!a.c?aXc(a.b)+'->'+aXc(a.c):'e_'+tb(a))}
+function $y(a,b){Zy();return bz(pwe),$wnd.Math.abs(a-b)<=pwe||a==b||isNaN(a)&&isNaN(b)?0:ab?1:cz(isNaN(a),isNaN(b))}
+function Ymd(){Ymd=geb;Xmd=new Zmd(Sye,0);Vmd=new Zmd('POLYLINE',1);Umd=new Zmd('ORTHOGONAL',2);Wmd=new Zmd('SPLINES',3)}
+function _6c(){_6c=geb;Z6c=new a7c('ASPECT_RATIO_DRIVEN',0);$6c=new a7c('MAX_SCALE_DRIVEN',1);Y6c=new a7c('AREA_DRIVEN',2)}
+function Db(b,c,d){var e;try{Cb(b,c,d)}catch(a){a=zdb(a);if(ZD(a,606)){e=a;throw Adb(new Deb(e))}else throw Adb(a)}return c}
+function Im(a){var b,c,d;for(c=0,d=a.length;cb&&d.Ne(a[f-1],a[f])>0;--f){g=a[f];bD(a,f,a[f-1]);bD(a,f-1,g)}}}
+function Egd(a,b){var c,d,e,f,g;c=b.f;dub(a.c.d,c,b);if(b.g!=null){for(e=b.g,f=0,g=e.length;fb){fvb(c);break}}cvb(c,b)}
+function Kic(a,b){var c,d,e;d=Zjc(b);e=Kfb(UD(hFc(d,(yCc(),TBc))));c=$wnd.Math.max(0,e/2-0.5);Iic(b,c,1);Rmb(a,new hjc(b,c))}
+function L5c(a,b,c){var d;c.Ug('Straight Line Edge Routing',1);c.dh(b,eFe);d=RD(Gxd(b,(u2c(),t2c)),27);M5c(a,d);c.dh(b,gFe)}
+function K9c(a,b){a.n.c.length==0&&Rmb(a.n,new _9c(a.s,a.t,a.i));Rmb(a.b,b);W9c(RD(Vmb(a.n,a.n.c.length-1),209),b);M9c(a,b)}
+function Zrb(a){var b;this.a=(b=RD(a.e&&a.e(),9),new Fsb(b,RD(WEb(b,b.length),9),0));this.b=$C(jJ,rve,1,this.a.a.length,5,1)}
+function jeb(a){var b;if(Array.isArray(a)&&a.Tm===keb){return nfb(rb(a))+'@'+(b=tb(a)>>>0,b.toString(16))}return a.toString()}
+function jD(a,b){if(a.h==fxe&&a.m==0&&a.l==0){b&&(eD=hD(0,0,0));return gD((MD(),KD))}b&&(eD=hD(a.l,a.m,a.h));return hD(0,0,0)}
+function _Gb(a,b){switch(b.g){case 2:return a.b;case 1:return a.c;case 4:return a.d;case 3:return a.a;default:return false;}}
+function IYb(a,b){switch(b.g){case 2:return a.b;case 1:return a.c;case 4:return a.d;case 3:return a.a;default:return false;}}
+function vyd(a,b,c,d){switch(b){case 3:return a.f;case 4:return a.g;case 5:return a.i;case 6:return a.j;}return cyd(a,b,c,d)}
+function oIb(a,b){if(b==a.d){return a.e}else if(b==a.e){return a.d}else{throw Adb(new agb('Node '+b+' not part of edge '+a))}}
+function Uvd(a,b){var c;c=wYd(a.Dh(),b);if(ZD(c,102)){return RD(c,19)}throw Adb(new agb(KHe+b+"' is not a valid reference"))}
+function Bvd(a,b,c,d){if(b<0){Svd(a,c,d)}else{if(!c.rk()){throw Adb(new agb(KHe+c.xe()+LHe))}RD(c,69).wk().Ck(a,a.hi(),b,d)}}
+function ig(a){var b;if(a.b){ig(a.b);if(a.b.d!=a.c){throw Adb(new Jrb)}}else if(a.d.dc()){b=RD(a.f.c.xc(a.e),16);!!b&&(a.d=b)}}
+function VMb(a){RMb();var b,c,d,e;b=a.o.b;for(d=RD(RD(Qc(a.r,(qpd(),npd)),21),87).Kc();d.Ob();){c=RD(d.Pb(),117);e=c.e;e.b+=b}}
+function SRb(a){var b,c,d;this.a=new Iub;for(d=new Anb(a);d.a=e){return b.c+c}}return b.c+b.b.gc()}
+function lQd(a,b){jQd();var c,d,e,f;d=iZd(a);e=b;Wnb(d,0,d.length,e);for(c=0;c0){d+=e;++c}}c>1&&(d+=a.d*(c-1));return d}
+function FFd(a){var b,c,d,e,f;f=HFd(a);c=cve(a.c);d=!c;if(d){e=new MB;sC(f,'knownLayouters',e);b=new QFd(e);xgb(a.c,b)}return f}
+function fHd(a){var b,c,d;d=new Qhb;d.a+='[';for(b=0,c=a.gc();b0&&(BFb(b-1,a.length),a.charCodeAt(b-1)==58)&&!mSd(a,aSd,bSd)}
+function Sib(a,b){var c;if(dE(a)===dE(b)){return true}if(ZD(b,92)){c=RD(b,92);return a.e==c.e&&a.d==c.d&&Tib(a,c.a)}return false}
+function vpd(a){qpd();switch(a.g){case 4:return Yod;case 1:return Xod;case 3:return npd;case 2:return ppd;default:return opd;}}
+function jBb(a){var b,c;if(a.b){return a.b}c=dBb?null:a.d;while(c){b=dBb?null:c.b;if(b){return b}c=dBb?null:c.d}return SAb(),RAb}
+function LJb(a){var b,c,d;d=Kfb(UD(a.a.of((umd(),cmd))));for(c=new Anb(a.a.Sf());c.a>5;b=a&31;d=$C(kE,Pwe,28,c+1,15,1);d[c]=1<3){e*=10;--f}a=(a+(e>>1))/e|0}d.i=a;return true}
+function BYd(a,b){var c,d,e;c=(a.i==null&&rYd(a),a.i);d=b.Lj();if(d!=-1){for(e=c.length;d=0;--d){b=c[d];for(e=0;e>1;this.k=b-1>>1}
+function Dfd(a){Afd();if(RD(a.of((umd(),pld)),181).Hc((dqd(),bqd))){RD(a.of(Lld),181).Fc((Pod(),Ood));RD(a.of(pld),181).Mc(bqd)}}
+function ndc(a){var b,c;b=a.d==(btc(),Ysc);c=jdc(a);b&&!c||!b&&c?pQb(a.a,(yCc(),Rzc),(Rjd(),Pjd)):pQb(a.a,(yCc(),Rzc),(Rjd(),Ojd))}
+function QCc(){QCc=geb;GCc();OCc=(yCc(),bCc);PCc=dv(cD(WC(V5,1),kEe,149,0,[SBc,TBc,VBc,WBc,ZBc,$Bc,_Bc,aCc,dCc,fCc,UBc,XBc,cCc]))}
+function RDb(a,b){var c;c=RD(zDb(a,tBb(new ZBb,new XBb,new wCb,cD(WC(QL,1),jwe,108,0,[(xBb(),vBb)]))),15);return c.Qc(__c(c.gc()))}
+function nXc(a,b){var c,d;d=new zAb(a.a.ad(b,true));if(d.a.gc()<=1){throw Adb(new Ngb)}c=d.a.ec().Kc();c.Pb();return RD(c.Pb(),39)}
+function lQc(a,b,c){var d,e;d=Kfb(a.p[b.i.p])+Kfb(a.d[b.i.p])+b.n.b+b.a.b;e=Kfb(a.p[c.i.p])+Kfb(a.d[c.i.p])+c.n.b+c.a.b;return e-d}
+function XHd(a,b){var c;if(a.i>0){if(b.lengtha.i&&bD(b,a.i,null);return b}
+function MXd(a){var b;if((a.Db&64)!=0)return QAd(a);b=new Shb(QAd(a));b.a+=' (instanceClassName: ';Nhb(b,a.D);b.a+=')';return b.a}
+function ySd(a){var b,c,d,e;e=0;for(c=0,d=a.length;c0){a._j();d=b==null?0:tb(b);e=(d&lve)%a.d.length;c=XNd(a,e,d,b);return c!=-1}else{return false}}
+function Nrb(a,b){var c,d;a.a=Bdb(a.a,1);a.c=$wnd.Math.min(a.c,b);a.b=$wnd.Math.max(a.b,b);a.d+=b;c=b-a.f;d=a.e+c;a.f=d-a.e-c;a.e=d}
+function yyd(a,b){switch(b){case 3:Ayd(a,0);return;case 4:Cyd(a,0);return;case 5:Dyd(a,0);return;case 6:Eyd(a,0);return;}hyd(a,b)}
+function c3b(a,b){switch(b.g){case 1:return dr(a.j,(J3b(),E3b));case 2:return dr(a.j,(J3b(),G3b));default:return yob(),yob(),vob;}}
+function zm(a){tm();var b;b=a.Pc();switch(b.length){case 0:return sm;case 1:return new Dy(Qb(b[0]));default:return new Kx(Im(b));}}
+function kMd(b,c){b.Xj();try{b.d.bd(b.e++,c);b.f=b.d.j;b.g=-1}catch(a){a=zdb(a);if(ZD(a,77)){throw Adb(new Jrb)}else throw Adb(a)}}
+function a8d(){a8d=geb;$7d=new b8d;T7d=new e8d;U7d=new h8d;V7d=new k8d;W7d=new n8d;X7d=new q8d;Y7d=new t8d;Z7d=new w8d;_7d=new z8d}
+function YA(a,b){WA();var c,d;c=_A(($A(),$A(),ZA));d=null;b==c&&(d=RD(Xjb(VA,a),624));if(!d){d=new XA(a);b==c&&$jb(VA,a,d)}return d}
+function zDc(a){wDc();var b;(!a.q?(yob(),yob(),wob):a.q)._b((yCc(),iBc))?(b=RD(mQb(a,iBc),203)):(b=RD(mQb(Y2b(a),jBc),203));return b}
+function hFc(a,b){var c,d;d=null;if(nQb(a,(yCc(),YBc))){c=RD(mQb(a,YBc),96);c.pf(b)&&(d=c.of(b))}d==null&&(d=mQb(Y2b(a),b));return d}
+function Ze(a,b){var c,d,e;if(ZD(b,44)){c=RD(b,44);d=c.ld();e=Xv(a.Rc(),d);return Hb(e,c.md())&&(e!=null||a.Rc()._b(d))}return false}
+function $Nd(a,b){var c,d,e;if(a.f>0){a._j();d=b==null?0:tb(b);e=(d&lve)%a.d.length;c=WNd(a,e,d,b);if(c){return c.md()}}return null}
+function qLd(a,b,c){var d,e,f;if(a.Pj()){d=a.i;f=a.Qj();KHd(a,d,b);e=a.Ij(3,null,b,d,f);!c?(c=e):c.nj(e)}else{KHd(a,a.i,b)}return c}
+function f$d(a,b,c){var d,e;d=new P3d(a.e,4,10,(e=b.c,ZD(e,90)?RD(e,29):(JTd(),zTd)),null,fZd(a,b),false);!c?(c=d):c.nj(d);return c}
+function e$d(a,b,c){var d,e;d=new P3d(a.e,3,10,null,(e=b.c,ZD(e,90)?RD(e,29):(JTd(),zTd)),fZd(a,b),false);!c?(c=d):c.nj(d);return c}
+function SMb(a){RMb();var b;b=new sjd(RD(a.e.of((umd(),nld)),8));if(a.B.Hc((dqd(),Ypd))){b.a<=0&&(b.a=20);b.b<=0&&(b.b=20)}return b}
+function jjb(a){Pib();var b,c;c=Ydb(a);b=Ydb(Udb(a,32));if(b!=0){return new bjb(c,b)}if(c>10||c<0){return new ajb(1,c)}return Lib[c]}
+function Mdb(a,b){var c;if(Kdb(a)&&Kdb(b)){c=a%b;if(jxe=0){f=f.a[1]}else{e=f;f=f.a[0]}}return e}
+function Qyb(a,b,c){var d,e,f;e=null;f=a.b;while(f){d=a.a.Ne(b,f.d);if(c&&d==0){return f}if(d<=0){f=f.a[0]}else{e=f;f=f.a[1]}}return e}
+function rmc(a,b,c,d){var e,f,g;e=false;if(Lmc(a.f,c,d)){Omc(a.f,a.a[b][c],a.a[b][d]);f=a.a[b];g=f[d];f[d]=f[c];f[c]=g;e=true}return e}
+function Nqc(a,b,c){var d,e,f,g;e=RD(Wjb(a.b,c),183);d=0;for(g=new Anb(b.j);g.a>5;b&=31;e=a.d+c+(b==0?0:1);d=$C(kE,Pwe,28,e,15,1);rjb(d,a.a,c,b);f=new cjb(a.e,e,d);Rib(f);return f}
+function zGc(a,b){var c,d,e;for(d=new is(Mr(a3b(a).a.Kc(),new ir));gs(d);){c=RD(hs(d),18);e=c.d.i;if(e.c==b){return false}}return true}
+function _Ec(a,b,c){var d,e,f,g,h;g=a.k;h=b.k;d=c[g.g][h.g];e=UD(hFc(a,d));f=UD(hFc(b,d));return $wnd.Math.max((uFb(e),e),(uFb(f),f))}
+function lA(){if(Error.stackTraceLimit>0){$wnd.Error.stackTraceLimit=Error.stackTraceLimit=64;return true}return 'stack' in new Error}
+function sGb(a,b){return Zy(),Zy(),bz(pwe),($wnd.Math.abs(a-b)<=pwe||a==b||isNaN(a)&&isNaN(b)?0:ab?1:cz(isNaN(a),isNaN(b)))>0}
+function uGb(a,b){return Zy(),Zy(),bz(pwe),($wnd.Math.abs(a-b)<=pwe||a==b||isNaN(a)&&isNaN(b)?0:ab?1:cz(isNaN(a),isNaN(b)))<0}
+function tGb(a,b){return Zy(),Zy(),bz(pwe),($wnd.Math.abs(a-b)<=pwe||a==b||isNaN(a)&&isNaN(b)?0:ab?1:cz(isNaN(a),isNaN(b)))<=0}
+function Efb(a,b){var c=0;while(!b[c]||b[c]==''){c++}var d=b[c++];for(;c0&&this.b>0&&(this.g=Aad(this.c,this.b,this.a))}
+function rC(f,a){var b=f.a;var c;a=String(a);b.hasOwnProperty(a)&&(c=b[a]);var d=(HC(),GC)[typeof c];var e=d?d(c):NC(typeof c);return e}
+function uDd(a){var b,c,d;d=null;b=uIe in a.a;c=!b;if(c){throw Adb(new CDd('Every element must have an id.'))}d=tDd(qC(a,uIe));return d}
+function Qqe(a){var b,c;c=Rqe(a);b=null;while(a.c==2){Mqe(a);if(!b){b=(Vse(),Vse(),++Use,new iue(2));hue(b,c);c=b}c.Jm(Rqe(a))}return c}
+function jOd(a,b){var c,d,e;a._j();d=b==null?0:tb(b);e=(d&lve)%a.d.length;c=WNd(a,e,d,b);if(c){hOd(a,c);return c.md()}else{return null}}
+function Qib(a,b){if(a.e>b.e){return 1}if(a.eb.d){return a.e}if(a.d=48&&a<48+$wnd.Math.min(10,10)){return a-48}if(a>=97&&a<97){return a-97+10}if(a>=65&&a<65){return a-65+10}return -1}
+function UHc(a,b){if(b.c==a){return b.d}else if(b.d==a){return b.c}throw Adb(new agb('Input edge is not connected to the input port.'))}
+function Fae(a){if(mhb(FGe,a)){return Geb(),Feb}else if(mhb(GGe,a)){return Geb(),Eeb}else{throw Adb(new agb('Expecting true or false'))}}
+function jFb(a){switch(typeof(a)){case jve:return ohb(a);case ive:return Nfb(a);case hve:return Jeb(a);default:return a==null?0:kFb(a);}}
+function mfd(a,b){if(a.a<0){throw Adb(new dgb('Did not call before(...) or after(...) before calling add(...).'))}tfd(a,a.a,b);return a}
+function FId(a){EId();if(ZD(a,162)){return RD(Wjb(CId,zK),294).Rg(a)}if(Ujb(CId,rb(a))){return RD(Wjb(CId,rb(a)),294).Rg(a)}return null}
+function Wwd(a){var b,c;if((a.Db&32)==0){c=(b=RD(Ywd(a,16),29),AYd(!b?a.ii():b)-AYd(a.ii()));c!=0&&$wd(a,32,$C(jJ,rve,1,c,5,1))}return a}
+function $wd(a,b,c){var d;if((a.Db&b)!=0){if(c==null){Zwd(a,b)}else{d=Xwd(a,b);d==-1?(a.Eb=c):bD(SD(a.Eb),d,c)}}else c!=null&&Twd(a,b,c)}
+function tTc(a,b,c,d){var e,f;if(b.c.length==0){return}e=pTc(c,d);f=oTc(b);FDb(PDb(new SDb(null,new Swb(f,1)),new CTc),new GTc(a,c,e,d))}
+function rmb(a,b){var c,d,e,f;d=a.a.length-1;c=b-a.b&d;f=a.c-b&d;e=a.c-a.b&d;zmb(c=f){umb(a,b);return -1}else{vmb(a,b);return 1}}
+function Hvd(a){var b,c,d;d=a.Jh();if(!d){b=0;for(c=a.Ph();c;c=c.Ph()){if(++b>wxe){return c.Qh()}d=c.Jh();if(!!d||c==a){break}}}return d}
+function Ue(a,b){var c;if(dE(b)===dE(a)){return true}if(!ZD(b,21)){return false}c=RD(b,21);if(c.gc()!=a.gc()){return false}return a.Ic(c)}
+function kNc(a,b){if(a.eb.e){return 1}else if(a.fb.f){return 1}return tb(a)-tb(b)}
+function mhb(a,b){uFb(a);if(b==null){return false}if(lhb(a,b)){return true}return a.length==b.length&&lhb(a.toLowerCase(),b.toLowerCase())}
+function Hgb(a){var b,c;if(Ddb(a,-129)>0&&Ddb(a,128)<0){return Jgb(),b=Ydb(a)+128,c=Igb[b],!c&&(c=Igb[b]=new zgb(a)),c}return new zgb(a)}
+function U$b(){U$b=geb;T$b=new V$b(LAe,0);R$b=new V$b('INSIDE_PORT_SIDE_GROUPS',1);Q$b=new V$b('GROUP_MODEL_ORDER',2);S$b=new V$b(MAe,3)}
+function ufe(a){var b;a.b||vfe(a,(b=Hee(a.e,a.a),!b||!lhb(GGe,$Nd((!b.b&&(b.b=new SVd((JTd(),FTd),C8,b)),b.b),'qualified'))));return a.c}
+function BA(a,b){var c,d;c=(BFb(b,a.length),a.charCodeAt(b));d=b+1;while(d2000){Oz=a;Pz=$wnd.setTimeout(Yz,10)}}if(Nz++==0){_z(($z(),Zz));return true}return false}
+function lBb(a,b,c){var d;(bBb?(jBb(a),true):cBb?(SAb(),true):fBb?(SAb(),true):eBb&&(SAb(),false))&&(d=new aBb(b),d.b=c,hBb(a,d),undefined)}
+function oNb(a,b){var c;c=!a.A.Hc((Qpd(),Ppd))||a.q==(Bod(),wod);a.u.Hc((Pod(),Lod))?c?mNb(a,b):qNb(a,b):a.u.Hc(Nod)&&(c?nNb(a,b):rNb(a,b))}
+function Bed(a){var b;if(dE(Gxd(a,(umd(),Xkd)))===dE((Fnd(),Dnd))){if(!vCd(a)){Ixd(a,Xkd,End)}else{b=RD(Gxd(vCd(a),Xkd),346);Ixd(a,Xkd,b)}}}
+function _fc(a){var b,c;if(nQb(a.d.i,(yCc(),tBc))){b=RD(mQb(a.c.i,tBc),17);c=RD(mQb(a.d.i,tBc),17);return hgb(b.a,c.a)>0}else{return false}}
+function g_b(a,b,c){return new Uid($wnd.Math.min(a.a,b.a)-c/2,$wnd.Math.min(a.b,b.b)-c/2,$wnd.Math.abs(a.a-b.a)+c,$wnd.Math.abs(a.b-b.b)+c)}
+function _mc(a){var b;this.d=new bnb;this.j=new pjd;this.g=new pjd;b=a.g.b;this.f=RD(mQb(Y2b(b),(yCc(),rAc)),88);this.e=Kfb(UD(k2b(b,ZBc)))}
+function onc(a){this.d=new bnb;this.e=new gub;this.c=$C(kE,Pwe,28,(qpd(),cD(WC(E3,1),NAe,64,0,[opd,Yod,Xod,npd,ppd])).length,15,1);this.b=a}
+function $pc(a,b,c){var d;d=c[a.g][b];switch(a.g){case 1:case 3:return new rjd(0,d);case 2:case 4:return new rjd(d,0);default:return null;}}
+function Ced(b,c,d){var e,f;f=RD(ltd(c.f),205);try{f.rf(b,d);mtd(c.f,f)}catch(a){a=zdb(a);if(ZD(a,103)){e=a;throw Adb(e)}else throw Adb(a)}}
+function tEd(a,b,c){var d,e,f,g,h,i;d=null;h=vgd(ygd(),b);f=null;if(h){e=null;i=zhd(h,c);g=null;i!=null&&(g=a.qf(h,i));e=g;f=e}d=f;return d}
+function sSd(a,b,c,d){var e;e=a.length;if(b>=e)return e;for(b=b>0?b:0;bd&&bD(b,d,null);return b}
+function lob(a,b){var c,d;d=a.a.length;b.lengthd&&bD(b,d,null);return b}
+function Bde(a,b){var c,d;++a.j;if(b!=null){c=(d=a.a.Cb,ZD(d,99)?RD(d,99).th():null);if(Jnb(b,c)){$wd(a.a,4,c);return}}$wd(a.a,4,RD(b,129))}
+function mne(a){var b;if(a==null)return null;b=Hqe(nue(a,true));if(b==null){throw Adb(new Mle("Invalid hexBinary value: '"+a+"'"))}return b}
+function wA(a,b,c){var d;if(b.a.length>0){Rmb(a.b,new kB(b.a,c));d=b.a.length;0d&&(b.a+=Hhb($C(hE,zwe,28,-d,15,1)))}}
+function yIb(a,b,c){var d,e,f;if(c[b.d]){return}c[b.d]=true;for(e=new Anb(CIb(b));e.a=a.b>>1){d=a.c;for(c=a.b;c>b;--c){d=d.b}}else{d=a.a.a;for(c=0;c=0?a.Wh(e):Rvd(a,d)):c<0?Rvd(a,d):RD(d,69).wk().Bk(a,a.hi(),c)}
+function Fxd(a){var b,c,d;d=(!a.o&&(a.o=new DVd((pvd(),mvd),X4,a,0)),a.o);for(c=d.c.Kc();c.e!=c.i.gc();){b=RD(c.Yj(),44);b.md()}return dOd(d)}
+function iGd(a){var b;if(ZD(a.a,4)){b=FId(a.a);if(b==null){throw Adb(new dgb(HGe+a.b+"'. "+DGe+(lfb(b6),b6.k)+EGe))}return b}else{return a.a}}
+function iSd(a,b){var c,d;if(a.j.length!=b.j.length)return false;for(c=0,d=a.j.length;c=64&&b<128&&(e=Rdb(e,Sdb(1,b-64)))}return e}
+function k2b(a,b){var c,d;d=null;if(nQb(a,(umd(),amd))){c=RD(mQb(a,amd),96);c.pf(b)&&(d=c.of(b))}d==null&&!!Y2b(a)&&(d=mQb(Y2b(a),b));return d}
+function i0b(a,b){var c;c=RD(mQb(a,(yCc(),RAc)),75);if(br(b,f0b)){if(!c){c=new Ejd;pQb(a,RAc,c)}else{Xub(c)}}else !!c&&pQb(a,RAc,null);return c}
+function tSb(){tSb=geb;sSb=(umd(),Yld);mSb=Ukd;hSb=Dkd;nSb=tld;qSb=(YHb(),UHb);pSb=SHb;rSb=WHb;oSb=RHb;jSb=(eSb(),aSb);iSb=_Rb;kSb=cSb;lSb=dSb}
+function PZb(a){NZb();this.c=new bnb;this.d=a;switch(a.g){case 0:case 2:this.a=Fob(MZb);this.b=oxe;break;case 3:case 1:this.a=MZb;this.b=pxe;}}
+function c9b(a){var b;if(!Cod(RD(mQb(a,(yCc(),BBc)),101))){return}b=a.b;d9b((tFb(0,b.c.length),RD(b.c[0],30)));d9b(RD(Vmb(b,b.c.length-1),30))}
+function ohc(a,b){b.Ug('Self-Loop post-processing',1);FDb(CDb(CDb(EDb(new SDb(null,new Swb(a.b,16)),new uhc),new whc),new yhc),new Ahc);b.Vg()}
+function xrd(a,b,c){var d,e;if(a.c){Dyd(a.c,a.c.i+b);Eyd(a.c,a.c.j+c)}else{for(e=new Anb(a.b);e.a=0&&(c.d=a.t);break;case 3:a.t>=0&&(c.a=a.t);}if(a.C){c.b=a.C.b;c.c=a.C.c}}
+function JDc(){JDc=geb;IDc=new LDc(mEe,0);FDc=new LDc(BBe,1);GDc=new LDc('LINEAR_SEGMENTS',2);EDc=new LDc('BRANDES_KOEPF',3);HDc=new LDc(lEe,4)}
+function IRb(){IRb=geb;FRb=new JRb(_ye,0);ERb=new JRb(aze,1);GRb=new JRb(bze,2);HRb=new JRb(cze,3);FRb.a=false;ERb.a=true;GRb.a=false;HRb.a=true}
+function IPb(){IPb=geb;FPb=new JPb(_ye,0);EPb=new JPb(aze,1);GPb=new JPb(bze,2);HPb=new JPb(cze,3);FPb.a=false;EPb.a=true;GPb.a=false;HPb.a=true}
+function Ivd(a,b,c,d){var e;if(c>=0){return a.Sh(b,c,d)}else{!!a.Ph()&&(d=(e=a.Fh(),e>=0?a.Ah(d):a.Ph().Th(a,-1-e,null,d)));return a.Ch(b,c,d)}}
+function Zyd(a,b){switch(b){case 7:!a.e&&(a.e=new Yie(G4,a,7,4));sLd(a.e);return;case 8:!a.d&&(a.d=new Yie(G4,a,8,5));sLd(a.d);return;}yyd(a,b)}
+function Ixd(a,b,c){c==null?(!a.o&&(a.o=new DVd((pvd(),mvd),X4,a,0)),jOd(a.o,b)):(!a.o&&(a.o=new DVd((pvd(),mvd),X4,a,0)),fOd(a.o,b,c));return a}
+function Aob(a,b){yob();var c,d,e,f;c=a;f=b;if(ZD(a,21)&&!ZD(b,21)){c=b;f=a}for(e=c.Kc();e.Ob();){d=e.Pb();if(f.Hc(d)){return false}}return true}
+function qTc(a,b,c,d){if(b.ac.b){return true}}}return false}
+function QD(a,b){if(bE(a)){return !!PD[b]}else if(a.Sm){return !!a.Sm[b]}else if(_D(a)){return !!OD[b]}else if($D(a)){return !!ND[b]}return false}
+function udc(a){var b;b=a.a;do{b=RD(hs(new is(Mr(Z2b(b).a.Kc(),new ir))),18).c.i;b.k==(r3b(),o3b)&&a.b.Fc(b)}while(b.k==(r3b(),o3b));a.b=hv(a.b)}
+function UGc(a,b){var c,d,e;e=a;for(d=new is(Mr(Z2b(b).a.Kc(),new ir));gs(d);){c=RD(hs(d),18);!!c.c.i.c&&(e=$wnd.Math.max(e,c.c.i.c.p))}return e}
+function INb(a,b){var c,d,e;e=0;d=RD(RD(Qc(a.r,b),21),87).Kc();while(d.Ob()){c=RD(d.Pb(),117);e+=c.d.d+c.b.Mf().b+c.d.a;d.Ob()&&(e+=a.w)}return e}
+function AMb(a,b){var c,d,e;e=0;d=RD(RD(Qc(a.r,b),21),87).Kc();while(d.Ob()){c=RD(d.Pb(),117);e+=c.d.b+c.b.Mf().a+c.d.c;d.Ob()&&(e+=a.w)}return e}
+function O2c(a){var b,c,d,e;d=0;e=Q2c(a);if(e.c.length==0){return 1}else{for(c=new Anb(e);c.a=0?a.Lh(g,c,true):Qvd(a,f,c)):RD(f,69).wk().yk(a,a.hi(),e,c,d)}
+function aNb(a,b,c,d){var e,f;f=b.pf((umd(),ild))?RD(b.of(ild),21):a.j;e=lMb(f);if(e==(hMb(),gMb)){return}if(c&&!jMb(e)){return}LKb(cNb(a,e,d),b)}
+function Y6b(a){switch(a.g){case 1:return mOb(),lOb;case 3:return mOb(),iOb;case 2:return mOb(),kOb;case 4:return mOb(),jOb;default:return null;}}
+function kmc(a,b,c){if(a.e){switch(a.b){case 1:Ulc(a.c,b,c);break;case 0:Vlc(a.c,b,c);}}else{Slc(a.c,b,c)}a.a[b.p][c.p]=a.c.i;a.a[c.p][b.p]=a.c.e}
+function LLc(a){var b,c;if(a==null){return null}c=$C(jR,Nve,199,a.length,0,2);for(b=0;b=0)return e;if(a.ol()){for(d=0;d=e)throw Adb(new aMd(b,e));if(a.Si()){d=a.dd(c);if(d>=0&&d!=b){throw Adb(new agb(LIe))}}return a.Xi(b,c)}
+function wx(a,b){this.a=RD(Qb(a),253);this.b=RD(Qb(b),253);if(a.Ed(b)>0||a==(Wk(),Vk)||b==(kl(),jl)){throw Adb(new agb('Invalid range: '+Dx(a,b)))}}
+function p_b(a){var b,c;this.b=new bnb;this.c=a;this.a=false;for(c=new Anb(a.a);c.a0);if((b&-b)==b){return eE(b*Kwb(a,31)*4.6566128730773926E-10)}do{c=Kwb(a,31);d=c%b}while(c-d+(b-1)<0);return eE(d)}
+function d2b(a,b,c){switch(c.g){case 1:a.a=b.a/2;a.b=0;break;case 2:a.a=b.a;a.b=b.b/2;break;case 3:a.a=b.a/2;a.b=b.b;break;case 4:a.a=0;a.b=b.b/2;}}
+function Onc(a,b,c,d){var e,f;for(e=b;e1&&(f=xIb(a,b));return f}
+function yqd(a){var b;b=Kfb(UD(Gxd(a,(umd(),lmd))))*$wnd.Math.sqrt((!a.a&&(a.a=new C5d(J4,a,10,11)),a.a).i);return new rjd(b,b/Kfb(UD(Gxd(a,kmd))))}
+function Dzd(a){var b;if(!!a.f&&a.f.Vh()){b=RD(a.f,54);a.f=RD(Vvd(a,b),84);a.f!=b&&(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,9,8,b,a.f))}return a.f}
+function Ezd(a){var b;if(!!a.i&&a.i.Vh()){b=RD(a.i,54);a.i=RD(Vvd(a,b),84);a.i!=b&&(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,9,7,b,a.i))}return a.i}
+function Z5d(a){var b;if(!!a.b&&(a.b.Db&64)!=0){b=a.b;a.b=RD(Vvd(a,b),19);a.b!=b&&(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,9,21,b,a.b))}return a.b}
+function UNd(a,b){var c,d,e;if(a.d==null){++a.e;++a.f}else{d=b.Bi();_Nd(a,a.f+1);e=(d&lve)%a.d.length;c=a.d[e];!c&&(c=a.d[e]=a.dk());c.Fc(b);++a.f}}
+function Mge(a,b,c){var d;if(b.tk()){return false}else if(b.Ik()!=-2){d=b.ik();return d==null?c==null:pb(d,c)}else return b.qk()==a.e.Dh()&&c==null}
+function Io(){var a;dk(16,fwe);a=Wp(16);this.b=$C(XF,ewe,302,a,0,1);this.c=$C(XF,ewe,302,a,0,1);this.a=null;this.e=null;this.i=0;this.f=a-1;this.g=0}
+function j3b(a){v2b.call(this);this.k=(r3b(),p3b);this.j=(dk(6,iwe),new cnb(6));this.b=(dk(2,iwe),new cnb(2));this.d=new T2b;this.f=new C3b;this.a=a}
+function wgc(a){var b,c;if(a.c.length<=1){return}b=tgc(a,(qpd(),npd));vgc(a,RD(b.a,17).a,RD(b.b,17).a);c=tgc(a,ppd);vgc(a,RD(c.a,17).a,RD(c.b,17).a)}
+function vHc(a,b,c){var d,e;e=a.a.b;for(d=e.c.length;d102)return -1;if(a<=57)return a-48;if(a<65)return -1;if(a<=70)return a-65+10;if(a<97)return -1;return a-97+10}
+function ck(a,b){if(a==null){throw Adb(new Ogb('null key in entry: null='+b))}else if(b==null){throw Adb(new Ogb('null value in entry: '+a+'=null'))}}
+function Cr(a,b){var c,d;while(a.Ob()){if(!b.Ob()){return false}c=a.Pb();d=b.Pb();if(!(dE(c)===dE(d)||c!=null&&pb(c,d))){return false}}return !b.Ob()}
+function aLb(a,b){var c;c=cD(WC(iE,1),vxe,28,15,[gKb(a.a[0],b),gKb(a.a[1],b),gKb(a.a[2],b)]);if(a.d){c[0]=$wnd.Math.max(c[0],c[2]);c[2]=c[0]}return c}
+function bLb(a,b){var c;c=cD(WC(iE,1),vxe,28,15,[hKb(a.a[0],b),hKb(a.a[1],b),hKb(a.a[2],b)]);if(a.d){c[0]=$wnd.Math.max(c[0],c[2]);c[2]=c[0]}return c}
+function vIc(a,b,c){if(!Cod(RD(mQb(b,(yCc(),BBc)),101))){uIc(a,b,e3b(b,c));uIc(a,b,e3b(b,(qpd(),npd)));uIc(a,b,e3b(b,Yod));yob();_mb(b.j,new JIc(a))}}
+function sUc(a){var b,c;a.c||vUc(a);c=new Ejd;b=new Anb(a.a);ynb(b);while(b.a0&&(BFb(0,b.length),b.charCodeAt(0)==43)?(BFb(1,b.length+1),b.substr(1)):b))}
+function qne(a){var b;return a==null?null:new ejb((b=nue(a,true),b.length>0&&(BFb(0,b.length),b.charCodeAt(0)==43)?(BFb(1,b.length+1),b.substr(1)):b))}
+function Syb(a,b,c,d,e,f,g,h){var i,j;if(!d){return}i=d.a[0];!!i&&Syb(a,b,c,i,e,f,g,h);Tyb(a,c,d.d,e,f,g,h)&&b.Fc(d);j=d.a[1];!!j&&Syb(a,b,c,j,e,f,g,h)}
+function PPb(b,c,d){try{return Gdb(SPb(b,c,d),1)}catch(a){a=zdb(a);if(ZD(a,333)){throw Adb(new veb(fze+b.o+'*'+b.p+gze+c+pve+d+hze))}else throw Adb(a)}}
+function QPb(b,c,d){try{return Gdb(SPb(b,c,d),0)}catch(a){a=zdb(a);if(ZD(a,333)){throw Adb(new veb(fze+b.o+'*'+b.p+gze+c+pve+d+hze))}else throw Adb(a)}}
+function RPb(b,c,d){try{return Gdb(SPb(b,c,d),2)}catch(a){a=zdb(a);if(ZD(a,333)){throw Adb(new veb(fze+b.o+'*'+b.p+gze+c+pve+d+hze))}else throw Adb(a)}}
+function lMd(b,c){if(b.g==-1){throw Adb(new cgb)}b.Xj();try{b.d.hd(b.g,c);b.f=b.d.j}catch(a){a=zdb(a);if(ZD(a,77)){throw Adb(new Jrb)}else throw Adb(a)}}
+function Y7b(a){var b,c,d,e,f;for(d=new Anb(a.b);d.af&&bD(b,f,null);return b}
+function av(a,b){var c,d;d=a.gc();if(b==null){for(c=0;c0&&(i+=e);j[k]=g;g+=h*(i+d)}}
+function vsc(a){var b,c,d;d=a.f;a.n=$C(iE,vxe,28,d,15,1);a.d=$C(iE,vxe,28,d,15,1);for(b=0;b0?a.c:0);++e}a.b=d;a.d=f}
+function rKb(a,b){var c;c=cD(WC(iE,1),vxe,28,15,[qKb(a,(ZJb(),WJb),b),qKb(a,XJb,b),qKb(a,YJb,b)]);if(a.f){c[0]=$wnd.Math.max(c[0],c[2]);c[2]=c[0]}return c}
+function cQb(b,c,d){var e;try{TPb(b,c+b.j,d+b.k,false,true)}catch(a){a=zdb(a);if(ZD(a,77)){e=a;throw Adb(new veb(e.g+ize+c+pve+d+').'))}else throw Adb(a)}}
+function dQb(b,c,d){var e;try{TPb(b,c+b.j,d+b.k,true,false)}catch(a){a=zdb(a);if(ZD(a,77)){e=a;throw Adb(new veb(e.g+ize+c+pve+d+').'))}else throw Adb(a)}}
+function u8b(a){var b;if(!nQb(a,(yCc(),dBc))){return}b=RD(mQb(a,dBc),21);if(b.Hc((dod(),Xnd))){b.Mc(Xnd);b.Fc(Znd)}else if(b.Hc(Znd)){b.Mc(Znd);b.Fc(Xnd)}}
+function v8b(a){var b;if(!nQb(a,(yCc(),dBc))){return}b=RD(mQb(a,dBc),21);if(b.Hc((dod(),cod))){b.Mc(cod);b.Fc(aod)}else if(b.Hc(aod)){b.Mc(aod);b.Fc(cod)}}
+function oqc(a,b,c,d){var e,f,g,h;a.a==null&&rqc(a,b);g=b.b.j.c.length;f=c.d.p;h=d.d.p;e=h-1;e<0&&(e=g-1);return f<=e?a.a[e]-a.a[f]:a.a[g-1]-a.a[f]+a.a[e]}
+function Cud(a){var b,c;if(!a.b){a.b=fv(RD(a.f,27).kh().i);for(c=new dMd(RD(a.f,27).kh());c.e!=c.i.gc();){b=RD(bMd(c),135);Rmb(a.b,new Bud(b))}}return a.b}
+function Dud(a){var b,c;if(!a.e){a.e=fv(wCd(RD(a.f,27)).i);for(c=new dMd(wCd(RD(a.f,27)));c.e!=c.i.gc();){b=RD(bMd(c),123);Rmb(a.e,new Rud(b))}}return a.e}
+function yud(a){var b,c;if(!a.a){a.a=fv(tCd(RD(a.f,27)).i);for(c=new dMd(tCd(RD(a.f,27)));c.e!=c.i.gc();){b=RD(bMd(c),27);Rmb(a.a,new Fud(a,b))}}return a.a}
+function DXd(b){var c;if(!b.C&&(b.D!=null||b.B!=null)){c=EXd(b);if(c){b.hl(c)}else{try{b.hl(null)}catch(a){a=zdb(a);if(!ZD(a,63))throw Adb(a)}}}return b.C}
+function xMb(a){switch(a.q.g){case 5:uMb(a,(qpd(),Yod));uMb(a,npd);break;case 4:vMb(a,(qpd(),Yod));vMb(a,npd);break;default:wMb(a,(qpd(),Yod));wMb(a,npd);}}
+function GNb(a){switch(a.q.g){case 5:DNb(a,(qpd(),Xod));DNb(a,ppd);break;case 4:ENb(a,(qpd(),Xod));ENb(a,ppd);break;default:FNb(a,(qpd(),Xod));FNb(a,ppd);}}
+function G$b(a,b){var c,d,e;e=new pjd;for(d=a.Kc();d.Ob();){c=RD(d.Pb(),36);w$b(c,e.a,0);e.a+=c.f.a+b;e.b=$wnd.Math.max(e.b,c.f.b)}e.b>0&&(e.b+=b);return e}
+function I$b(a,b){var c,d,e;e=new pjd;for(d=a.Kc();d.Ob();){c=RD(d.Pb(),36);w$b(c,0,e.b);e.b+=c.f.b+b;e.a=$wnd.Math.max(e.a,c.f.a)}e.a>0&&(e.a+=b);return e}
+function l2b(a){var b,c,d;d=lve;for(c=new Anb(a.a);c.a>16==6){return a.Cb.Th(a,5,t7,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?a.ii():c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function kA(a){fA();var b=a.e;if(b&&b.stack){var c=b.stack;var d=b+'\n';c.substring(0,d.length)==d&&(c=c.substring(d.length));return c.split('\n')}return []}
+function pgb(a){var b;b=(wgb(),vgb);return b[a>>>28]|b[a>>24&15]<<4|b[a>>20&15]<<8|b[a>>16&15]<<12|b[a>>12&15]<<16|b[a>>8&15]<<20|b[a>>4&15]<<24|b[a&15]<<28}
+function mmb(a){var b,c,d;if(a.b!=a.c){return}d=a.a.length;c=mgb($wnd.Math.max(8,d))<<1;if(a.b!=0){b=WEb(a.a,c);lmb(a,b,d);a.a=b;a.b=0}else{aFb(a.a,c)}a.c=d}
+function uNb(a,b){var c;c=a.b;return c.pf((umd(),Gld))?c.ag()==(qpd(),ppd)?-c.Mf().a-Kfb(UD(c.of(Gld))):b+Kfb(UD(c.of(Gld))):c.ag()==(qpd(),ppd)?-c.Mf().a:b}
+function X2b(a){var b;if(a.b.c.length!=0&&!!RD(Vmb(a.b,0),72).a){return RD(Vmb(a.b,0),72).a}b=R0b(a);if(b!=null){return b}return ''+(!a.c?-1:Wmb(a.c.a,a,0))}
+function M3b(a){var b;if(a.f.c.length!=0&&!!RD(Vmb(a.f,0),72).a){return RD(Vmb(a.f,0),72).a}b=R0b(a);if(b!=null){return b}return ''+(!a.i?-1:Wmb(a.i.j,a,0))}
+function skc(a,b){var c,d;if(b<0||b>=a.gc()){return null}for(c=b;c0?a.c:0);e=$wnd.Math.max(e,b.d);++d}a.e=f;a.b=e}
+function Qud(a){var b,c;if(!a.b){a.b=fv(RD(a.f,123).kh().i);for(c=new dMd(RD(a.f,123).kh());c.e!=c.i.gc();){b=RD(bMd(c),135);Rmb(a.b,new Bud(b))}}return a.b}
+function aHd(a,b){var c,d,e;if(b.dc()){return jQd(),jQd(),iQd}else{c=new ZLd(a,b.gc());for(e=new dMd(a);e.e!=e.i.gc();){d=bMd(e);b.Hc(d)&&WGd(c,d)}return c}}
+function Axd(a,b,c,d){if(b==0){return d?(!a.o&&(a.o=new DVd((pvd(),mvd),X4,a,0)),a.o):(!a.o&&(a.o=new DVd((pvd(),mvd),X4,a,0)),dOd(a.o))}return Dvd(a,b,c,d)}
+function rBd(a){var b,c;if(a.rb){for(b=0,c=a.rb.i;b>22);e+=d>>22;if(e<0){return false}a.l=c&dxe;a.m=d&dxe;a.h=e&exe;return true}
+function Tyb(a,b,c,d,e,f,g){var h,i;if(b.Te()&&(i=a.a.Ne(c,d),i<0||!e&&i==0)){return false}if(b.Ue()&&(h=a.a.Ne(c,f),h>0||!g&&h==0)){return false}return true}
+function Agc(a,b){sgc();var c;c=a.j.g-b.j.g;if(c!=0){return 0}switch(a.j.g){case 2:return Cgc(b,rgc)-Cgc(a,rgc);case 4:return Cgc(a,qgc)-Cgc(b,qgc);}return 0}
+function uuc(a){switch(a.g){case 0:return nuc;case 1:return ouc;case 2:return puc;case 3:return quc;case 4:return ruc;case 5:return suc;default:return null;}}
+function cBd(a,b,c){var d,e;d=(e=new R5d,YVd(e,b),PAd(e,c),WGd((!a.c&&(a.c=new C5d(u7,a,12,10)),a.c),e),e);$Vd(d,0);bWd(d,1);aWd(d,true);_Vd(d,true);return d}
+function THd(a,b){var c,d;if(b>=a.i)throw Adb(new yNd(b,a.i));++a.j;c=a.g[b];d=a.i-b-1;d>0&&hib(a.g,b+1,a.g,b,d);bD(a.g,--a.i,null);a.Qi(b,c);a.Ni();return c}
+function sWd(a,b){var c,d;if(a.Db>>16==17){return a.Cb.Th(a,21,h7,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?a.ii():c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function _Fb(a){var b,c,d,e;yob();_mb(a.c,a.a);for(e=new Anb(a.c);e.ac.a.c.length)){throw Adb(new agb('index must be >= 0 and <= layer node count'))}!!a.c&&Ymb(a.c.a,a);a.c=c;!!c&&Qmb(c.a,b,a)}
+function Gac(a,b){var c,d,e;for(d=new is(Mr(W2b(a).a.Kc(),new ir));gs(d);){c=RD(hs(d),18);e=RD(b.Kb(c),10);return new cc(Qb(e.n.b+e.o.b/2))}return wb(),wb(),vb}
+function RQc(a,b){this.c=new Tsb;this.a=a;this.b=b;this.d=RD(mQb(a,(Ywc(),Qwc)),312);dE(mQb(a,(yCc(),eBc)))===dE((Cuc(),Auc))?(this.e=new BRc):(this.e=new uRc)}
+function ftd(a,b){var c,d;d=null;if(a.pf((umd(),amd))){c=RD(a.of(amd),96);c.pf(b)&&(d=c.of(b))}d==null&&!!a.Tf()&&(d=a.Tf().of(b));d==null&&(d=iGd(b));return d}
+function ku(b,c){var d,e;d=b.fd(c);try{e=d.Pb();d.Qb();return e}catch(a){a=zdb(a);if(ZD(a,112)){throw Adb(new veb("Can't remove element "+c))}else throw Adb(a)}}
+function GA(a,b){var c,d,e;d=new uB;e=new vB(d.q.getFullYear()-Owe,d.q.getMonth(),d.q.getDate());c=FA(a,b,e);if(c==0||c0?b:0);++c}return new rjd(d,e)}
+function Czd(a,b){var c,d;if(a.Db>>16==6){return a.Cb.Th(a,6,G4,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?(pvd(),hvd):c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function cCd(a,b){var c,d;if(a.Db>>16==7){return a.Cb.Th(a,1,H4,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?(pvd(),jvd):c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function LCd(a,b){var c,d;if(a.Db>>16==9){return a.Cb.Th(a,9,J4,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?(pvd(),lvd):c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function M1d(a,b){var c,d;if(a.Db>>16==5){return a.Cb.Th(a,9,m7,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?(JTd(),tTd):c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function qBd(a,b){var c,d;if(a.Db>>16==7){return a.Cb.Th(a,6,t7,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?(JTd(),CTd):c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function iVd(a,b){var c,d;if(a.Db>>16==3){return a.Cb.Th(a,0,p7,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?(JTd(),mTd):c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function IEd(){this.a=new BDd;this.g=new Io;this.j=new Io;this.b=new Tsb;this.d=new Io;this.i=new Io;this.k=new Tsb;this.c=new Tsb;this.e=new Tsb;this.f=new Tsb}
+function kQd(a,b,c){var d,e,f;c<0&&(c=0);f=a.i;for(e=c;ewxe){return Oje(a,d)}if(d==a){return true}}}return false}
+function yNb(a){tNb();switch(a.q.g){case 5:vNb(a,(qpd(),Yod));vNb(a,npd);break;case 4:wNb(a,(qpd(),Yod));wNb(a,npd);break;default:xNb(a,(qpd(),Yod));xNb(a,npd);}}
+function CNb(a){tNb();switch(a.q.g){case 5:zNb(a,(qpd(),Xod));zNb(a,ppd);break;case 4:ANb(a,(qpd(),Xod));ANb(a,ppd);break;default:BNb(a,(qpd(),Xod));BNb(a,ppd);}}
+function RTb(a){var b,c;b=RD(mQb(a,(yVb(),mVb)),17);if(b){c=b.a;c==0?pQb(a,(JVb(),IVb),new Owb):pQb(a,(JVb(),IVb),new Pwb(c))}else{pQb(a,(JVb(),IVb),new Pwb(1))}}
+function b2b(a,b){var c;c=a.i;switch(b.g){case 1:return -(a.n.b+a.o.b);case 2:return a.n.a-c.o.a;case 3:return a.n.b-c.o.b;case 4:return -(a.n.a+a.o.a);}return 0}
+function wec(a,b){switch(a.g){case 0:return b==(cxc(),$wc)?sec:tec;case 1:return b==(cxc(),$wc)?sec:rec;case 2:return b==(cxc(),$wc)?rec:tec;default:return rec;}}
+function Fad(a,b){var c,d,e;Ymb(a.a,b);a.e-=b.r+(a.a.c.length==0?0:a.c);e=fFe;for(d=new Anb(a.a);d.a>16==3){return a.Cb.Th(a,12,J4,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?(pvd(),gvd):c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function sCd(a,b){var c,d;if(a.Db>>16==11){return a.Cb.Th(a,10,J4,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?(pvd(),kvd):c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function n4d(a,b){var c,d;if(a.Db>>16==10){return a.Cb.Th(a,11,h7,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?(JTd(),ATd):c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function Q5d(a,b){var c,d;if(a.Db>>16==10){return a.Cb.Th(a,12,s7,b)}return d=Z5d(RD(vYd((c=RD(Ywd(a,16),29),!c?(JTd(),DTd):c),a.Db>>16),19)),a.Cb.Th(a,d.n,d.f,b)}
+function WVd(a){var b;if((a.Bb&1)==0&&!!a.r&&a.r.Vh()){b=RD(a.r,54);a.r=RD(Vvd(a,b),142);a.r!=b&&(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,9,8,b,a.r))}return a.r}
+function pKb(a,b,c){var d;d=cD(WC(iE,1),vxe,28,15,[sKb(a,(ZJb(),WJb),b,c),sKb(a,XJb,b,c),sKb(a,YJb,b,c)]);if(a.f){d[0]=$wnd.Math.max(d[0],d[2]);d[2]=d[0]}return d}
+function ddc(a,b){var c,d,e;e=kdc(a,b);if(e.c.length==0){return}_mb(e,new Gdc);c=e.c.length;for(d=0;d>19;j=b.h>>19;if(i!=j){return j-i}e=a.h;h=b.h;if(e!=h){return e-h}d=a.m;g=b.m;if(d!=g){return d-g}c=a.l;f=b.l;return c-f}
+function YHb(){YHb=geb;XHb=(iIb(),fIb);WHb=new lGd(Aye,XHb);VHb=(LHb(),KHb);UHb=new lGd(Bye,VHb);THb=(DHb(),CHb);SHb=new lGd(Cye,THb);RHb=new lGd(Dye,(Geb(),true))}
+function Iic(a,b,c){var d,e;d=b*c;if(ZD(a.g,154)){e=$jc(a);if(e.f.d){e.f.a||(a.d.a+=d+Tye)}else{a.d.d-=d+Tye;a.d.a+=d+Tye}}else if(ZD(a.g,10)){a.d.d-=d;a.d.a+=2*d}}
+function _pc(a,b,c){var d,e,f,g,h;e=a[c.g];for(h=new Anb(b.d);h.a0?a.b:0);++c}b.b=d;b.e=e}
+function Fo(a){var b,c,d;d=a.b;if(Xp(a.i,d.length)){c=d.length*2;a.b=$C(XF,ewe,302,c,0,1);a.c=$C(XF,ewe,302,c,0,1);a.f=c-1;a.i=0;for(b=a.a;b;b=b.c){Bo(a,b,b)}++a.g}}
+function VPb(a,b,c,d){var e,f,g,h;for(e=0;eg&&(h=g/d);e>f&&(i=f/e);ijd(a,$wnd.Math.min(h,i));return a}
+function OAd(){qAd();var b,c;try{c=RD(M5d((YSd(),XSd),$He),2113);if(c){return c}}catch(a){a=zdb(a);if(ZD(a,103)){b=a;UId((Hde(),b))}else throw Adb(a)}return new KAd}
+function Qae(){qAd();var b,c;try{c=RD(M5d((YSd(),XSd),AKe),2040);if(c){return c}}catch(a){a=zdb(a);if(ZD(a,103)){b=a;UId((Hde(),b))}else throw Adb(a)}return new Mae}
+function vne(){Zme();var b,c;try{c=RD(M5d((YSd(),XSd),dLe),2122);if(c){return c}}catch(a){a=zdb(a);if(ZD(a,103)){b=a;UId((Hde(),b))}else throw Adb(a)}return new rne}
+function f2d(a,b,c){var d,e;e=a.e;a.e=b;if((a.Db&4)!=0&&(a.Db&1)==0){d=new N3d(a,1,4,e,b);!c?(c=d):c.nj(d)}e!=b&&(b?(c=o2d(a,k2d(a,b),c)):(c=o2d(a,a.a,c)));return c}
+function DB(){uB.call(this);this.e=-1;this.a=false;this.p=qwe;this.k=-1;this.c=-1;this.b=-1;this.g=false;this.f=-1;this.j=-1;this.n=-1;this.i=-1;this.d=-1;this.o=qwe}
+function hHb(a,b){var c,d,e;d=a.b.d.d;a.a||(d+=a.b.d.a);e=b.b.d.d;b.a||(e+=b.b.d.a);c=Qfb(d,e);if(c==0){if(!a.a&&b.a){return -1}else if(!b.a&&a.a){return 1}}return c}
+function XQb(a,b){var c,d,e;d=a.b.b.d;a.a||(d+=a.b.b.a);e=b.b.b.d;b.a||(e+=b.b.b.a);c=Qfb(d,e);if(c==0){if(!a.a&&b.a){return -1}else if(!b.a&&a.a){return 1}}return c}
+function RYb(a,b){var c,d,e;d=a.b.g.d;a.a||(d+=a.b.g.a);e=b.b.g.d;b.a||(e+=b.b.g.a);c=Qfb(d,e);if(c==0){if(!a.a&&b.a){return -1}else if(!b.a&&a.a){return 1}}return c}
+function _Wb(){_Wb=geb;YWb=nfd(pfd(pfd(pfd(new ufd,(sXb(),qXb),(hcc(),Dbc)),qXb,Hbc),rXb,Obc),rXb,rbc);$Wb=pfd(pfd(new ufd,qXb,hbc),qXb,sbc);ZWb=nfd(new ufd,rXb,ubc)}
+function J6b(a){var b,c,d,e,f;b=RD(mQb(a,(Ywc(),cwc)),85);f=a.n;for(d=b.Cc().Kc();d.Ob();){c=RD(d.Pb(),314);e=c.i;e.c+=f.a;e.d+=f.b;c.c?MKb(c):OKb(c)}pQb(a,cwc,null)}
+function Wpc(a,b,c){var d,e;e=a.b;d=e.d;switch(b.g){case 1:return -d.d-c;case 2:return e.o.a+d.c+c;case 3:return e.o.b+d.a+c;case 4:return -d.b-c;default:return -1;}}
+function CNc(a,b,c){var d,e;c.Ug('Interactive node placement',1);a.a=RD(mQb(b,(Ywc(),Qwc)),312);for(e=new Anb(b.b);e.a0){g=(f&lve)%a.d.length;e=WNd(a,g,f,b);if(e){h=e.nd(c);return h}}d=a.ck(f,b,c);a.c.Fc(d);return null}
+function Tee(a,b){var c,d,e,f;switch(Oee(a,b).Kl()){case 3:case 2:{c=mYd(b);for(e=0,f=c.i;e=0;d--){if(lhb(a[d].d,b)||lhb(a[d].d,c)){a.length>=d+1&&a.splice(0,d+1);break}}return a}
+function Fdb(a,b){var c;if(Kdb(a)&&Kdb(b)){c=a/b;if(jxe0){a.b+=2;a.a+=d}}else{a.b+=1;a.a+=$wnd.Math.min(d,e)}}
+function CVc(a){var b;b=RD(mQb(RD(ju(a.b,0),39),(h_c(),T$c)),107);pQb(a,(q$c(),SZc),new rjd(0,0));FVc(new YWc,a,b.b+b.c-Kfb(UD(mQb(a,ZZc))),b.d+b.a-Kfb(UD(mQb(a,_Zc))))}
+function pDd(a,b){var c,d;d=false;if(bE(b)){d=true;oDd(a,new OC(WD(b)))}if(!d){if(ZD(b,242)){d=true;oDd(a,(c=Qeb(RD(b,242)),new hC(c)))}}if(!d){throw Adb(new Aeb(tIe))}}
+function g$d(a,b,c,d){var e,f,g;e=new P3d(a.e,1,10,(g=b.c,ZD(g,90)?RD(g,29):(JTd(),zTd)),(f=c.c,ZD(f,90)?RD(f,29):(JTd(),zTd)),fZd(a,b),false);!d?(d=e):d.nj(e);return d}
+function _2b(a){var b,c;switch(RD(mQb(Y2b(a),(yCc(),QAc)),429).g){case 0:b=a.n;c=a.o;return new rjd(b.a+c.a/2,b.b+c.b/2);case 1:return new sjd(a.n);default:return null;}}
+function Ouc(){Ouc=geb;Luc=new Puc(LAe,0);Kuc=new Puc('LEFTUP',1);Nuc=new Puc('RIGHTUP',2);Juc=new Puc('LEFTDOWN',3);Muc=new Puc('RIGHTDOWN',4);Iuc=new Puc('BALANCED',5)}
+function dKc(a,b,c){var d,e,f;d=Qfb(a.a[b.p],a.a[c.p]);if(d==0){e=RD(mQb(b,(Ywc(),qwc)),15);f=RD(mQb(c,qwc),15);if(e.Hc(c)){return -1}else if(f.Hc(b)){return 1}}return d}
+function k5c(a){switch(a.g){case 1:return new K3c;case 2:return new M3c;case 3:return new I3c;case 0:return null;default:throw Adb(new agb(mFe+(a.f!=null?a.f:''+a.g)));}}
+function gyd(a,b,c){switch(b){case 1:!a.n&&(a.n=new C5d(I4,a,1,7));sLd(a.n);!a.n&&(a.n=new C5d(I4,a,1,7));YGd(a.n,RD(c,16));return;case 2:jyd(a,WD(c));return;}Dxd(a,b,c)}
+function xyd(a,b,c){switch(b){case 3:Ayd(a,Kfb(UD(c)));return;case 4:Cyd(a,Kfb(UD(c)));return;case 5:Dyd(a,Kfb(UD(c)));return;case 6:Eyd(a,Kfb(UD(c)));return;}gyd(a,b,c)}
+function dBd(a,b,c){var d,e,f;f=(d=new R5d,d);e=XVd(f,b,null);!!e&&e.oj();PAd(f,c);WGd((!a.c&&(a.c=new C5d(u7,a,12,10)),a.c),f);$Vd(f,0);bWd(f,1);aWd(f,true);_Vd(f,true)}
+function M5d(a,b){var c,d,e;c=Ktb(a.i,b);if(ZD(c,241)){e=RD(c,241);e.zi()==null&&undefined;return e.wi()}else if(ZD(c,507)){d=RD(c,2037);e=d.b;return e}else{return null}}
+function aj(a,b,c,d){var e,f;Qb(b);Qb(c);f=RD(Fn(a.d,b),17);Ob(!!f,'Row %s not in %s',b,a.e);e=RD(Fn(a.b,c),17);Ob(!!e,'Column %s not in %s',c,a.c);return cj(a,f.a,e.a,d)}
+function ZC(a,b,c,d,e,f,g){var h,i,j,k,l;k=e[f];j=f==g-1;h=j?d:0;l=_C(h,k);d!=10&&cD(WC(a,g-f),b[f],c[f],h,l);if(!j){++f;for(i=0;i1||h==-1){f=RD(i,15);e.Wb(Sje(a,f))}else{e.Wb(Rje(a,RD(i,58)))}}}}
+function ceb(b,c,d,e){beb();var f=_db;$moduleName=c;$moduleBase=d;ydb=e;function g(){for(var a=0;a0){return false}}return true}
+function okc(a){var b,c,d,e,f;for(d=new vkb((new mkb(a.b)).a);d.b;){c=tkb(d);b=RD(c.ld(),10);f=RD(RD(c.md(),42).a,10);e=RD(RD(c.md(),42).b,8);$id(hjd(b.n),$id(ajd(f.n),e))}}
+function Roc(a){switch(RD(mQb(a.b,(yCc(),BAc)),387).g){case 1:FDb(GDb(EDb(new SDb(null,new Swb(a.d,16)),new kpc),new mpc),new opc);break;case 2:Toc(a);break;case 0:Soc(a);}}
+function SVc(a,b,c){var d,e,f;d=c;!d&&(d=new Oqd);d.Ug('Layout',a.a.c.length);for(f=new Anb(a.a);f.aAEe){return c}else e>-1.0E-6&&++c}return c}
+function n2d(a,b){var c;if(b!=a.b){c=null;!!a.b&&(c=Jvd(a.b,a,-4,c));!!b&&(c=Ivd(b,a,-4,c));c=e2d(a,b,c);!!c&&c.oj()}else (a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,3,b,b))}
+function q2d(a,b){var c;if(b!=a.f){c=null;!!a.f&&(c=Jvd(a.f,a,-1,c));!!b&&(c=Ivd(b,a,-1,c));c=g2d(a,b,c);!!c&&c.oj()}else (a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,0,b,b))}
+function Lge(a,b,c,d){var e,f,g,h;if(Mvd(a.e)){e=b.Lk();h=b.md();f=c.md();g=fge(a,1,e,h,f,e.Jk()?kge(a,e,f,ZD(e,102)&&(RD(e,19).Bb&txe)!=0):-1,true);d?d.nj(g):(d=g)}return d}
+function bne(a){var b,c,d;if(a==null)return null;c=RD(a,15);if(c.dc())return '';d=new Qhb;for(b=c.Kc();b.Ob();){Nhb(d,(nme(),WD(b.Pb())));d.a+=' '}return qeb(d,d.a.length-1)}
+function fne(a){var b,c,d;if(a==null)return null;c=RD(a,15);if(c.dc())return '';d=new Qhb;for(b=c.Kc();b.Ob();){Nhb(d,(nme(),WD(b.Pb())));d.a+=' '}return qeb(d,d.a.length-1)}
+function QIc(a,b,c){var d,e;d=a.c[b.c.p][b.p];e=a.c[c.c.p][c.p];if(d.a!=null&&e.a!=null){return Jfb(d.a,e.a)}else if(d.a!=null){return -1}else if(e.a!=null){return 1}return 0}
+function RVc(a,b,c){c.Ug('Tree layout',1);Sed(a.b);Ved(a.b,(YVc(),UVc),UVc);Ved(a.b,VVc,VVc);Ved(a.b,WVc,WVc);Ved(a.b,XVc,XVc);a.a=Qed(a.b,b);SVc(a,b,c.eh(1));c.Vg();return b}
+function ZDd(a,b){var c,d,e,f,g,h;if(b){f=b.a.length;c=new vue(f);for(h=(c.b-c.a)*c.c<0?(uue(),tue):new Rue(c);h.Ob();){g=RD(h.Pb(),17);e=xDd(b,g.a);d=new aFd(a);$Dd(d.a,e)}}}
+function oEd(a,b){var c,d,e,f,g,h;if(b){f=b.a.length;c=new vue(f);for(h=(c.b-c.a)*c.c<0?(uue(),tue):new Rue(c);h.Ob();){g=RD(h.Pb(),17);e=xDd(b,g.a);d=new LEd(a);NDd(d.a,e)}}}
+function ESd(b){var c;if(b!=null&&b.length>0&&ihb(b,b.length-1)==33){try{c=nSd(zhb(b,0,b.length-1));return c.e==null}catch(a){a=zdb(a);if(!ZD(a,33))throw Adb(a)}}return false}
+function u0b(a,b,c){var d,e,f;d=Y2b(b);e=i2b(d);f=new R3b;P3b(f,b);switch(c.g){case 1:Q3b(f,spd(vpd(e)));break;case 2:Q3b(f,vpd(e));}pQb(f,(yCc(),ABc),UD(mQb(a,ABc)));return f}
+function jdc(a){var b,c;b=RD(hs(new is(Mr(Z2b(a.a).a.Kc(),new ir))),18);c=RD(hs(new is(Mr(a3b(a.a).a.Kc(),new ir))),18);return Heb(TD(mQb(b,(Ywc(),Nwc))))||Heb(TD(mQb(c,Nwc)))}
+function Bnc(){Bnc=geb;xnc=new Cnc('ONE_SIDE',0);znc=new Cnc('TWO_SIDES_CORNER',1);Anc=new Cnc('TWO_SIDES_OPPOSING',2);ync=new Cnc('THREE_SIDES',3);wnc=new Cnc('FOUR_SIDES',4)}
+function Usc(a,b){var c,d,e,f;f=new bnb;e=0;d=b.Kc();while(d.Ob()){c=sgb(RD(d.Pb(),17).a+e);while(c.a=a.f){break}ZEb(f.c,c)}return f}
+function iIc(a,b){var c,d,e,f,g;for(f=new Anb(b.a);f.a0&&Xlc(this,this.c-1,(qpd(),Xod));this.c0&&a[0].length>0&&(this.c=Heb(TD(mQb(Y2b(a[0][0]),(Ywc(),rwc)))));this.a=$C(aY,Nve,2117,a.length,0,2);this.b=$C(dY,Nve,2118,a.length,0,2);this.d=new Ks}
+function TOc(a){if(a.c.length==0){return false}if((tFb(0,a.c.length),RD(a.c[0],18)).c.i.k==(r3b(),o3b)){return true}return yDb(GDb(new SDb(null,new Swb(a,16)),new WOc),new YOc)}
+function I5c(a,b){var c,d,e,f,g,h,i;h=Q2c(b);f=b.f;i=b.g;g=$wnd.Math.sqrt(f*f+i*i);e=0;for(d=new Anb(h);d.a=0){c=Fdb(a,ixe);d=Mdb(a,ixe)}else{b=Udb(a,1);c=Fdb(b,500000000);d=Mdb(b,500000000);d=Bdb(Sdb(d,1),Cdb(a,1))}return Rdb(Sdb(d,32),Cdb(c,yxe))}
+function fTb(a,b,c){var d,e;d=(sFb(b.b!=0),RD(Wub(b,b.a.a),8));switch(c.g){case 0:d.b=0;break;case 2:d.b=a.f;break;case 3:d.a=0;break;default:d.a=a.g;}e=Sub(b,0);cvb(e,d);return b}
+function Vpc(a,b,c,d){var e,f,g,h,i;i=a.b;f=b.d;g=f.j;h=$pc(g,i.d[g.g],c);e=$id(ajd(f.n),f.a);switch(f.j.g){case 1:case 3:h.a+=e.a;break;case 2:case 4:h.b+=e.b;}Pub(d,h,d.c.b,d.c)}
+function YNc(a,b,c){var d,e,f,g;g=Wmb(a.e,b,0);f=new ZNc;f.b=c;d=new Jkb(a.e,g);while(d.b1;b>>=1){(b&1)!=0&&(d=Wib(d,c));c.d==1?(c=Wib(c,c)):(c=new djb(Tjb(c.a,c.d,$C(kE,Pwe,28,c.d<<1,15,1))))}d=Wib(d,c);return d}
+function Hwb(){Hwb=geb;var a,b,c,d;Ewb=$C(iE,vxe,28,25,15,1);Fwb=$C(iE,vxe,28,33,15,1);d=1.52587890625E-5;for(b=32;b>=0;b--){Fwb[b]=d;d*=0.5}c=1;for(a=24;a>=0;a--){Ewb[a]=c;c*=0.5}}
+function a5b(a){var b,c;if(Heb(TD(Gxd(a,(yCc(),NAc))))){for(c=new is(Mr(zGd(a).a.Kc(),new ir));gs(c);){b=RD(hs(c),74);if(ozd(b)){if(Heb(TD(Gxd(b,OAc)))){return true}}}}return false}
+function Qmc(a,b){var c,d,e;if(Ysb(a.f,b)){b.b=a;d=b.c;Wmb(a.j,d,0)!=-1||Rmb(a.j,d);e=b.d;Wmb(a.j,e,0)!=-1||Rmb(a.j,e);c=b.a.b;if(c.c.length!=0){!a.i&&(a.i=new _mc(a));Wmc(a.i,c)}}}
+function Xpc(a){var b,c,d,e,f;c=a.c.d;d=c.j;e=a.d.d;f=e.j;if(d==f){return c.p=0&&lhb(a.substr(b,'GMT'.length),'GMT')){c[0]=b+3;return JA(a,c,d)}if(b>=0&&lhb(a.substr(b,'UTC'.length),'UTC')){c[0]=b+3;return JA(a,c,d)}return JA(a,c,d)}
+function Zmc(a,b){var c,d,e,f,g;f=a.g.a;g=a.g.b;for(d=new Anb(a.d);d.ac;f--){a[f]|=b[f-c-1]>>>g;a[f-1]=b[f-c-1]<0&&hib(a.g,b,a.g,b+d,h);g=c.Kc();a.i+=d;for(e=0;e>4&15;f=a[d]&15;g[e++]=oAd[c];g[e++]=oAd[f]}return Ihb(g,0,g.length)}}
+function Fhb(a){var b,c;if(a>=txe){b=uxe+(a-txe>>10&1023)&Bwe;c=56320+(a-txe&1023)&Bwe;return String.fromCharCode(b)+(''+String.fromCharCode(c))}else{return String.fromCharCode(a&Bwe)}}
+function UMb(a,b){RMb();var c,d,e,f;e=RD(RD(Qc(a.r,b),21),87);if(e.gc()>=2){d=RD(e.Kc().Pb(),117);c=a.u.Hc((Pod(),Kod));f=a.u.Hc(Ood);return !d.a&&!c&&(e.gc()==2||f)}else{return false}}
+function v3c(a,b,c,d,e){var f,g,h;f=w3c(a,b,c,d,e);h=false;while(!f){n3c(a,e,true);h=true;f=w3c(a,b,c,d,e)}h&&n3c(a,e,false);g=N2c(e);if(g.c.length!=0){!!a.d&&a.d.Gg(g);v3c(a,e,c,d,g)}}
+function ind(){ind=geb;gnd=new jnd(LAe,0);end=new jnd('DIRECTED',1);hnd=new jnd('UNDIRECTED',2);cnd=new jnd('ASSOCIATION',3);fnd=new jnd('GENERALIZATION',4);dnd=new jnd('DEPENDENCY',5)}
+function nsd(a,b){var c;if(!MCd(a)){throw Adb(new dgb(sHe))}c=MCd(a);switch(b.g){case 1:return -(a.j+a.f);case 2:return a.i-c.g;case 3:return a.j-c.f;case 4:return -(a.i+a.g);}return 0}
+function Jge(a,b,c){var d,e,f;d=b.Lk();f=b.md();e=d.Jk()?fge(a,4,d,f,null,kge(a,d,f,ZD(d,102)&&(RD(d,19).Bb&txe)!=0),true):fge(a,d.tk()?2:1,d,f,d.ik(),-1,true);c?c.nj(e):(c=e);return c}
+function lwb(a,b){var c,d;uFb(b);d=a.b.c.length;Rmb(a.b,b);while(d>0){c=d;d=(d-1)/2|0;if(a.a.Ne(Vmb(a.b,d),b)<=0){$mb(a.b,c,b);return true}$mb(a.b,c,Vmb(a.b,d))}$mb(a.b,d,b);return true}
+function sKb(a,b,c,d){var e,f;e=0;if(!c){for(f=0;f=h}
+function A8c(a){switch(a.g){case 0:return new o8c;case 1:return new u8c;default:throw Adb(new agb('No implementation is available for the width approximator '+(a.f!=null?a.f:''+a.g)));}}
+function rDd(a,b,c,d){var e;e=false;if(bE(d)){e=true;sDd(b,c,WD(d))}if(!e){if($D(d)){e=true;rDd(a,b,c,d)}}if(!e){if(ZD(d,242)){e=true;qDd(b,c,RD(d,242))}}if(!e){throw Adb(new Aeb(tIe))}}
+function uee(a,b){var c,d,e;c=b.qi(a.a);if(c){e=$Nd((!c.b&&(c.b=new SVd((JTd(),FTd),C8,c)),c.b),rKe);if(e!=null){for(d=1;d<(lke(),hke).length;++d){if(lhb(hke[d],e)){return d}}}}return 0}
+function vee(a,b){var c,d,e;c=b.qi(a.a);if(c){e=$Nd((!c.b&&(c.b=new SVd((JTd(),FTd),C8,c)),c.b),rKe);if(e!=null){for(d=1;d<(lke(),ike).length;++d){if(lhb(ike[d],e)){return d}}}}return 0}
+function Ve(a,b){var c,d,e,f;uFb(b);f=a.a.gc();if(f0?1:0;while(f.a[e]!=c){f=f.a[e];e=a.a.Ne(c.d,f.d)>0?1:0}f.a[e]=d;d.b=c.b;d.a[0]=c.a[0];d.a[1]=c.a[1];c.a[0]=null;c.a[1]=null}
+function zIb(a){var b,c,d,e;b=new bnb;c=$C(xdb,Hye,28,a.a.c.length,16,1);Snb(c,c.length);for(e=new Anb(a.a);e.a0&&O9b((tFb(0,c.c.length),RD(c.c[0],30)),a);c.c.length>1&&O9b(RD(Vmb(c,c.c.length-1),30),a);b.Vg()}
+function Sod(a){Pod();var b,c;b=ysb(Lod,cD(WC(D3,1),jwe,279,0,[Nod]));if(dy(Tx(b,a))>1){return false}c=ysb(Kod,cD(WC(D3,1),jwe,279,0,[Jod,Ood]));if(dy(Tx(c,a))>1){return false}return true}
+function FBd(a,b){var c;c=Xjb((YSd(),XSd),a);ZD(c,507)?$jb(XSd,a,new B5d(this,b)):$jb(XSd,a,this);BBd(this,b);if(b==(jTd(),iTd)){this.wb=RD(this,2038);RD(b,2040)}else{this.wb=(lTd(),kTd)}}
+function Lae(b){var c,d,e;if(b==null){return null}c=null;for(d=0;d=Awe?'error':d>=900?'warn':d>=800?'info':'log');eFb(c,a.a);!!a.b&&fFb(b,c,a.b,'Exception: ',true)}
+function mQb(a,b){var c,d;d=(!a.q&&(a.q=new Tsb),Wjb(a.q,b));if(d!=null){return d}c=b.Sg();ZD(c,4)&&(c==null?(!a.q&&(a.q=new Tsb),_jb(a.q,b)):(!a.q&&(a.q=new Tsb),Zjb(a.q,b,c)),a);return c}
+function sXb(){sXb=geb;nXb=new tXb('P1_CYCLE_BREAKING',0);oXb=new tXb('P2_LAYERING',1);pXb=new tXb('P3_NODE_ORDERING',2);qXb=new tXb('P4_NODE_PLACEMENT',3);rXb=new tXb('P5_EDGE_ROUTING',4)}
+function KZb(a,b){CZb();var c;if(a.c==b.c){if(a.b==b.b||rZb(a.b,b.b)){c=oZb(a.b)?1:-1;if(a.a&&!b.a){return c}else if(!a.a&&b.a){return -c}}return hgb(a.b.g,b.b.g)}else{return Qfb(a.c,b.c)}}
+function E3c(a,b){var c,d,e;if(p3c(a,b)){return true}for(d=new Anb(b);d.a=e||b<0)throw Adb(new veb(MIe+b+NIe+e));if(c>=e||c<0)throw Adb(new veb(OIe+c+NIe+e));b!=c?(d=(f=a.Cj(c),a.qj(b,f),f)):(d=a.xj(c));return d}
+function Lje(a){var b,c,d;d=a;if(a){b=0;for(c=a.Eh();c;c=c.Eh()){if(++b>wxe){return Lje(c)}d=c;if(c==a){throw Adb(new dgb('There is a cycle in the containment hierarchy of '+a))}}}return d}
+function Fe(a){var b,c,d;d=new Jyb(pve,'[',']');for(c=a.Kc();c.Ob();){b=c.Pb();Gyb(d,dE(b)===dE(a)?'(this Collection)':b==null?vve:jeb(b))}return !d.a?d.c:d.e.length==0?d.a.a:d.a.a+(''+d.e)}
+function p3c(a,b){var c,d;d=false;if(b.gc()<2){return false}for(c=0;c1&&(a.j.b+=a.e)}else{a.j.a+=c.a;a.j.b=$wnd.Math.max(a.j.b,c.b);a.d.c.length>1&&(a.j.a+=a.e)}}
+function Mnc(){Mnc=geb;Jnc=cD(WC(E3,1),NAe,64,0,[(qpd(),Yod),Xod,npd]);Inc=cD(WC(E3,1),NAe,64,0,[Xod,npd,ppd]);Knc=cD(WC(E3,1),NAe,64,0,[npd,ppd,Yod]);Lnc=cD(WC(E3,1),NAe,64,0,[ppd,Yod,Xod])}
+function Upc(a,b,c,d){var e,f,g,h,i,j,k;g=a.c.d;h=a.d.d;if(g.j==h.j){return}k=a.b;e=g.j;i=null;while(e!=h.j){i=b==0?tpd(e):rpd(e);f=$pc(e,k.d[e.g],c);j=$pc(i,k.d[i.g],c);Mub(d,$id(f,j));e=i}}
+function OJc(a,b,c,d){var e,f,g,h,i;g=hMc(a.a,b,c);h=RD(g.a,17).a;f=RD(g.b,17).a;if(d){i=RD(mQb(b,(Ywc(),Iwc)),10);e=RD(mQb(c,Iwc),10);if(!!i&&!!e){Slc(a.b,i,e);h+=a.b.i;f+=a.b.e}}return h>f}
+function OLc(a){var b,c,d,e,f,g,h,i,j;this.a=LLc(a);this.b=new bnb;for(c=a,d=0,e=c.length;damc(a.d).c){a.i+=a.g.c;cmc(a.d)}else if(amc(a.d).c>amc(a.g).c){a.e+=a.d.c;cmc(a.g)}else{a.i+=_lc(a.g);a.e+=_lc(a.d);cmc(a.g);cmc(a.d)}}}
+function vTc(a,b,c){var d,e,f,g;f=b.q;g=b.r;new bTc((fTc(),dTc),b,f,1);new bTc(dTc,f,g,1);for(e=new Anb(c);e.ah&&(i=h/d);e>f&&(j=f/e);g=$wnd.Math.min(i,j);a.a+=g*(b.a-a.a);a.b+=g*(b.b-a.b)}
+function I8c(a,b,c,d,e){var f,g;g=false;f=RD(Vmb(c.b,0),27);while(V8c(a,b,f,d,e)){g=true;T9c(c,f);if(c.b.c.length==0){break}f=RD(Vmb(c.b,0),27)}c.b.c.length==0&&Fad(c.j,c);g&&gad(b.q);return g}
+function Eid(a,b){tid();var c,d,e,f;if(b.b<2){return false}f=Sub(b,0);c=RD(evb(f),8);d=c;while(f.b!=f.d.c){e=RD(evb(f),8);if(Did(a,d,e)){return true}d=e}if(Did(a,d,c)){return true}return false}
+function Bxd(a,b,c,d){var e,f;if(c==0){return !a.o&&(a.o=new DVd((pvd(),mvd),X4,a,0)),BVd(a.o,b,d)}return f=RD(vYd((e=RD(Ywd(a,16),29),!e?a.ii():e),c),69),f.wk().Ak(a,Wwd(a),c-AYd(a.ii()),b,d)}
+function BBd(a,b){var c;if(b!=a.sb){c=null;!!a.sb&&(c=RD(a.sb,54).Th(a,1,n7,c));!!b&&(c=RD(b,54).Rh(a,1,n7,c));c=hBd(a,b,c);!!c&&c.oj()}else (a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,4,b,b))}
+function YDd(a,b){var c,d,e,f;if(b){e=vDd(b,'x');c=new ZEd(a);Hzd(c.a,(uFb(e),e));f=vDd(b,'y');d=new $Ed(a);Izd(d.a,(uFb(f),f))}else{throw Adb(new CDd('All edge sections need an end point.'))}}
+function WDd(a,b){var c,d,e,f;if(b){e=vDd(b,'x');c=new WEd(a);Ozd(c.a,(uFb(e),e));f=vDd(b,'y');d=new XEd(a);Pzd(d.a,(uFb(f),f))}else{throw Adb(new CDd('All edge sections need a start point.'))}}
+function hBb(a,b){var c,d,e,f,g,h,i;for(d=kBb(a),f=0,h=d.length;f>22-b;e=a.h<>22-b}else if(b<44){c=0;d=a.l<>44-b}else{c=0;d=0;e=a.l<a){throw Adb(new agb('k must be smaller than n'))}else return b==0||b==a?1:a==0?0:Bid(a)/(Bid(b)*Bid(a-b))}
+function msd(a,b){var c,d,e,f;c=new zId(a);while(c.g==null&&!c.c?sId(c):c.g==null||c.i!=0&&RD(c.g[c.i-1],51).Ob()){f=RD(tId(c),58);if(ZD(f,167)){d=RD(f,167);for(e=0;e>4];b[c*2+1]=Fqe[f&15]}return Ihb(b,0,b.length)}
+function sn(a){fn();var b,c,d;d=a.c.length;switch(d){case 0:return en;case 1:b=RD(Ir(new Anb(a)),44);return xn(b.ld(),b.md());default:c=RD(anb(a,$C(UK,Zve,44,a.c.length,0,1)),173);return new Mx(c);}}
+function KWb(a){var b,c,d,e,f,g;b=new wmb;c=new wmb;hmb(b,a);hmb(c,a);while(c.b!=c.c){e=RD(smb(c),36);for(g=new Anb(e.a);g.a0&&uLc(a,c,b);return e}return rLc(a,b,c)}
+function $4c(){$4c=geb;R4c=(umd(),Qld);Y4c=fmd;K4c=kld;L4c=nld;M4c=pld;J4c=ild;N4c=sld;Q4c=Lld;H4c=(D4c(),o4c);I4c=p4c;T4c=v4c;W4c=y4c;U4c=w4c;V4c=x4c;O4c=r4c;P4c=t4c;S4c=u4c;X4c=z4c;Z4c=B4c;G4c=n4c}
+function P9c(a,b){var c,d,e,f,g;if(a.e<=b){return a.g}if(R9c(a,a.g,b)){return a.g}f=a.r;d=a.g;g=a.r;e=(f-d)/2+d;while(d+11&&(a.e.b+=a.a)}else{a.e.a+=c.a;a.e.b=$wnd.Math.max(a.e.b,c.b);a.d.c.length>1&&(a.e.a+=a.a)}}
+function Ipc(a){var b,c,d,e;e=a.i;b=e.b;d=e.j;c=e.g;switch(e.a.g){case 0:c.a=(a.g.b.o.a-d.a)/2;break;case 1:c.a=b.d.n.a+b.d.a.a;break;case 2:c.a=b.d.n.a+b.d.a.a-d.a;break;case 3:c.b=b.d.n.b+b.d.a.b;}}
+function oOc(a,b,c){var d,e,f;for(e=new is(Mr(W2b(c).a.Kc(),new ir));gs(e);){d=RD(hs(e),18);if(!(!W0b(d)&&!(!W0b(d)&&d.c.i.c==d.d.i.c))){continue}f=gOc(a,d,c,new VOc);f.c.length>1&&(ZEb(b.c,f),true)}}
+function _id(a,b,c,d,e){if(dd&&(a.a=d);a.be&&(a.b=e);return a}
+function LFd(a){if(ZD(a,143)){return EFd(RD(a,143))}else if(ZD(a,233)){return FFd(RD(a,233))}else if(ZD(a,23)){return GFd(RD(a,23))}else{throw Adb(new agb(wIe+Fe(new mob(cD(WC(jJ,1),rve,1,5,[a])))))}}
+function ujb(a,b,c,d,e){var f,g,h;f=true;for(g=0;g>>e|c[g+d+1]<>>e;++g}return f}
+function ZQc(a,b,c,d){var e,f,g;if(b.k==(r3b(),o3b)){for(f=new is(Mr(Z2b(b).a.Kc(),new ir));gs(f);){e=RD(hs(f),18);g=e.c.i.k;if(g==o3b&&a.c.a[e.c.i.c.p]==d&&a.c.a[b.c.p]==c){return true}}}return false}
+function CD(a,b){var c,d,e,f;b&=63;c=a.h&exe;if(b<22){f=c>>>b;e=a.m>>b|c<<22-b;d=a.l>>b|a.m<<22-b}else if(b<44){f=0;e=c>>>b-22;d=a.m>>b-22|a.h<<44-b}else{f=0;e=0;d=c>>>b-44}return hD(d&dxe,e&dxe,f&exe)}
+function mmc(a,b,c,d){var e;this.b=d;this.e=a==(RKc(),PKc);e=b[c];this.d=YC(xdb,[Nve,Hye],[183,28],16,[e.length,e.length],2);this.a=YC(kE,[Nve,Pwe],[53,28],15,[e.length,e.length],2);this.c=new Ylc(b,c)}
+function Rmc(a){var b,c,d;a.k=new Si((qpd(),cD(WC(E3,1),NAe,64,0,[opd,Yod,Xod,npd,ppd])).length,a.j.c.length);for(d=new Anb(a.j);d.a=c){_cc(a,b,d.p);return true}}return false}
+function EA(a,b,c,d){var e,f,g,h,i,j;g=c.length;f=0;e=-1;j=Bhb((BFb(b,a.length+1),a.substr(b)),(wvb(),uvb));for(h=0;hf&&whb(j,Bhb(c[h],uvb))){e=h;f=i}}e>=0&&(d[0]=b+f);return e}
+function gCd(a){var b;if((a.Db&64)!=0)return Fyd(a);b=new dib(FHe);!a.a||Zhb(Zhb((b.a+=' "',b),a.a),'"');Zhb(Uhb(Zhb(Uhb(Zhb(Uhb(Zhb(Uhb((b.a+=' (',b),a.i),','),a.j),' | '),a.g),','),a.f),')');return b.a}
+function xge(a,b,c){var d,e,f,g,h;h=pke(a.e.Dh(),b);e=RD(a.g,124);d=0;for(g=0;gc){return Jb(a,c,'start index')}if(b<0||b>c){return Jb(b,c,'end index')}return hc('end index (%s) must not be less than start index (%s)',cD(WC(jJ,1),rve,1,5,[sgb(b),sgb(a)]))}
+function dA(b,c){var d,e,f,g;for(e=0,f=b.length;e0&&aGc(a,f,c))}}b.p=0}
+function Ahd(a){var b;this.c=new Yub;this.f=a.e;this.e=a.d;this.i=a.g;this.d=a.c;this.b=a.b;this.k=a.j;this.a=a.a;!a.i?(this.j=(b=RD(mfb(d3),9),new Fsb(b,RD(WEb(b,b.length),9),0))):(this.j=a.i);this.g=a.f}
+function Wb(a){var b,c,d,e;b=Thb(Zhb(new dib('Predicates.'),'and'),40);c=true;for(e=new Dkb(a);e.b0?h[g-1]:$C(jR,WAe,10,0,0,1);e=h[g];j=g=0?a.ki(e):Tvd(a,d)}else{throw Adb(new agb(KHe+d.xe()+LHe))}}else{Cvd(a,c,d)}}
+function ADd(a){var b,c;c=null;b=false;if(ZD(a,211)){b=true;c=RD(a,211).a}if(!b){if(ZD(a,263)){b=true;c=''+RD(a,263).a}}if(!b){if(ZD(a,492)){b=true;c=''+RD(a,492).a}}if(!b){throw Adb(new Aeb(tIe))}return c}
+function gge(a,b,c){var d,e,f,g,h,i;i=pke(a.e.Dh(),b);d=0;h=a.i;e=RD(a.g,124);for(g=0;g=a.d.b.c.length){b=new R4b(a.d);b.p=d.p-1;Rmb(a.d.b,b);c=new R4b(a.d);c.p=d.p;Rmb(a.d.b,c)}g3b(d,RD(Vmb(a.d.b,d.p),30))}}
+function DVc(a,b,c){var d,e,f;if(!a.b[b.g]){a.b[b.g]=true;d=c;!d&&(d=new YWc);Mub(d.b,b);for(f=a.a[b.g].Kc();f.Ob();){e=RD(f.Pb(),65);e.b!=b&&DVc(a,e.b,d);e.c!=b&&DVc(a,e.c,d);Mub(d.a,e)}return d}return null}
+function iMb(a){switch(a.g){case 0:case 1:case 2:return qpd(),Yod;case 3:case 4:case 5:return qpd(),npd;case 6:case 7:case 8:return qpd(),ppd;case 9:case 10:case 11:return qpd(),Xod;default:return qpd(),opd;}}
+function SOc(a,b){var c;if(a.c.length==0){return false}c=zDc((tFb(0,a.c.length),RD(a.c[0],18)).c.i);dOc();if(c==(wDc(),tDc)||c==sDc){return true}return yDb(GDb(new SDb(null,new Swb(a,16)),new $Oc),new aPc(b))}
+function KDd(a,b){if(ZD(b,207)){return EDd(a,RD(b,27))}else if(ZD(b,193)){return FDd(a,RD(b,123))}else if(ZD(b,451)){return DDd(a,RD(b,166))}else{throw Adb(new agb(wIe+Fe(new mob(cD(WC(jJ,1),rve,1,5,[b])))))}}
+function Ou(a,b,c){var d,e;this.f=a;d=RD(Wjb(a.b,b),260);e=!d?0:d.a;Sb(c,e);if(c>=(e/2|0)){this.e=!d?null:d.c;this.d=e;while(c++0){Lu(this)}}this.b=b;this.a=null}
+function iHb(a,b){var c,d;b.a?jHb(a,b):(c=RD(vAb(a.b,b.b),60),!!c&&c==a.a[b.b.f]&&!!c.a&&c.a!=b.b.a&&c.c.Fc(b.b),d=RD(uAb(a.b,b.b),60),!!d&&a.a[d.f]==b.b&&!!d.a&&d.a!=b.b.a&&b.b.c.Fc(d),wAb(a.b,b.b),undefined)}
+function wMb(a,b){var c,d;c=RD(Vrb(a.b,b),127);if(RD(RD(Qc(a.r,b),21),87).dc()){c.n.b=0;c.n.c=0;return}c.n.b=a.C.b;c.n.c=a.C.c;a.A.Hc((Qpd(),Ppd))&&BMb(a,b);d=AMb(a,b);BLb(a,b)==(pod(),mod)&&(d+=2*a.w);c.a.a=d}
+function FNb(a,b){var c,d;c=RD(Vrb(a.b,b),127);if(RD(RD(Qc(a.r,b),21),87).dc()){c.n.d=0;c.n.a=0;return}c.n.d=a.C.d;c.n.a=a.C.a;a.A.Hc((Qpd(),Ppd))&&JNb(a,b);d=INb(a,b);BLb(a,b)==(pod(),mod)&&(d+=2*a.w);c.a.b=d}
+function VQb(a,b){var c,d,e,f;f=new bnb;for(d=new Anb(b);d.ad&&(BFb(b-1,a.length),a.charCodeAt(b-1)<=32)){--b}return d>0||bc.a&&(d.Hc((ukd(),okd))?(e=(b.a-c.a)/2):d.Hc(qkd)&&(e=b.a-c.a));b.b>c.b&&(d.Hc((ukd(),skd))?(f=(b.b-c.b)/2):d.Hc(rkd)&&(f=b.b-c.b));Isd(a,e,f)}
+function ABd(a,b,c,d,e,f,g,h,i,j,k,l,m){ZD(a.Cb,90)&&v$d(yYd(RD(a.Cb,90)),4);PAd(a,c);a.f=g;DWd(a,h);FWd(a,i);xWd(a,j);EWd(a,k);aWd(a,l);AWd(a,m);_Vd(a,true);$Vd(a,e);a.Zk(f);YVd(a,b);d!=null&&(a.i=null,zWd(a,d))}
+function Jb(a,b,c){if(a<0){return hc(qve,cD(WC(jJ,1),rve,1,5,[c,sgb(a)]))}else if(b<0){throw Adb(new agb(sve+b))}else{return hc('%s (%s) must not be greater than size (%s)',cD(WC(jJ,1),rve,1,5,[c,sgb(a),sgb(b)]))}}
+function Xnb(a,b,c,d,e,f){var g,h,i,j;g=d-c;if(g<7){Unb(b,c,d,f);return}i=c+e;h=d+e;j=i+(h-i>>1);Xnb(b,a,i,j,-e,f);Xnb(b,a,j,h,-e,f);if(f.Ne(a[j-1],a[j])<=0){while(c=0?a.bi(f,c):Svd(a,e,c)}else{throw Adb(new agb(KHe+e.xe()+LHe))}}else{Bvd(a,d,e,c)}}
+function n3d(a){var b,c;if(a.f){while(a.n>0){b=RD(a.k.Xb(a.n-1),76);c=b.Lk();if(ZD(c,102)&&(RD(c,19).Bb&QHe)!=0&&(!a.e||c.pk()!=C4||c.Lj()!=0)&&b.md()!=null){return true}else{--a.n}}return false}else{return a.n>0}}
+function Pje(b){var c,d,e,f;d=RD(b,54)._h();if(d){try{e=null;c=N5d((YSd(),XSd),jSd(kSd(d)));if(c){f=c.ai();!!f&&(e=f.Fl(Chb(d.e)))}if(!!e&&e!=b){return Pje(e)}}catch(a){a=zdb(a);if(!ZD(a,63))throw Adb(a)}}return b}
+function P3c(a,b,c){var d,e,f;c.Ug('Remove overlaps',1);c.dh(b,eFe);d=RD(Gxd(b,(u2c(),t2c)),27);a.f=d;a.a=u5c(RD(Gxd(b,($4c(),X4c)),299));e=UD(Gxd(b,(umd(),fmd)));s3c(a,(uFb(e),e));f=Q2c(d);O3c(a,b,f,c);c.dh(b,gFe)}
+function Ded(a){var b,c,d;if(Heb(TD(Gxd(a,(umd(),$kd))))){d=new bnb;for(c=new is(Mr(zGd(a).a.Kc(),new ir));gs(c);){b=RD(hs(c),74);ozd(b)&&Heb(TD(Gxd(b,_kd)))&&(ZEb(d.c,b),true)}return d}else{return yob(),yob(),vob}}
+function KC(a){if(!a){return cC(),bC}var b=a.valueOf?a.valueOf():a;if(b!==a){var c=GC[typeof b];return c?c(b):NC(typeof b)}else if(a instanceof Array||a instanceof $wnd.Array){return new NB(a)}else{return new vC(a)}}
+function IMb(a,b,c){var d,e,f;f=a.o;d=RD(Vrb(a.p,c),252);e=d.i;e.b=ZKb(d);e.a=YKb(d);e.b=$wnd.Math.max(e.b,f.a);e.b>f.a&&!b&&(e.b=f.a);e.c=-(e.b-f.a)/2;switch(c.g){case 1:e.d=-e.a;break;case 3:e.d=f.b;}$Kb(d);_Kb(d)}
+function JMb(a,b,c){var d,e,f;f=a.o;d=RD(Vrb(a.p,c),252);e=d.i;e.b=ZKb(d);e.a=YKb(d);e.a=$wnd.Math.max(e.a,f.b);e.a>f.b&&!b&&(e.a=f.b);e.d=-(e.a-f.b)/2;switch(c.g){case 4:e.c=-e.b;break;case 2:e.c=f.a;}$Kb(d);_Kb(d)}
+function nkc(a,b){var c,d,e,f,g;if(b.dc()){return}e=RD(b.Xb(0),131);if(b.gc()==1){mkc(a,e,e,1,0,b);return}c=1;while(c0){try{f=Oeb(c,qwe,lve)}catch(a){a=zdb(a);if(ZD(a,130)){e=a;throw Adb(new RSd(e))}else throw Adb(a)}}d=(!b.a&&(b.a=new Zde(b)),b.a);return f=0?RD(QHd(d,f),58):null}
+function Ib(a,b){if(a<0){return hc(qve,cD(WC(jJ,1),rve,1,5,['index',sgb(a)]))}else if(b<0){throw Adb(new agb(sve+b))}else{return hc('%s (%s) must be less than size (%s)',cD(WC(jJ,1),rve,1,5,['index',sgb(a),sgb(b)]))}}
+function cob(a){var b,c,d,e,f;if(a==null){return vve}f=new Jyb(pve,'[',']');for(c=a,d=0,e=c.length;d';throw Adb(new agb(d.a))}
+function cB(a){var b,c;c=-a.a;b=cD(WC(hE,1),zwe,28,15,[43,48,48,48,48]);if(c<0){b[0]=45;c=-c}b[1]=b[1]+((c/60|0)/10|0)&Bwe;b[2]=b[2]+(c/60|0)%10&Bwe;b[3]=b[3]+(c%60/10|0)&Bwe;b[4]=b[4]+c%10&Bwe;return Ihb(b,0,b.length)}
+function V2b(a){var b,c,d,e;a.g=new Zrb(RD(Qb(E3),296));d=0;c=(qpd(),Yod);b=0;for(;b=0?a.Lh(c,true,true):Qvd(a,e,true),160));RD(d,220).Zl(b)}else{throw Adb(new agb(KHe+b.xe()+LHe))}}
+function Cib(a){var b,c;if(a>-140737488355328&&a<140737488355328){if(a==0){return 0}b=a<0;b&&(a=-a);c=eE($wnd.Math.floor($wnd.Math.log(a)/0.6931471805599453));(!b||a!=$wnd.Math.pow(2,c))&&++c;return c}return Dib(Hdb(a))}
+function oTc(a){var b,c,d,e,f,g,h;f=new Iub;for(c=new Anb(a);c.a2&&h.e.b+h.j.b<=2){e=h;d=g}f.a.zc(e,f);e.q=d}return f}
+function B5c(a,b,c){c.Ug('Eades radial',1);c.dh(b,gFe);a.d=RD(Gxd(b,(u2c(),t2c)),27);a.c=Kfb(UD(Gxd(b,($4c(),S4c))));a.e=u5c(RD(Gxd(b,X4c),299));a.a=Z3c(RD(Gxd(b,Z4c),434));a.b=k5c(RD(Gxd(b,O4c),354));C5c(a);c.dh(b,gFe)}
+function t8c(a,b){b.Ug('Target Width Setter',1);if(Hxd(a,(X7c(),W7c))){Ixd(a,(X6c(),W6c),UD(Gxd(a,W7c)))}else{throw Adb(new Jed('A target width has to be set if the TargetWidthWidthApproximator should be used.'))}b.Vg()}
+function _8b(a,b){var c,d,e;d=new j3b(a);kQb(d,b);pQb(d,(Ywc(),gwc),b);pQb(d,(yCc(),BBc),(Bod(),wod));pQb(d,Rzc,(Rjd(),Njd));h3b(d,(r3b(),m3b));c=new R3b;P3b(c,d);Q3b(c,(qpd(),ppd));e=new R3b;P3b(e,d);Q3b(e,Xod);return d}
+function ttc(a){switch(a.g){case 0:return new FKc((RKc(),OKc));case 1:return new aKc;case 2:return new FLc;default:throw Adb(new agb('No implementation is available for the crossing minimizer '+(a.f!=null?a.f:''+a.g)));}}
+function THc(a,b){var c,d,e,f,g;a.c[b.p]=true;Rmb(a.a,b);for(g=new Anb(b.j);g.a=f){g.$b()}else{e=g.Kc();for(d=0;d0?Hh():g<0&&Rw(a,b,-g);return true}else{return false}}
+function YKb(a){var b,c,d,e,f,g,h;h=0;if(a.b==0){g=aLb(a,true);b=0;for(d=g,e=0,f=d.length;e0){h+=c;++b}}b>1&&(h+=a.c*(b-1))}else{h=Vvb(SCb(HDb(CDb(_nb(a.a),new oLb),new qLb)))}return h>0?h+a.n.d+a.n.a:0}
+function ZKb(a){var b,c,d,e,f,g,h;h=0;if(a.b==0){h=Vvb(SCb(HDb(CDb(_nb(a.a),new kLb),new mLb)))}else{g=bLb(a,true);b=0;for(d=g,e=0,f=d.length;e0){h+=c;++b}}b>1&&(h+=a.c*(b-1))}return h>0?h+a.n.b+a.n.c:0}
+function UOc(a){var b,c;if(a.c.length!=2){throw Adb(new dgb('Order only allowed for two paths.'))}b=(tFb(0,a.c.length),RD(a.c[0],18));c=(tFb(1,a.c.length),RD(a.c[1],18));if(b.d.i!=c.c.i){a.c.length=0;ZEb(a.c,c);ZEb(a.c,b)}}
+function O8c(a,b,c){var d;zyd(c,b.g,b.f);Byd(c,b.i,b.j);for(d=0;d<(!b.a&&(b.a=new C5d(J4,b,10,11)),b.a).i;d++){O8c(a,RD(QHd((!b.a&&(b.a=new C5d(J4,b,10,11)),b.a),d),27),RD(QHd((!c.a&&(c.a=new C5d(J4,c,10,11)),c.a),d),27))}}
+function DMb(a,b){var c,d,e,f;f=RD(Vrb(a.b,b),127);c=f.a;for(e=RD(RD(Qc(a.r,b),21),87).Kc();e.Ob();){d=RD(e.Pb(),117);!!d.c&&(c.a=$wnd.Math.max(c.a,QKb(d.c)))}if(c.a>0){switch(b.g){case 2:f.n.c=a.s;break;case 4:f.n.b=a.s;}}}
+function ETb(a,b){var c,d,e;c=RD(mQb(b,(yVb(),lVb)),17).a-RD(mQb(a,lVb),17).a;if(c==0){d=ojd(ajd(RD(mQb(a,(JVb(),FVb)),8)),RD(mQb(a,GVb),8));e=ojd(ajd(RD(mQb(b,FVb),8)),RD(mQb(b,GVb),8));return Qfb(d.a*d.b,e.a*e.b)}return c}
+function JVc(a,b){var c,d,e;c=RD(mQb(b,(h_c(),X$c)),17).a-RD(mQb(a,X$c),17).a;if(c==0){d=ojd(ajd(RD(mQb(a,(q$c(),RZc)),8)),RD(mQb(a,SZc),8));e=ojd(ajd(RD(mQb(b,RZc),8)),RD(mQb(b,SZc),8));return Qfb(d.a*d.b,e.a*e.b)}return c}
+function _0b(a){var b,c;c=new bib;c.a+='e_';b=S0b(a);b!=null&&(c.a+=''+b,c);if(!!a.c&&!!a.d){Zhb((c.a+=' ',c),M3b(a.c));Zhb(Yhb((c.a+='[',c),a.c.i),']');Zhb((c.a+=SAe,c),M3b(a.d));Zhb(Yhb((c.a+='[',c),a.d.i),']')}return c.a}
+function ZVc(a){switch(a.g){case 0:return new N_c;case 1:return new V_c;case 2:return new x0c;case 3:return new J0c;default:throw Adb(new agb('No implementation is available for the layout phase '+(a.f!=null?a.f:''+a.g)));}}
+function qsd(a,b,c,d,e){var f;f=0;switch(e.g){case 1:f=$wnd.Math.max(0,b.b+a.b-(c.b+d));break;case 3:f=$wnd.Math.max(0,-a.b-d);break;case 2:f=$wnd.Math.max(0,-a.a-d);break;case 4:f=$wnd.Math.max(0,b.a+a.a-(c.a+d));}return f}
+function MDd(a,b,c){var d,e,f,g,h;if(c){e=c.a.length;d=new vue(e);for(h=(d.b-d.a)*d.c<0?(uue(),tue):new Rue(d);h.Ob();){g=RD(h.Pb(),17);f=xDd(c,g.a);kIe in f.a||lIe in f.a?yEd(a,f,b):EEd(a,f,b);OGd(RD(Wjb(a.b,uDd(f)),74))}}}
+function jXd(a){var b,c;switch(a.b){case -1:{return true}case 0:{c=a.t;if(c>1||c==-1){a.b=-1;return true}else{b=WVd(a);if(!!b&&(nke(),b.lk()==aKe)){a.b=-1;return true}else{a.b=1;return false}}}default:case 1:{return false}}}
+function Sqe(a,b){var c,d,e,f;Mqe(a);if(a.c!=0||a.a!=123)throw Adb(new Lqe(TId((Hde(),eJe))));f=b==112;d=a.d;c=phb(a.i,125,d);if(c<0)throw Adb(new Lqe(TId((Hde(),fJe))));e=zhb(a.i,d,c);a.d=c+1;return ite(e,f,(a.e&512)==512)}
+function YTb(a){var b,c,d,e,f,g,h;d=a.a.c.length;if(d>0){g=a.c.d;h=a.d.d;e=ijd(ojd(new rjd(h.a,h.b),g),1/(d+1));f=new rjd(g.a,g.b);for(c=new Anb(a.a);c.a=0&&f=0?a.Lh(c,true,true):Qvd(a,e,true),160));return RD(d,220).Wl(b)}else{throw Adb(new agb(KHe+b.xe()+NHe))}}
+function _ae(){Tae();var a;if(Sae)return RD(N5d((YSd(),XSd),AKe),2038);RRd(UK,new hde);abe();a=RD(ZD(Xjb((YSd(),XSd),AKe),560)?Xjb(XSd,AKe):new $ae,560);Sae=true;Yae(a);Zae(a);Zjb((hTd(),gTd),a,new cbe);$jb(XSd,AKe,a);return a}
+function Vfe(a,b){var c,d,e,f;a.j=-1;if(Mvd(a.e)){c=a.i;f=a.i!=0;LHd(a,b);d=new P3d(a.e,3,a.c,null,b,c,f);e=b.zl(a.e,a.c,null);e=Hge(a,b,e);if(!e){qvd(a.e,d)}else{e.nj(d);e.oj()}}else{LHd(a,b);e=b.zl(a.e,a.c,null);!!e&&e.oj()}}
+function HA(a,b){var c,d,e;e=0;d=b[0];if(d>=a.length){return -1}c=(BFb(d,a.length),a.charCodeAt(d));while(c>=48&&c<=57){e=e*10+(c-48);++d;if(d>=a.length){break}c=(BFb(d,a.length),a.charCodeAt(d))}d>b[0]?(b[0]=d):(e=-1);return e}
+function mPb(a){var b,c,d,e,f;e=RD(a.a,17).a;f=RD(a.b,17).a;c=e;d=f;b=$wnd.Math.max($wnd.Math.abs(e),$wnd.Math.abs(f));if(e<=0&&e==f){c=0;d=f-1}else{if(e==-b&&f!=b){c=f;d=e;f>=0&&++c}else{c=-f;d=e}}return new Ptd(sgb(c),sgb(d))}
+function YPb(a,b,c,d){var e,f,g,h,i,j;for(e=0;e=0&&j>=0&&i=a.i)throw Adb(new veb(MIe+b+NIe+a.i));if(c>=a.i)throw Adb(new veb(OIe+c+NIe+a.i));d=a.g[c];if(b!=c){b>16);b=d>>16&16;c=16-b;a=a>>b;d=a-256;b=d>>16&8;c+=b;a<<=b;d=a-qxe;b=d>>16&4;c+=b;a<<=b;d=a-Ove;b=d>>16&2;c+=b;a<<=b;d=a>>14;b=d&~(d>>1);return c+2-b}}
+function RSb(a){HSb();var b,c,d,e;GSb=new bnb;FSb=new Tsb;ESb=new bnb;b=(!a.a&&(a.a=new C5d(J4,a,10,11)),a.a);JSb(b);for(e=new dMd(b);e.e!=e.i.gc();){d=RD(bMd(e),27);if(Wmb(GSb,d,0)==-1){c=new bnb;Rmb(ESb,c);KSb(d,c)}}return ESb}
+function sTb(a,b,c){var d,e,f,g;a.a=c.b.d;if(ZD(b,326)){e=IGd(RD(b,74),false,false);f=ssd(e);d=new wTb(a);xgb(f,d);lsd(f,e);b.of((umd(),cld))!=null&&xgb(RD(b.of(cld),75),d)}else{g=RD(b,422);g.rh(g.nh()+a.a.a);g.sh(g.oh()+a.a.b)}}
+function hWc(a,b){var c,d,e;e=new bnb;for(d=Sub(b.a,0);d.b!=d.d.c;){c=RD(evb(d),65);c.c.g==a.g&&dE(mQb(c.b,(h_c(),f_c)))!==dE(mQb(c.c,f_c))&&!yDb(new SDb(null,new Swb(e,16)),new IWc(c))&&(ZEb(e.c,c),true)}_mb(e,new KWc);return e}
+function fUb(a,b,c){var d,e,f,g;if(ZD(b,153)&&ZD(c,153)){f=RD(b,153);g=RD(c,153);return a.a[f.a][g.a]+a.a[g.a][f.a]}else if(ZD(b,250)&&ZD(c,250)){d=RD(b,250);e=RD(c,250);if(d.a==e.a){return RD(mQb(e.a,(yVb(),lVb)),17).a}}return 0}
+function q9b(a,b){var c,d,e,f,g,h,i,j;j=Kfb(UD(mQb(b,(yCc(),fCc))));i=a[0].n.a+a[0].o.a+a[0].d.c+j;for(h=1;h=0){return c}h=ejd(ojd(new rjd(g.c+g.b/2,g.d+g.a/2),new rjd(f.c+f.b/2,f.d+f.a/2)));return -(oRb(f,g)-1)*h}
+function ysd(a,b,c){var d;FDb(new SDb(null,(!c.a&&(c.a=new C5d(F4,c,6,6)),new Swb(c.a,16))),new Qsd(a,b));FDb(new SDb(null,(!c.n&&(c.n=new C5d(I4,c,1,7)),new Swb(c.n,16))),new Ssd(a,b));d=RD(Gxd(c,(umd(),cld)),75);!!d&&Bjd(d,a,b)}
+function Qvd(a,b,c){var d,e,f;f=Eee((lke(),jke),a.Dh(),b);if(f){nke();RD(f,69).xk()||(f=zfe(Qee(jke,f)));e=(d=a.Ih(f),RD(d>=0?a.Lh(d,true,true):Qvd(a,f,true),160));return RD(e,220).Sl(b,c)}else{throw Adb(new agb(KHe+b.xe()+NHe))}}
+function WNd(a,b,c,d){var e,f,g,h,i;e=a.d[b];if(e){f=e.g;i=e.i;if(d!=null){for(h=0;h=c){d=b;j=(i.c+i.a)/2;g=j-c;if(i.c<=j-c){e=new BTc(i.c,g);Qmb(a,d++,e)}h=j+c;if(h<=i.a){f=new BTc(h,i.a);wFb(d,a.c.length);XEb(a.c,d,f)}}}
+function mZc(a,b,c){var d,e,f,g,h,i;if(!b.dc()){e=new Yub;for(i=b.Kc();i.Ob();){h=RD(i.Pb(),39);Zjb(a.a,sgb(h.g),sgb(c));for(g=(d=Sub((new dXc(h)).a.d,0),new gXc(d));dvb(g.a);){f=RD(evb(g.a),65).c;Pub(e,f,e.c.b,e.c)}}mZc(a,e,c+1)}}
+function Ude(a){var b;if(!a.c&&a.g==null){a.d=a.bj(a.f);WGd(a,a.d);b=a.d}else{if(a.g==null){return true}else if(a.i==0){return false}else{b=RD(a.g[a.i-1],51)}}if(b==a.b&&null.Vm>=null.Um()){tId(a);return Ude(a)}else{return b.Ob()}}
+function t_b(a){this.a=a;if(a.c.i.k==(r3b(),m3b)){this.c=a.c;this.d=RD(mQb(a.c.i,(Ywc(),hwc)),64)}else if(a.d.i.k==m3b){this.c=a.d;this.d=RD(mQb(a.d.i,(Ywc(),hwc)),64)}else{throw Adb(new agb('Edge '+a+' is not an external edge.'))}}
+function O1d(a,b){var c,d,e;e=a.b;a.b=b;(a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,3,e,a.b));if(!b){PAd(a,null);Q1d(a,0);P1d(a,null)}else if(b!=a){PAd(a,b.zb);Q1d(a,b.d);c=(d=b.c,d==null?b.zb:d);P1d(a,c==null||lhb(c,b.zb)?null:c)}}
+function hj(a,b){var c;this.e=(tm(),Qb(a),tm(),zm(a));this.c=(Qb(b),zm(b));Lb(this.e.Rd().dc()==this.c.Rd().dc());this.d=Uv(this.e);this.b=Uv(this.c);c=YC(jJ,[Nve,rve],[5,1],5,[this.e.Rd().gc(),this.c.Rd().gc()],2);this.a=c;Zi(this)}
+function Lz(b){var c=(!Jz&&(Jz=Mz()),Jz);var d=b.replace(/[\x00-\x1f\xad\u0600-\u0603\u06dd\u070f\u17b4\u17b5\u200b-\u200f\u2028-\u202e\u2060-\u2064\u206a-\u206f\ufeff\ufff9-\ufffb"\\]/g,function(a){return Kz(a,c)});return '"'+d+'"'}
+function VEb(a,b,c,d,e,f){var g,h,i,j,k;if(e==0){return}if(dE(a)===dE(c)){a=a.slice(b,b+e);b=0}i=c;for(h=b,j=b+e;h=g)throw Adb(new aMd(b,g));e=c[b];if(g==1){d=null}else{d=$C(d6,IJe,424,g-1,0,1);hib(c,0,d,0,b);f=g-b-1;f>0&&hib(c,b+1,d,b,f)}Bde(a,d);Ade(a,b,e);return e}
+function l3d(a){var b,c;if(a.f){while(a.n0?(f=vpd(c)):(f=spd(vpd(c)))}Ixd(b,GBc,f)}
+function agc(a,b){var c;b.Ug('Partition preprocessing',1);c=RD(zDb(CDb(EDb(CDb(new SDb(null,new Swb(a.a,16)),new egc),new ggc),new igc),tBb(new ZBb,new XBb,new wCb,cD(WC(QL,1),jwe,108,0,[(xBb(),vBb)]))),15);FDb(c.Oc(),new kgc);b.Vg()}
+function Uoc(a,b){var c,d,e,f,g;g=a.j;b.a!=b.b&&_mb(g,new ypc);e=g.c.length/2|0;for(d=0;d0&&uLc(a,c,b);return f}else if(d.a!=null){uLc(a,b,c);return -1}else if(e.a!=null){uLc(a,c,b);return 1}return 0}
+function EVc(a,b){var c,d,e,f,g;e=b.b.b;a.a=$C(QK,Ize,15,e,0,1);a.b=$C(xdb,Hye,28,e,16,1);for(g=Sub(b.b,0);g.b!=g.d.c;){f=RD(evb(g),39);a.a[f.g]=new Yub}for(d=Sub(b.a,0);d.b!=d.d.c;){c=RD(evb(d),65);a.a[c.b.g].Fc(c);a.a[c.c.g].Fc(c)}}
+function SJd(a,b){var c,d,e,f;if(a.Pj()){c=a.Ej();f=a.Qj();++a.j;a.qj(c,a.Zi(c,b));d=a.Ij(3,null,b,c,f);if(a.Mj()){e=a.Nj(b,null);if(!e){a.Jj(d)}else{e.nj(d);e.oj()}}else{a.Jj(d)}}else{_Id(a,b);if(a.Mj()){e=a.Nj(b,null);!!e&&e.oj()}}}
+function oLd(a,b,c){var d,e,f;if(a.Pj()){f=a.Qj();KHd(a,b,c);d=a.Ij(3,null,c,b,f);if(a.Mj()){e=a.Nj(c,null);a.Tj()&&(e=a.Uj(c,e));if(!e){a.Jj(d)}else{e.nj(d);e.oj()}}else{a.Jj(d)}}else{KHd(a,b,c);if(a.Mj()){e=a.Nj(c,null);!!e&&e.oj()}}}
+function bge(a,b){var c,d,e,f,g;g=pke(a.e.Dh(),b);e=new YHd;c=RD(a.g,124);for(f=a.i;--f>=0;){d=c[f];g.am(d.Lk())&&WGd(e,d)}!wLd(a,e)&&Mvd(a.e)&&eZd(a,b.Jk()?fge(a,6,b,(yob(),vob),null,-1,false):fge(a,b.tk()?2:1,b,null,null,-1,false))}
+function _7b(a,b){var c,d,e,f,g;if(a.a==($uc(),Yuc)){return true}f=b.a.c;c=b.a.c+b.a.b;if(b.j){d=b.A;g=d.c.c.a-d.o.a/2;e=f-(d.n.a+d.o.a);if(e>g){return false}}if(b.q){d=b.C;g=d.c.c.a-d.o.a/2;e=d.n.a-c;if(e>g){return false}}return true}
+function bRc(a){WQc();var b,c,d,e,f,g,h;c=new gub;for(e=new Anb(a.e.b);e.a1?(a.e*=Kfb(a.a)):(a.f/=Kfb(a.a));uRb(a);vRb(a);rRb(a);pQb(a.b,(tSb(),lSb),a.g)}
+function n9b(a,b,c){var d,e,f,g,h,i;d=0;i=c;if(!b){d=c*(a.c.length-1);i*=-1}for(f=new Anb(a);f.a=0?a.Ah(null):a.Ph().Th(a,-1-b,null,null));a.Bh(RD(e,54),c);!!d&&d.oj();a.vh()&&a.wh()&&c>-1&&qvd(a,new N3d(a,9,c,f,e));return e}}}return f}
+function stb(a,b){var c,d,e,f,g;f=a.b.Ce(b);d=(c=a.a.get(f),c==null?$C(jJ,rve,1,0,5,1):c);for(g=0;g>5;if(e>=a.d){return a.e<0}c=a.a[e];b=1<<(b&31);if(a.e<0){d=Uib(a);if(e>16)),15).dd(f);if(h0){!(Dmd(a.a.c)&&b.n.d)&&!(Emd(a.a.c)&&b.n.b)&&(b.g.d+=$wnd.Math.max(0,d/2-0.5));!(Dmd(a.a.c)&&b.n.a)&&!(Emd(a.a.c)&&b.n.c)&&(b.g.a-=d-1)}}}
+function c7b(a){var b,c,d,e,f;e=new bnb;f=d7b(a,e);b=RD(mQb(a,(Ywc(),Iwc)),10);if(b){for(d=new Anb(b.j);d.a>b;f=a.m>>b|c<<22-b;e=a.l>>b|a.m<<22-b}else if(b<44){g=d?exe:0;f=c>>b-22;e=a.m>>b-22|c<<44-b}else{g=d?exe:0;f=d?dxe:0;e=c>>b-44}return hD(e&dxe,f&dxe,g&exe)}
+function ORb(a){var b,c,d,e,f,g;this.c=new bnb;this.d=a;d=oxe;e=oxe;b=pxe;c=pxe;for(g=Sub(a,0);g.b!=g.d.c;){f=RD(evb(g),8);d=$wnd.Math.min(d,f.a);e=$wnd.Math.min(e,f.b);b=$wnd.Math.max(b,f.a);c=$wnd.Math.max(c,f.b)}this.a=new Uid(d,e,b-d,c-e)}
+function Udc(a,b){var c,d,e,f,g,h;for(f=new Anb(a.b);f.a0&&ZD(b,44)){a.a._j();j=RD(b,44);i=j.ld();f=i==null?0:tb(i);g=bOd(a.a,f);c=a.a.d[g];if(c){d=RD(c.g,379);k=c.i;for(h=0;h=2){c=e.Kc();b=UD(c.Pb());while(c.Ob()){f=b;b=UD(c.Pb());d=$wnd.Math.min(d,(uFb(b),b)-(uFb(f),f))}}return d}
+function iWc(a,b){var c,d,e;e=new bnb;for(d=Sub(b.a,0);d.b!=d.d.c;){c=RD(evb(d),65);c.b.g==a.g&&!lhb(c.b.c,IEe)&&dE(mQb(c.b,(h_c(),f_c)))!==dE(mQb(c.c,f_c))&&!yDb(new SDb(null,new Swb(e,16)),new OWc(c))&&(ZEb(e.c,c),true)}_mb(e,new QWc);return e}
+function $u(a,b){var c,d,e;if(dE(b)===dE(Qb(a))){return true}if(!ZD(b,15)){return false}d=RD(b,15);e=a.gc();if(e!=d.gc()){return false}if(ZD(d,59)){for(c=0;c0&&(e=c);for(g=new Anb(a.f.e);g.a0){b-=1;c-=1}else{if(d>=0&&e<0){b+=1;c+=1}else{if(d>0&&e>=0){b-=1;c+=1}else{b+=1;c-=1}}}}}return new Ptd(sgb(b),sgb(c))}
+function nNc(a,b){if(a.cb.c){return 1}else if(a.bb.b){return 1}else if(a.a!=b.a){return tb(a.a)-tb(b.a)}else if(a.d==(sNc(),rNc)&&b.d==qNc){return -1}else if(a.d==qNc&&b.d==rNc){return 1}return 0}
+function ARc(a,b){var c,d,e,f,g;f=b.a;f.c.i==b.b?(g=f.d):(g=f.c);f.c.i==b.b?(d=f.c):(d=f.d);e=lQc(a.a,g,d);if(e>0&&e0}else if(e<0&&-e0}return false}
+function X9c(a,b,c,d){var e,f,g,h,i,j,k,l;e=(b-a.d)/a.c.c.length;f=0;a.a+=c;a.d=b;for(l=new Anb(a.c);l.a>24}return g}
+function Bfb(a){if(a.ze()){var b=a.c;b.Ae()?(a.o='['+b.n):!b.ze()?(a.o='[L'+b.xe()+';'):(a.o='['+b.xe());a.b=b.we()+'[]';a.k=b.ye()+'[]';return}var c=a.j;var d=a.d;d=d.split('/');a.o=Efb('.',[c,Efb('$',d)]);a.b=Efb('.',[c,Efb('.',d)]);a.k=d[d.length-1]}
+function hJb(a,b){var c,d,e,f,g;g=null;for(f=new Anb(a.e.a);f.a=0;b-=2){for(c=0;c<=b;c+=2){if(a.b[c]>a.b[c+2]||a.b[c]===a.b[c+2]&&a.b[c+1]>a.b[c+3]){d=a.b[c+2];a.b[c+2]=a.b[c];a.b[c]=d;d=a.b[c+3];a.b[c+3]=a.b[c+1];a.b[c+1]=d}}}a.c=true}
+function nKc(a,b){var c,d,e,f,g,h,i,j,k;j=-1;k=0;for(g=a,h=0,i=g.length;h0&&++k}}++j}return k}
+function awd(a){var b,c;c=new dib(nfb(a.Rm));c.a+='@';Zhb(c,(b=tb(a)>>>0,b.toString(16)));if(a.Vh()){c.a+=' (eProxyURI: ';Yhb(c,a._h());if(a.Kh()){c.a+=' eClass: ';Yhb(c,a.Kh())}c.a+=')'}else if(a.Kh()){c.a+=' (eClass: ';Yhb(c,a.Kh());c.a+=')'}return c.a}
+function KGb(a){var b,c,d,e;if(a.e){throw Adb(new dgb((lfb(lN),lye+lN.k+mye)))}a.d==(Cmd(),Amd)&&JGb(a,ymd);for(c=new Anb(a.a.a);c.a>24}return c}
+function cNb(a,b,c){var d,e,f;e=RD(Vrb(a.i,b),314);if(!e){e=new UKb(a.d,b,c);Wrb(a.i,b,e);if(jMb(b)){tKb(a.a,b.c,b.b,e)}else{f=iMb(b);d=RD(Vrb(a.p,f),252);switch(f.g){case 1:case 3:e.j=true;cLb(d,b.b,e);break;case 4:case 2:e.k=true;cLb(d,b.c,e);}}}return e}
+function Ndc(a,b){var c,d,e,f,g,h,i,j,k;i=ev(a.c-a.b&a.a.length-1);j=null;k=null;for(f=new Kmb(a);f.a!=f.b;){e=RD(Imb(f),10);c=(h=RD(mQb(e,(Ywc(),vwc)),12),!h?null:h.i);d=(g=RD(mQb(e,wwc),12),!g?null:g.i);if(j!=c||k!=d){Rdc(i,b);j=c;k=d}ZEb(i.c,e)}Rdc(i,b)}
+function Rge(a,b,c,d){var e,f,g,h,i,j;h=new YHd;i=pke(a.e.Dh(),b);e=RD(a.g,124);nke();if(RD(b,69).xk()){for(g=0;g=0){return e}else{f=1;for(h=new Anb(b.j);h.a=0){return e}else{f=1;for(h=new Anb(b.j);h.a0&&b.Ne((tFb(e-1,a.c.length),RD(a.c[e-1],10)),f)>0){$mb(a,e,(tFb(e-1,a.c.length),RD(a.c[e-1],10)));--e}tFb(e,a.c.length);a.c[e]=f}c.a=new Tsb;c.b=new Tsb}
+function yhd(a,b,c){var d,e,f,g,h,i,j,k;k=(d=RD(b.e&&b.e(),9),new Fsb(d,RD(WEb(d,d.length),9),0));i=vhb(c,'[\\[\\]\\s,]+');for(f=i,g=0,h=f.length;g=0){if(!b){b=new Rhb;d>0&&Nhb(b,(AFb(0,d,a.length),a.substr(0,d)))}b.a+='\\';Jhb(b,c&Bwe)}else !!b&&Jhb(b,c&Bwe)}return b?b.a:a}
+function MYb(a){var b,c,d;for(c=new Anb(a.a.a.b);c.a0){!(Dmd(a.a.c)&&b.n.d)&&!(Emd(a.a.c)&&b.n.b)&&(b.g.d-=$wnd.Math.max(0,d/2-0.5));!(Dmd(a.a.c)&&b.n.a)&&!(Emd(a.a.c)&&b.n.c)&&(b.g.a+=$wnd.Math.max(0,d-1))}}}
+function Ydc(a,b,c){var d,e;if((a.c-a.b&a.a.length-1)==2){if(b==(qpd(),Yod)||b==Xod){Odc(RD(omb(a),15),(Pnd(),Lnd));Odc(RD(omb(a),15),Mnd)}else{Odc(RD(omb(a),15),(Pnd(),Mnd));Odc(RD(omb(a),15),Lnd)}}else{for(e=new Kmb(a);e.a!=e.b;){d=RD(Imb(e),15);Odc(d,c)}}}
+function HGd(a,b){var c,d,e,f,g,h,i;e=cv(new QGd(a));h=new Jkb(e,e.c.length);f=cv(new QGd(b));i=new Jkb(f,f.c.length);g=null;while(h.b>0&&i.b>0){c=(sFb(h.b>0),RD(h.a.Xb(h.c=--h.b),27));d=(sFb(i.b>0),RD(i.a.Xb(i.c=--i.b),27));if(c==d){g=c}else{break}}return g}
+function Dmc(a,b,c){var d,e,f,g;if(Hmc(a,b)>Hmc(a,c)){d=b3b(c,(qpd(),Xod));a.d=d.dc()?0:L3b(RD(d.Xb(0),12));g=b3b(b,ppd);a.b=g.dc()?0:L3b(RD(g.Xb(0),12))}else{e=b3b(c,(qpd(),ppd));a.d=e.dc()?0:L3b(RD(e.Xb(0),12));f=b3b(b,Xod);a.b=f.dc()?0:L3b(RD(f.Xb(0),12))}}
+function wNb(a,b){var c,d,e,f;c=a.o.a;for(f=RD(RD(Qc(a.r,b),21),87).Kc();f.Ob();){e=RD(f.Pb(),117);e.e.a=c*Kfb(UD(e.b.of(sNb)));e.e.b=(d=e.b,d.pf((umd(),Gld))?d.ag()==(qpd(),Yod)?-d.Mf().b-Kfb(UD(d.of(Gld))):Kfb(UD(d.of(Gld))):d.ag()==(qpd(),Yod)?-d.Mf().b:0)}}
+function Mhc(a,b){var c,d,e,f;b.Ug('Self-Loop pre-processing',1);for(d=new Anb(a.a);d.aa.c){break}else if(e.a>=a.s){f<0&&(f=g);h=g}}i=(a.s+a.c)/2;if(f>=0){d=lTc(a,b,f,h);i=yTc((tFb(d,b.c.length),RD(b.c[d],339)));wTc(b,d,c)}return i}
+function _Ad(a,b,c){var d,e,f,g,h,i,j;g=(f=new pVd,f);nVd(g,(uFb(b),b));j=(!g.b&&(g.b=new SVd((JTd(),FTd),C8,g)),g.b);for(i=1;i0&&ASb(this,e)}}
+function zTb(a,b,c,d,e,f){var g,h,i;if(!e[b.a]){e[b.a]=true;g=d;!g&&(g=new gUb);Rmb(g.e,b);for(i=f[b.a].Kc();i.Ob();){h=RD(i.Pb(),289);if(h.d==c||h.c==c){continue}h.c!=b&&zTb(a,h.c,b,g,e,f);h.d!=b&&zTb(a,h.d,b,g,e,f);Rmb(g.c,h);Tmb(g.d,h.b)}return g}return null}
+function v7b(a){var b,c,d,e,f,g,h;b=0;for(e=new Anb(a.e);e.a=2}
+function _qc(a,b,c,d,e){var f,g,h,i,j,k;f=a.c.d.j;g=RD(ju(c,0),8);for(k=1;k1){return false}b=ysb(Xnd,cD(WC(A3,1),jwe,95,0,[Wnd,Znd]));if(dy(Tx(b,a))>1){return false}d=ysb(cod,cD(WC(A3,1),jwe,95,0,[bod,aod]));if(dy(Tx(d,a))>1){return false}return true}
+function $Uc(a,b,c){var d,e,f;for(f=new Anb(a.t);f.a0){d.b.n-=d.c;d.b.n<=0&&d.b.u>0&&Mub(b,d.b)}}for(e=new Anb(a.i);e.a0){d.a.u-=d.c;d.a.u<=0&&d.a.n>0&&Mub(c,d.a)}}}
+function tId(a){var b,c,d,e,f;if(a.g==null){a.d=a.bj(a.f);WGd(a,a.d);if(a.c){f=a.f;return f}}b=RD(a.g[a.i-1],51);e=b.Pb();a.e=b;c=a.bj(e);if(c.Ob()){a.d=c;WGd(a,c)}else{a.d=null;while(!b.Ob()){bD(a.g,--a.i,null);if(a.i==0){break}d=RD(a.g[a.i-1],51);b=d}}return e}
+function Rfe(a,b){var c,d,e,f,g,h;d=b;e=d.Lk();if(qke(a.e,e)){if(e.Si()&&cge(a,e,d.md())){return false}}else{h=pke(a.e.Dh(),e);c=RD(a.g,124);for(f=0;f1||c>1){return 2}}if(b+c==1){return 2}return 0}
+function Kwb(a,b){var c,d,e,f,g,h;f=a.a*Mxe+a.b*1502;h=a.b*Mxe+11;c=$wnd.Math.floor(h*Nxe);f+=c;h-=c*Oxe;f%=Oxe;a.a=f;a.b=h;if(b<=24){return $wnd.Math.floor(a.a*Ewb[b])}else{e=a.a*(1<=2147483648&&(d-=4294967296);return d}}
+function uSc(a,b,c){var d,e,f,g,h,i,j;f=new bnb;j=new Yub;g=new Yub;vSc(a,j,g,b);tSc(a,j,g,b,c);for(i=new Anb(a);i.ad.b.g&&(ZEb(f.c,d),true)}}return f}
+function jed(a,b,c){var d,e,f,g,h,i;h=a.c;for(g=(!c.q?(yob(),yob(),wob):c.q).vc().Kc();g.Ob();){f=RD(g.Pb(),44);d=!QDb(CDb(new SDb(null,new Swb(h,16)),new PAb(new xed(b,f)))).Bd((xDb(),wDb));if(d){i=f.md();if(ZD(i,4)){e=FId(i);e!=null&&(i=e)}b.qf(RD(f.ld(),149),i)}}}
+function mbd(a,b,c){var d,e;Sed(a.b);Ved(a.b,(gbd(),dbd),(_cd(),$cd));Ved(a.b,ebd,b.g);Ved(a.b,fbd,b.a);a.a=Qed(a.b,b);c.Ug('Compaction by shrinking a tree',a.a.c.length);if(b.i.c.length>1){for(e=new Anb(a.a);e.a=0?a.Lh(d,true,true):Qvd(a,f,true),160));RD(e,220).Xl(b,c)}else{throw Adb(new agb(KHe+b.xe()+LHe))}}
+function k2d(a,b){var c,d,e,f,g;if(!b){return null}else{f=ZD(a.Cb,90)||ZD(a.Cb,102);g=!f&&ZD(a.Cb,331);for(d=new dMd((!b.a&&(b.a=new iae(b,o7,b)),b.a));d.e!=d.i.gc();){c=RD(bMd(d),89);e=i2d(c);if(f?ZD(e,90):g?ZD(e,156):!!e){return e}}return f?(JTd(),zTd):(JTd(),wTd)}}
+function W8b(a,b){var c,d,e,f;b.Ug('Resize child graph to fit parent.',1);for(d=new Anb(a.b);d.a=2*b&&Rmb(c,new BTc(g[d-1]+b,g[d]-b))}return c}
+function dEd(a,b,c){var d,e,f,g,h,j,k,l;if(c){f=c.a.length;d=new vue(f);for(h=(d.b-d.a)*d.c<0?(uue(),tue):new Rue(d);h.Ob();){g=RD(h.Pb(),17);e=xDd(c,g.a);!!e&&(i=null,j=sEd(a,(k=(bvd(),l=new PCd,l),!!b&&NCd(k,b),k),e),jyd(j,zDd(e,uIe)),GEd(e,j),HEd(e,j),CEd(a,e,j))}}}
+function sYd(a){var b,c,d,e,f,g;if(!a.j){g=new f1d;b=iYd;f=b.a.zc(a,b);if(f==null){for(d=new dMd(zYd(a));d.e!=d.i.gc();){c=RD(bMd(d),29);e=sYd(c);YGd(g,e);WGd(g,c)}b.a.Bc(a)!=null}VHd(g);a.j=new N$d((RD(QHd(xYd((lTd(),kTd).o),11),19),g.i),g.g);yYd(a).b&=-33}return a.j}
+function lne(a){var b,c,d,e;if(a==null){return null}else{d=nue(a,true);e=mLe.length;if(lhb(d.substr(d.length-e,e),mLe)){c=d.length;if(c==4){b=(BFb(0,d.length),d.charCodeAt(0));if(b==43){return Yme}else if(b==45){return Xme}}else if(c==3){return Yme}}return new Ufb(d)}}
+function pD(a){var b,c,d;c=a.l;if((c&c-1)!=0){return -1}d=a.m;if((d&d-1)!=0){return -1}b=a.h;if((b&b-1)!=0){return -1}if(b==0&&d==0&&c==0){return -1}if(b==0&&d==0&&c!=0){return ogb(c)}if(b==0&&d!=0&&c==0){return ogb(d)+22}if(b!=0&&d==0&&c==0){return ogb(b)+44}return -1}
+function yo(a,b){var c,d,e,f,g;e=b.a&a.f;f=null;for(d=a.b[e];true;d=d.b){if(d==b){!f?(a.b[e]=b.b):(f.b=b.b);break}f=d}g=b.f&a.f;f=null;for(c=a.c[g];true;c=c.d){if(c==b){!f?(a.c[g]=b.d):(f.d=b.d);break}f=c}!b.e?(a.a=b.c):(b.e.c=b.c);!b.c?(a.e=b.e):(b.c.e=b.e);--a.i;++a.g}
+function Dt(a,b){var c;b.d?(b.d.b=b.b):(a.a=b.b);b.b?(b.b.d=b.d):(a.e=b.d);if(!b.e&&!b.c){c=RD(Hvb(RD(_jb(a.b,b.a),260)),260);c.a=0;++a.c}else{c=RD(Hvb(RD(Wjb(a.b,b.a),260)),260);--c.a;!b.e?(c.b=RD(Hvb(b.c),511)):(b.e.c=b.c);!b.c?(c.c=RD(Hvb(b.e),511)):(b.c.e=b.e)}--a.d}
+function XPb(a){var b,c,d,e,f,g,h,i,j,k;c=a.o;b=a.p;g=lve;e=qwe;h=lve;f=qwe;for(j=0;j0);f.a.Xb(f.c=--f.b);Ikb(f,e);sFb(f.b3&&UA(a,0,b-3)}}
+function eXb(a){var b,c,d,e;if(dE(mQb(a,(yCc(),IAc)))===dE((Fnd(),Cnd))){return !a.e&&dE(mQb(a,gAc))!==dE((xvc(),uvc))}d=RD(mQb(a,hAc),298);e=Heb(TD(mQb(a,nAc)))||dE(mQb(a,oAc))===dE((stc(),ptc));b=RD(mQb(a,fAc),17).a;c=a.a.c.length;return !e&&d!=(xvc(),uvc)&&(b==0||b>c)}
+function Rnc(a){var b,c;c=0;for(;c0){break}}if(c>0&&c0){break}}if(b>0&&c>16!=6&&!!b){if(Oje(a,b))throw Adb(new agb(UHe+Qzd(a)));d=null;!!a.Cb&&(d=(c=a.Db>>16,c>=0?Czd(a,d):a.Cb.Th(a,-1-c,null,d)));!!b&&(d=Ivd(b,a,6,d));d=Bzd(a,b,d);!!d&&d.oj()}else (a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,6,b,b))}
+function pzd(a,b){var c,d;if(b!=a.Cb||a.Db>>16!=3&&!!b){if(Oje(a,b))throw Adb(new agb(UHe+qzd(a)));d=null;!!a.Cb&&(d=(c=a.Db>>16,c>=0?jzd(a,d):a.Cb.Th(a,-1-c,null,d)));!!b&&(d=Ivd(b,a,12,d));d=izd(a,b,d);!!d&&d.oj()}else (a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,3,b,b))}
+function NCd(a,b){var c,d;if(b!=a.Cb||a.Db>>16!=9&&!!b){if(Oje(a,b))throw Adb(new agb(UHe+OCd(a)));d=null;!!a.Cb&&(d=(c=a.Db>>16,c>=0?LCd(a,d):a.Cb.Th(a,-1-c,null,d)));!!b&&(d=Ivd(b,a,9,d));d=KCd(a,b,d);!!d&&d.oj()}else (a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,9,b,b))}
+function tWd(b){var c,d,e,f,g;e=WVd(b);g=b.j;if(g==null&&!!e){return b.Jk()?null:e.ik()}else if(ZD(e,156)){d=e.jk();if(d){f=d.wi();if(f!=b.i){c=RD(e,156);if(c.nk()){try{b.g=f.ti(c,g)}catch(a){a=zdb(a);if(ZD(a,82)){b.g=null}else throw Adb(a)}}b.i=f}}return b.g}return null}
+function nRb(a){var b;b=new bnb;Rmb(b,new TFb(new rjd(a.c,a.d),new rjd(a.c+a.b,a.d)));Rmb(b,new TFb(new rjd(a.c,a.d),new rjd(a.c,a.d+a.a)));Rmb(b,new TFb(new rjd(a.c+a.b,a.d+a.a),new rjd(a.c+a.b,a.d)));Rmb(b,new TFb(new rjd(a.c+a.b,a.d+a.a),new rjd(a.c,a.d+a.a)));return b}
+function ic(b){var c,d,e;if(b==null){return vve}try{return jeb(b)}catch(a){a=zdb(a);if(ZD(a,103)){c=a;e=nfb(rb(b))+'@'+(d=(gib(),jFb(b))>>>0,d.toString(16));lBb(pBb(),(SAb(),'Exception during lenientFormat for '+e),c);return '<'+e+' threw '+nfb(c.Rm)+'>'}else throw Adb(a)}}
+function mTb(a,b,c){var d,e,f;for(f=b.a.ec().Kc();f.Ob();){e=RD(f.Pb(),74);d=RD(Wjb(a.b,e),272);!d&&(vCd(JGd(e))==vCd(LGd(e))?lTb(a,e,c):JGd(e)==vCd(LGd(e))?Wjb(a.c,e)==null&&Wjb(a.b,LGd(e))!=null&&oTb(a,e,c,false):Wjb(a.d,e)==null&&Wjb(a.b,JGd(e))!=null&&oTb(a,e,c,true))}}
+function Pfc(a,b){var c,d,e,f,g,h,i;for(e=a.Kc();e.Ob();){d=RD(e.Pb(),10);h=new R3b;P3b(h,d);Q3b(h,(qpd(),Xod));pQb(h,(Ywc(),Hwc),(Geb(),true));for(g=b.Kc();g.Ob();){f=RD(g.Pb(),10);i=new R3b;P3b(i,f);Q3b(i,ppd);pQb(i,Hwc,true);c=new a1b;pQb(c,Hwc,true);Y0b(c,h);Z0b(c,i)}}}
+function Pqc(a,b,c,d){var e,f,g,h;e=Nqc(a,b,c);f=Nqc(a,c,b);g=RD(Wjb(a.c,b),118);h=RD(Wjb(a.c,c),118);if(e1){b=eJb((c=new gJb,++a.b,c),a.d);for(h=Sub(f,0);h.b!=h.d.c;){g=RD(evb(h),125);rIb(uIb(tIb(vIb(sIb(new wIb,1),0),b),g))}}}
+function isc(a,b,c){var d,e,f,g,h;c.Ug('Breaking Point Removing',1);a.a=RD(mQb(b,(yCc(),yAc)),223);for(f=new Anb(b.b);f.a>16!=11&&!!b){if(Oje(a,b))throw Adb(new agb(UHe+zCd(a)));d=null;!!a.Cb&&(d=(c=a.Db>>16,c>=0?sCd(a,d):a.Cb.Th(a,-1-c,null,d)));!!b&&(d=Ivd(b,a,10,d));d=rCd(a,b,d);!!d&&d.oj()}else (a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,11,b,b))}
+function C0b(a){var b,c,d,e;for(d=new vkb((new mkb(a.b)).a);d.b;){c=tkb(d);e=RD(c.ld(),12);b=RD(c.md(),10);pQb(b,(Ywc(),Awc),e);pQb(e,Iwc,b);pQb(e,nwc,(Geb(),true));Q3b(e,RD(mQb(b,hwc),64));mQb(b,hwc);pQb(e.i,(yCc(),BBc),(Bod(),yod));RD(mQb(Y2b(e.i),kwc),21).Fc((ovc(),kvc))}}
+function X7b(a,b,c){var d,e,f,g,h,i;f=0;g=0;if(a.c){for(i=new Anb(a.d.i.j);i.af.a){return -1}else if(e.ai){k=a.d;a.d=$C(D6,KJe,66,2*i+4,0,1);for(f=0;f=9223372036854775807){return MD(),ID}e=false;if(a<0){e=true;a=-a}d=0;if(a>=hxe){d=eE(a/hxe);a-=d*hxe}c=0;if(a>=gxe){c=eE(a/gxe);a-=c*gxe}b=eE(a);f=hD(b,c,d);e&&nD(f);return f}
+function KCb(a){var b,c,d,e,f;f=new bnb;Umb(a.b,new SEb(f));a.b.c.length=0;if(f.c.length!=0){b=(tFb(0,f.c.length),RD(f.c[0],82));for(c=1,d=f.c.length;c=-b&&d==b){return new Ptd(sgb(c-1),sgb(d))}return new Ptd(sgb(c),sgb(d-1))}
+function lcc(){hcc();return cD(WC(YS,1),jwe,81,0,[nbc,kbc,obc,Ebc,Xbc,Ibc,bcc,Nbc,Vbc,zbc,Rbc,Mbc,Wbc,vbc,dcc,ebc,Qbc,Zbc,Fbc,Ybc,fcc,Tbc,fbc,Ubc,gcc,_bc,ecc,Gbc,sbc,Hbc,Dbc,ccc,ibc,qbc,Kbc,hbc,Lbc,Bbc,wbc,Obc,ybc,lbc,jbc,Cbc,xbc,Pbc,acc,gbc,Sbc,Abc,Jbc,tbc,rbc,$bc,pbc,ubc,mbc])}
+function Cmc(a,b,c){a.d=0;a.b=0;b.k==(r3b(),q3b)&&c.k==q3b&&RD(mQb(b,(Ywc(),Awc)),10)==RD(mQb(c,Awc),10)&&(Gmc(b).j==(qpd(),Yod)?Dmc(a,b,c):Dmc(a,c,b));b.k==q3b&&c.k==o3b?Gmc(b).j==(qpd(),Yod)?(a.d=1):(a.b=1):c.k==q3b&&b.k==o3b&&(Gmc(c).j==(qpd(),Yod)?(a.b=1):(a.d=1));Imc(a,b,c)}
+function EFd(a){var b,c,d,e,f,g,h,i,j,k,l;l=HFd(a);b=a.a;i=b!=null;i&&sDd(l,'category',a.a);e=cve(new Xkb(a.d));g=!e;if(g){j=new MB;sC(l,'knownOptions',j);c=new MFd(j);xgb(new Xkb(a.d),c)}f=cve(a.g);h=!f;if(h){k=new MB;sC(l,'supportedFeatures',k);d=new OFd(k);xgb(a.g,d)}return l}
+function Ly(a){var b,c,d,e,f,g,h,i,j;d=false;b=336;c=0;f=new hq(a.length);for(h=a,i=0,j=h.length;i>16!=7&&!!b){if(Oje(a,b))throw Adb(new agb(UHe+gCd(a)));d=null;!!a.Cb&&(d=(c=a.Db>>16,c>=0?cCd(a,d):a.Cb.Th(a,-1-c,null,d)));!!b&&(d=RD(b,54).Rh(a,1,H4,d));d=bCd(a,b,d);!!d&&d.oj()}else (a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,7,b,b))}
+function lVd(a,b){var c,d;if(b!=a.Cb||a.Db>>16!=3&&!!b){if(Oje(a,b))throw Adb(new agb(UHe+oVd(a)));d=null;!!a.Cb&&(d=(c=a.Db>>16,c>=0?iVd(a,d):a.Cb.Th(a,-1-c,null,d)));!!b&&(d=RD(b,54).Rh(a,0,p7,d));d=hVd(a,b,d);!!d&&d.oj()}else (a.Db&4)!=0&&(a.Db&1)==0&&qvd(a,new N3d(a,1,3,b,b))}
+function Mjb(a,b){Ljb();var c,d,e,f,g,h,i,j,k;if(b.d>a.d){h=a;a=b;b=h}if(b.d<63){return Qjb(a,b)}g=(a.d&-2)<<4;j=$ib(a,g);k=$ib(b,g);d=Gjb(a,Zib(j,g));e=Gjb(b,Zib(k,g));i=Mjb(j,k);c=Mjb(d,e);f=Mjb(Gjb(j,d),Gjb(e,k));f=Bjb(Bjb(f,i),c);f=Zib(f,g);i=Zib(i,g<<1);return Bjb(Bjb(i,f),c)}
+function _Cc(){_Cc=geb;ZCc=new bDc(lEe,0);WCc=new bDc('LONGEST_PATH',1);XCc=new bDc('LONGEST_PATH_SOURCE',2);TCc=new bDc('COFFMAN_GRAHAM',3);VCc=new bDc(BBe,4);$Cc=new bDc('STRETCH_WIDTH',5);YCc=new bDc('MIN_WIDTH',6);SCc=new bDc('BF_MODEL_ORDER',7);UCc=new bDc('DF_MODEL_ORDER',8)}
+function AKc(a,b,c){var d,e,f,g,h;g=aMc(a,c);h=$C(jR,WAe,10,b.length,0,1);d=0;for(f=g.Kc();f.Ob();){e=RD(f.Pb(),12);Heb(TD(mQb(e,(Ywc(),nwc))))&&(h[d++]=RD(mQb(e,Iwc),10))}if(d=0;f+=c?1:-1){g=g|b.c.lg(i,f,c,d&&!Heb(TD(mQb(b.j,(Ywc(),jwc))))&&!Heb(TD(mQb(b.j,(Ywc(),Owc)))));g=g|b.q.ug(i,f,c);g=g|CKc(a,i[f],c,d)}Ysb(a.c,b);return g}
+function F6b(a,b,c){var d,e,f,g,h,i,j,k,l,m;for(k=u2b(a.j),l=0,m=k.length;l1&&(a.a=true);QQb(RD(c.b,68),$id(ajd(RD(b.b,68).c),ijd(ojd(ajd(RD(c.b,68).a),RD(b.b,68).a),e)));Odd(a,b);Qdd(a,c)}}
+function tYb(a){var b,c,d,e,f,g,h;for(f=new Anb(a.a.a);f.a0&&f>0?(g.p=b++):d>0?(g.p=c++):f>0?(g.p=e++):(g.p=c++);}}yob();_mb(a.j,new Lfc)}
+function zic(a){var b,c;c=null;b=RD(Vmb(a.g,0),18);do{c=b.d.i;if(nQb(c,(Ywc(),wwc))){return RD(mQb(c,wwc),12).i}if(c.k!=(r3b(),p3b)&&gs(new is(Mr(a3b(c).a.Kc(),new ir)))){b=RD(hs(new is(Mr(a3b(c).a.Kc(),new ir))),18)}else if(c.k!=p3b){return null}}while(!!c&&c.k!=(r3b(),p3b));return c}
+function sqc(a,b){var c,d,e,f,g,h,i,j,k;h=b.j;g=b.g;i=RD(Vmb(h,h.c.length-1),113);k=(tFb(0,h.c.length),RD(h.c[0],113));j=oqc(a,g,i,k);for(f=1;fj){i=c;k=e;j=d}}b.a=k;b.c=i}
+function fMc(a,b,c){var d,e,f,g,h,i,j;j=new yAb(new TMc(a));for(g=cD(WC(xR,1),XAe,12,0,[b,c]),h=0,i=g.length;hi-a.b&&hi-a.a&&h0){if(f.a){h=f.b.Mf().a;if(c>h){e=(c-h)/2;f.d.b=e;f.d.c=e}}else{f.d.c=a.s+c}}else if(Rod(a.u)){d=wsd(f.b);d.c<0&&(f.d.b=-d.c);d.c+d.b>f.b.Mf().a&&(f.d.c=d.c+d.b-f.b.Mf().a)}}}
+function RUc(a,b){var c,d,e,f,g;g=new bnb;c=b;do{f=RD(Wjb(a.b,c),131);f.B=c.c;f.D=c.d;ZEb(g.c,f);c=RD(Wjb(a.k,c),18)}while(c);d=(tFb(0,g.c.length),RD(g.c[0],131));d.j=true;d.A=RD(d.d.a.ec().Kc().Pb(),18).c.i;e=RD(Vmb(g,g.c.length-1),131);e.q=true;e.C=RD(e.d.a.ec().Kc().Pb(),18).d.i;return g}
+function pPb(a){var b,c;b=RD(a.a,17).a;c=RD(a.b,17).a;if(b>=0){if(b==c){return new Ptd(sgb(-b-1),sgb(-b-1))}if(b==-c){return new Ptd(sgb(-b),sgb(c+1))}}if($wnd.Math.abs(b)>$wnd.Math.abs(c)){if(b<0){return new Ptd(sgb(-b),sgb(c))}return new Ptd(sgb(-b),sgb(c+1))}return new Ptd(sgb(b+1),sgb(c))}
+function H8b(a){var b,c;c=RD(mQb(a,(yCc(),UAc)),171);b=RD(mQb(a,(Ywc(),owc)),311);if(c==(cxc(),$wc)){pQb(a,UAc,bxc);pQb(a,owc,(Gvc(),Fvc))}else if(c==axc){pQb(a,UAc,bxc);pQb(a,owc,(Gvc(),Dvc))}else if(b==(Gvc(),Fvc)){pQb(a,UAc,$wc);pQb(a,owc,Evc)}else if(b==Dvc){pQb(a,UAc,axc);pQb(a,owc,Evc)}}
+function dSc(){dSc=geb;bSc=new pSc;ZRc=pfd(new ufd,(sXb(),pXb),(hcc(),Fbc));aSc=nfd(pfd(new ufd,pXb,Tbc),rXb,Sbc);cSc=mfd(mfd(rfd(nfd(pfd(new ufd,nXb,bcc),rXb,acc),qXb),_bc),ccc);$Rc=nfd(pfd(pfd(pfd(new ufd,oXb,Ibc),qXb,Kbc),qXb,Lbc),rXb,Jbc);_Rc=nfd(pfd(pfd(new ufd,qXb,Lbc),qXb,qbc),rXb,pbc)}
+function HUc(){HUc=geb;CUc=pfd(nfd(new ufd,(sXb(),rXb),(hcc(),tbc)),pXb,Fbc);GUc=mfd(mfd(rfd(nfd(pfd(new ufd,nXb,bcc),rXb,acc),qXb),_bc),ccc);DUc=nfd(pfd(pfd(pfd(new ufd,oXb,Ibc),qXb,Kbc),qXb,Lbc),rXb,Jbc);FUc=pfd(pfd(new ufd,pXb,Tbc),rXb,Sbc);EUc=nfd(pfd(pfd(new ufd,qXb,Lbc),qXb,qbc),rXb,pbc)}
+function eSc(a,b,c,d,e){var f,g;if((!W0b(b)&&b.c.i.c==b.d.i.c||!djd(xjd(cD(WC(l3,1),Nve,8,0,[e.i.n,e.n,e.a])),c))&&!W0b(b)){b.c==e?hu(b.a,0,new sjd(c)):Mub(b.a,new sjd(c));if(d&&!Zsb(a.a,c)){g=RD(mQb(b,(yCc(),RAc)),75);if(!g){g=new Ejd;pQb(b,RAc,g)}f=new sjd(c);Pub(g,f,g.c.b,g.c);Ysb(a.a,f)}}}
+function ht(a,b){var c,d,e,f;f=Ydb(Ndb(cwe,qgb(Ydb(Ndb(b==null?0:tb(b),dwe)),15)));c=f&a.b.length-1;e=null;for(d=a.b[c];d;e=d,d=d.a){if(d.d==f&&Hb(d.i,b)){!e?(a.b[c]=d.a):(e.a=d.a);Ts(RD(Hvb(d.c),604),RD(Hvb(d.f),604));Ss(RD(Hvb(d.b),227),RD(Hvb(d.e),227));--a.f;++a.e;return true}}return false}
+function dec(a){var b,c;for(c=new is(Mr(Z2b(a).a.Kc(),new ir));gs(c);){b=RD(hs(c),18);if(b.c.i.k!=(r3b(),n3b)){throw Adb(new Jed(nBe+X2b(a)+"' has its layer constraint set to FIRST, but has at least one incoming edge that "+' does not come from a FIRST_SEPARATE node. That must not happen.'))}}}
+function Twd(a,b,c){var d,e,f,g,h,i,j;e=ggb(a.Db&254);if(e==0){a.Eb=c}else{if(e==1){h=$C(jJ,rve,1,2,5,1);f=Xwd(a,b);if(f==0){h[0]=c;h[1]=a.Eb}else{h[0]=a.Eb;h[1]=c}}else{h=$C(jJ,rve,1,e+1,5,1);g=SD(a.Eb);for(d=2,i=0,j=0;d<=128;d<<=1){d==b?(h[j++]=c):(a.Db&d)!=0&&(h[j++]=g[i++])}}a.Eb=h}a.Db|=b}
+function vQb(a,b,c){var d,e,f,g;this.b=new bnb;e=0;d=0;for(g=new Anb(a);g.a0){f=RD(Vmb(this.b,0),176);e+=f.o;d+=f.p}e*=2;d*=2;b>1?(e=eE($wnd.Math.ceil(e*b))):(d=eE($wnd.Math.ceil(d/b)));this.a=new gQb(e,d)}
+function mkc(a,b,c,d,e,f){var g,h,i,j,k,l,m,n,o,p,q,r;k=d;if(b.j&&b.o){n=RD(Wjb(a.f,b.A),60);p=n.d.c+n.d.b;--k}else{p=b.a.c+b.a.b}l=e;if(c.q&&c.o){n=RD(Wjb(a.f,c.C),60);j=n.d.c;++l}else{j=c.a.c}q=j-p;i=$wnd.Math.max(2,l-k);h=q/i;o=p+h;for(m=k;m=0;g+=e?1:-1){h=b[g];i=d==(qpd(),Xod)?e?b3b(h,d):hv(b3b(h,d)):e?hv(b3b(h,d)):b3b(h,d);f&&(a.c[h.p]=i.gc());for(l=i.Kc();l.Ob();){k=RD(l.Pb(),12);a.d[k.p]=j++}Tmb(c,i)}}
+function AUc(a,b,c){var d,e,f,g,h,i,j,k;f=Kfb(UD(a.b.Kc().Pb()));j=Kfb(UD(fr(b.b)));d=ijd(ajd(a.a),j-c);e=ijd(ajd(b.a),c-f);k=$id(d,e);ijd(k,1/(j-f));this.a=k;this.b=new bnb;h=true;g=a.b.Kc();g.Pb();while(g.Ob()){i=Kfb(UD(g.Pb()));if(h&&i-c>AEe){this.b.Fc(c);h=false}this.b.Fc(i)}h&&this.b.Fc(c)}
+function mJb(a){var b,c,d,e;pJb(a,a.n);if(a.d.c.length>0){Nnb(a.c);while(xJb(a,RD(ynb(new Anb(a.e.a)),125))>5;b&=31;if(d>=a.d){return a.e<0?(Pib(),Jib):(Pib(),Oib)}f=a.d-d;e=$C(kE,Pwe,28,f+1,15,1);ujb(e,f,a.a,d,b);if(a.e<0){for(c=0;c0&&a.a[c]<<32-b!=0){for(c=0;c=0){return false}else{c=Eee((lke(),jke),e,b);if(!c){return true}else{d=c.Ik();return (d>1||d==-1)&&yfe(Qee(jke,c))!=3}}}}else{return false}}
+function _4b(a,b,c,d){var e,f,g,h,i;h=AGd(RD(QHd((!b.b&&(b.b=new Yie(E4,b,4,7)),b.b),0),84));i=AGd(RD(QHd((!b.c&&(b.c=new Yie(E4,b,5,8)),b.c),0),84));if(vCd(h)==vCd(i)){return null}if(NGd(i,h)){return null}g=kzd(b);if(g==c){return d}else{f=RD(Wjb(a.a,g),10);if(f){e=f.e;if(e){return e}}}return null}
+function uHc(a,b,c){var d,e,f,g,h;c.Ug('Longest path to source layering',1);a.a=b;h=a.a.a;a.b=$C(kE,Pwe,28,h.c.length,15,1);d=0;for(g=new Anb(h);g.a0){c[0]+=a.d;g-=c[0]}if(c[2]>0){c[2]+=a.d;g-=c[2]}f=$wnd.Math.max(0,g);c[1]=$wnd.Math.max(c[1],g);mKb(a,XJb,e.c+d.b+c[0]-(c[1]-g)/2,c);if(b==XJb){a.c.b=f;a.c.c=e.c+d.b+(f-g)/2}}
+function D_b(){this.c=$C(iE,vxe,28,(qpd(),cD(WC(E3,1),NAe,64,0,[opd,Yod,Xod,npd,ppd])).length,15,1);this.b=$C(iE,vxe,28,cD(WC(E3,1),NAe,64,0,[opd,Yod,Xod,npd,ppd]).length,15,1);this.a=$C(iE,vxe,28,cD(WC(E3,1),NAe,64,0,[opd,Yod,Xod,npd,ppd]).length,15,1);Lnb(this.c,oxe);Lnb(this.b,pxe);Lnb(this.a,pxe)}
+function rte(a,b,c){var d,e,f,g;if(b<=c){e=b;f=c}else{e=c;f=b}d=0;if(a.b==null){a.b=$C(kE,Pwe,28,2,15,1);a.b[0]=e;a.b[1]=f;a.c=true}else{d=a.b.length;if(a.b[d-1]+1==e){a.b[d-1]=f;return}g=$C(kE,Pwe,28,d+2,15,1);hib(a.b,0,g,0,d);a.b=g;a.b[d-1]>=e&&(a.c=false,a.a=false);a.b[d++]=e;a.b[d]=f;a.c||vte(a)}}
+function Oqc(a,b,c){var d,e,f,g,h,i,j;j=b.d;a.a=new cnb(j.c.length);a.c=new Tsb;for(h=new Anb(j);h.a=0?a.Lh(j,false,true):Qvd(a,c,false),61));n:for(f=l.Kc();f.Ob();){e=RD(f.Pb(),58);for(k=0;k