diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/__init__.py b/deepseek/lib/python3.10/site-packages/lmformatenforcer/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..410d5142dcde94e27b16d2f7783a8e3bf0fb51f3 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lmformatenforcer/__init__.py @@ -0,0 +1,23 @@ +__all__ = ['CharacterLevelParser', + 'CharacterLevelParserConfig', + 'StringParser', + 'RegexParser', + 'UnionParser', + 'SequenceParser', + 'JsonSchemaParser', + 'TokenEnforcer', + 'TokenEnforcerTokenizerData', + 'LMFormatEnforcerException', + 'FormatEnforcerAnalyzer',] + +from .characterlevelparser import CharacterLevelParser, CharacterLevelParserConfig, StringParser, UnionParser, SequenceParser +from .regexparser import RegexParser +from .jsonschemaparser import JsonSchemaParser +from .tokenenforcer import TokenEnforcer, TokenEnforcerTokenizerData +from .exceptions import LMFormatEnforcerException +try: + from .analyzer import FormatEnforcerAnalyzer +except ImportError as e: + import logging + logging.warning(e) + FormatEnforcerAnalyzer = None diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/consts.py b/deepseek/lib/python3.10/site-packages/lmformatenforcer/consts.py new file mode 100644 index 0000000000000000000000000000000000000000..620ad737b734a92c6a6a67cbae4a5dacad02fbbf --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lmformatenforcer/consts.py @@ -0,0 +1,20 @@ +COMPLETE_ALPHABET = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%^&*()_+-=[]{};:,./<>? `'\"" +DEFAULT_MAX_CONSECUTIVE_WHITESPACES = 12 +DEFAULT_FORCE_JSON_FIELD_ORDER = False +DEFAULT_MAX_JSON_ARRAY_LENGTH = 20 +WHITESPACE_CHARACTERS = " \t\n\r" +BACKSLASH = "\\" +BACKSLASH_ESCAPING_CHARACTERS = '"\\/bfnrt' # Characters allowed after an escaping backslash, except unicode +BACKSLACH_UNICODE_ESCAPE = "u" + +CONFIG_ENV_VAR_MAX_CONSECUTIVE_WHITESPACES = 'LMFE_MAX_CONSECUTIVE_WHITESPACES' +"""Environment variable for externally controlling how many consecutive whitespaces the +JsonSchemaParser will allow. Default: 12""" + +CONFIG_ENV_VAR_STRICT_JSON_FIELD_ORDER = 'LMFE_STRICT_JSON_FIELD_ORDER' +"""Environment variable for externally controlling whether the JsonSchemaParser will force +fields to appear in the order of the 'required' field in the schema. Default: false""" + +CONFIG_ENV_VAR_MAX_JSON_ARRAY_LENGTH = 'LMFE_MAX_JSON_ARRAY_LENGTH' +"""Environment variable for externally controlling the maximal JSON array length, +if not specified by the schema. 
Default: 20""" diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/exceptions.py b/deepseek/lib/python3.10/site-packages/lmformatenforcer/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..c90e28fc10575f15eb3a38121963593595f1cb6e --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lmformatenforcer/exceptions.py @@ -0,0 +1,3 @@ +class LMFormatEnforcerException(Exception): + """Base class for exceptions in this module.""" + pass \ No newline at end of file diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/__init__.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d000080863a5ff8d3fdd9a80974de1c89ecc18b0 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/__init__.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/exllamav2.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/exllamav2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..995517b9c4e2737c5fb7af09debc92b15645b9d8 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/exllamav2.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/haystackv1.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/haystackv1.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ca54798505073e72114d9fd3f95082a319f9ac1f Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/haystackv1.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/haystackv2.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/haystackv2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..34027a024efa809486a85f267dab23f937dda1e2 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/haystackv2.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/trtllm.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/trtllm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..222cf805dd482b0422faa4f36c702016d31929a5 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/trtllm.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/vllm.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/vllm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..714112d7651ff1d9b345e4863a11ea1d2bf189a9 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/vllm.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/haystackv2.py b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/haystackv2.py new file 
mode 100644 index 0000000000000000000000000000000000000000..7885c6f5c6228c183ead20329e9848694da4ded3 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/haystackv2.py @@ -0,0 +1,74 @@ +try: + from haystack import component + from canals import Component +except ImportError: + raise ImportError('haystack is not installed. Please install it with "pip install farm-haystack" or "pip install haystack-ai"') + +import enum +from typing import Any, Callable, Dict, List, Optional +from lmformatenforcer import CharacterLevelParser + + +class _ModelType(enum.Enum): + HUGGINGFACE = 'HuggingFaceLocalGenerator' + # VLLM = 'vLLMLocalInvocationLayer' TODO: Add this when vLLM has Haystack V2 support + +@component +class LMFormatEnforcerLocalGenerator: + """A generator component for the Haystack V2 API that activates the LMFormatEnforcer on the generated text. + It wraps a local generator and should be added to the pipeline in its place""" + def __init__(self, model_component: Component, character_level_parser: Optional[CharacterLevelParser] = None): + """Initialize the generator component + :param model_component: A local generator component to wrap + :param character_level_parser: A CharacterLevelParser that will be used to enforce the format of the generated text""" + self.model_component = model_component + self.character_level_parser = character_level_parser + self._model_type = self._resolve_model_type() + self.token_enforcer_fn: Optional[Callable] = None + + @component.output_types(replies=List[str]) + def run(self, prompt: str, generation_kwargs: Optional[Dict[str, Any]] = None): + try: + self._inject_enforcer_into_model() + kwargs = {} + if generation_kwargs: + kwargs['generation_kwargs'] = generation_kwargs + return self.model_component.run(prompt, **kwargs) + finally: + self._release_model_injection() + + def warm_up(self): + if hasattr(self.model_component, 'warm_up'): + self.model_component.warm_up() + self.token_enforcer_fn = self._prepare_token_enforcer_fn() + + def _prepare_token_enforcer_fn(self) -> Optional[Callable]: + if not self.character_level_parser: + return None + if self._model_type == _ModelType.HUGGINGFACE: + tokenizer = self.model_component.pipeline.tokenizer + from lmformatenforcer.integrations.transformers import build_transformers_prefix_allowed_tokens_fn + return build_transformers_prefix_allowed_tokens_fn(tokenizer, self.character_level_parser) + raise NotImplementedError(f"Token enforcer not implemented for model type {self._model_type.name}") + + def _resolve_model_type(self) -> _ModelType: + generator_component_name = self.model_component.__class__.__name__ + try: + return _ModelType(generator_component_name) + except ValueError: + supported_strings = ",".join(str(t.name) for t in _ModelType) + raise ValueError(f"Unsupported local generator component layer: {generator_component_name}. 
" + f"Must be one of {supported_strings}") + + def _inject_enforcer_into_model(self): + if not self.token_enforcer_fn: + return + if self._model_type == _ModelType.HUGGINGFACE: + self.model_component.generation_kwargs['prefix_allowed_tokens_fn'] = self.token_enforcer_fn + + + def _release_model_injection(self): + if not self.token_enforcer_fn: + return + if self._model_type == _ModelType.HUGGINGFACE: + del self.model_component.generation_kwargs['prefix_allowed_tokens_fn'] diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/trtllm.py b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/trtllm.py new file mode 100644 index 0000000000000000000000000000000000000000..9d1fca7050bdda23c90dfdfc348b7754dfba5bf2 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/trtllm.py @@ -0,0 +1,84 @@ +import math +from typing import List, Optional, Tuple, Union +import torch +from transformers import PreTrainedTokenizerBase +from lmformatenforcer import CharacterLevelParser, FormatEnforcerAnalyzer +from lmformatenforcer.tokenenforcer import TokenEnforcer, TokenEnforcerTokenizerData + + +class TRTLLMLogitsProcessor: + def __init__(self, token_enforcer: TokenEnforcer, eos_token_id, analyze): + self.token_enforcer = token_enforcer + self.analyzer = FormatEnforcerAnalyzer(token_enforcer) if analyze else None + self.mask: Optional[torch.Tensor] = None + self.mask_val = -math.inf + self.eos_token_id = eos_token_id + + def _trim(self, input): + return [x for x in input.tolist() if x not in \ + (self.eos_token_id if isinstance(self.eos_token_id, list) else [self.eos_token_id])] + + def __call__(self, step: int, batch_input_ids: List[List[int]], logits: torch.Tensor) -> torch.Tensor: + for idx in range(len(batch_input_ids)): + if self.analyzer: + self.analyzer.report_raw_logits(batch_input_ids[idx], logits[idx].tolist()) + + allowed_tokens = self.token_enforcer.get_allowed_tokens(self._trim(batch_input_ids[idx])) + + if self.mask is not None: + self.mask.fill_(self.mask_val) + else: + # We create it here because full_like() also copies the device and dtype + self.mask = torch.full_like(logits[idx], self.mask_val) + self.mask[allowed_tokens] = 0 + logits[idx] = logits[idx] + self.mask + + return logits + + +def _build_regular_tokens_list(tokenizer) -> List[Tuple[int, str, bool]]: + # There are many classes that can be passed here, this logic should work on all of them. + if hasattr(tokenizer, 'get_tokenizer'): + tokenizer = tokenizer.get_tokenizer() + if hasattr(tokenizer, 'tokenizer'): + tokenizer = tokenizer.tokenizer + token_0 = [tokenizer.encode("0")[-1]] + regular_tokens = [] + vocab_size = tokenizer.vocab_size + for token_idx in range(vocab_size): + if token_idx in tokenizer.all_special_ids: + continue + # We prepend token 0 and skip the first letter of the result to get a space if the token is a start word. 
+ tensor_after_0 = torch.tensor(token_0 + [token_idx], dtype=torch.long) + decoded_after_0 = tokenizer.decode(tensor_after_0)[1:] + decoded_regular = tokenizer.decode(token_0) + is_word_start_token = len(decoded_after_0) > len(decoded_regular) + regular_tokens.append((token_idx, decoded_after_0, is_word_start_token)) + return regular_tokens + + +def build_trtlmm_tokenizer_data(tokenizer: PreTrainedTokenizerBase) -> TokenEnforcerTokenizerData: + """Build the TokenEnforcerTokenizerData from a tokenizer in order to cache it between instances""" + regular_tokens = _build_regular_tokens_list(tokenizer) + + def _decode(tokens: List[int]) -> str: + tensor = torch.tensor(tokens, dtype=torch.long) + return tokenizer.decode(tensor) + + tokenizer_data = TokenEnforcerTokenizerData(regular_tokens, _decode, tokenizer.eos_token_id) + return tokenizer_data + + +def build_trtllm_logits_processor(tokenizer: Union[PreTrainedTokenizerBase, TokenEnforcerTokenizerData], + character_level_parser: CharacterLevelParser, + analyze: bool = False) -> TRTLLMLogitsProcessor: + """ + Build logits processor for feeding it into generate function (use_py_session should be True) + """ + if isinstance(tokenizer, TokenEnforcerTokenizerData): + tokenizer_data = tokenizer + else: + tokenizer_data = build_trtlmm_tokenizer_data(tokenizer) + + token_enforcer = TokenEnforcer(tokenizer_data, character_level_parser) + return TRTLLMLogitsProcessor(token_enforcer, tokenizer.eos_token_id, analyze) diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/vllm.py b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/vllm.py new file mode 100644 index 0000000000000000000000000000000000000000..94f6c2c6fcd65f7e99949c8362392ea47296dd95 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/vllm.py @@ -0,0 +1,60 @@ +try: + import torch + import vllm + from vllm.transformers_utils.tokenizer import MistralTokenizer + from transformers import PreTrainedTokenizerBase +except ImportError: + raise ImportError('vllm is not installed. Please install it with "pip install vllm"') +from lmformatenforcer import CharacterLevelParser, TokenEnforcer, FormatEnforcerAnalyzer, TokenEnforcerTokenizerData +from lmformatenforcer.integrations.transformers import build_token_enforcer_tokenizer_data +from typing import List, Optional, Union +import math + + +class VLLMLogitsProcessor: + def __init__(self, token_enforcer: TokenEnforcer, analyze): + self.token_enforcer = token_enforcer + self.analyzer = FormatEnforcerAnalyzer(token_enforcer) if analyze else None + self.mask: Optional[torch.Tensor] = None + + def __call__(self, input_ids: List[int], scores: torch.Tensor) -> torch.Tensor: + token_sequence = input_ids + if self.analyzer: + self.analyzer.report_raw_logits(token_sequence, scores.tolist()) + allowed_tokens = self.token_enforcer.get_allowed_tokens(token_sequence) + if self.mask is not None: + self.mask.fill_(-math.inf) + else: + # We create it here because full_like() also copies the device and dtype + self.mask = torch.full_like(scores, -math.inf) + self.mask[allowed_tokens] = 0 + scores = scores + self.mask + return scores + + +def build_vllm_token_enforcer_tokenizer_data(tokenizer: Union[vllm.LLM, PreTrainedTokenizerBase]) -> TokenEnforcerTokenizerData: + # There are many classes that can be passed here, this logic should work on all of them. 
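+ # The unwrapping below handles vllm.LLM objects (llm_engine for the vocab size, get_tokenizer() for the tokenizer) and TokenizerGroup-style wrappers (an inner .tokenizer attribute); a bare PreTrainedTokenizerBase passes through unchanged.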
+ vocab_size = None + if hasattr(tokenizer, 'llm_engine'): + vocab_size = tokenizer.llm_engine.get_model_config().get_vocab_size() + if hasattr(tokenizer, 'get_tokenizer'): + tokenizer = tokenizer.get_tokenizer() + if isinstance(tokenizer, MistralTokenizer): + return build_token_enforcer_tokenizer_data(tokenizer, vocab_size) + if hasattr(tokenizer, 'tokenizer'): + tokenizer = tokenizer.tokenizer + return build_token_enforcer_tokenizer_data(tokenizer, vocab_size) + + +def build_vllm_logits_processor(llm: Union[vllm.LLM, PreTrainedTokenizerBase, TokenEnforcerTokenizerData], + character_level_parser: CharacterLevelParser, + analyze: bool=False) -> VLLMLogitsProcessor: + """Build the logits processor function that vLLM will use to filter the tokens generated by the model. The result + can be passed in the logits_processors list of the SamplingParams that is sent to the generate() method of vLLM models.""" + if not isinstance(llm, TokenEnforcerTokenizerData): + llm = build_vllm_token_enforcer_tokenizer_data(llm) + token_enforcer = TokenEnforcer(llm, character_level_parser) + return VLLMLogitsProcessor(token_enforcer, analyze) + + +__all__ = ['build_vllm_logits_processor', 'build_vllm_token_enforcer_tokenizer_data'] diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/jsonschemaparser.py b/deepseek/lib/python3.10/site-packages/lmformatenforcer/jsonschemaparser.py new file mode 100644 index 0000000000000000000000000000000000000000..d5d67a947b0ac5a65ce86fcdadb897f3ddbf1707 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lmformatenforcer/jsonschemaparser.py @@ -0,0 +1,710 @@ +from copy import deepcopy +import enum +import sys +from typing import Dict, Hashable, List, Optional, Union, cast + + +from .external.jsonschemaobject import JsonSchemaObject, json_schema_data_formats +from .exceptions import LMFormatEnforcerException +from .characterlevelparser import CharacterLevelParser, CharacterLevelParserConfig, ForceStopParser, SequenceParser, StringParser, UnionParser +from .consts import BACKSLASH, BACKSLASH_ESCAPING_CHARACTERS, WHITESPACE_CHARACTERS +from .regexparser import RegexParser + +# No need to include the 'integer' option in the anyOf, as it is a subset of 'number' +_ANY_JSON_SCHEMA_DICT = {'anyOf': [{'type': type} for type in json_schema_data_formats.keys() if type != 'integer']} + +class JsonSchemaParser(CharacterLevelParser): + ANY_JSON_OBJECT_SCHEMA: JsonSchemaObject = JsonSchemaObject(**_ANY_JSON_SCHEMA_DICT) + class _Context: + model_class: JsonSchemaObject + # We store the active parser in the context, so that if a node adds to the stack, it knows + # to which parser's stack to add. + active_parser: "JsonSchemaParser" + alphabet_without_quotes: str + regex_parser_cache: Dict[str, RegexParser] = {} + + object_stack: List[CharacterLevelParser] + context: _Context + num_consecutive_whitespaces: int + last_parsed_string: str # Slight hack to allow communicating the parsed key to the object parser + last_non_whitespace_character: str # Slight hack to allow list parser to know if there is an item on top + + def __init__(self, + json_schema: Union[dict, _Context, None], + config: Optional[CharacterLevelParserConfig] = None, + existing_stack: Optional[List[CharacterLevelParser]] = None, + num_consecutive_whitespaces: int = 0): + """Create a CharacterLevelParser for parsing JSON. + :param json_schema: The json schema to parse. 
Can be a dict representing a JSON schema, or None if any JSON output is allowed.""" + super().__init__(config) + if isinstance(json_schema, JsonSchemaParser._Context): + self.context = json_schema + else: + self.context = JsonSchemaParser._Context() + json_schema = json_schema or _ANY_JSON_SCHEMA_DICT + self.context.model_class = JsonSchemaObject(**json_schema) + self.context.active_parser = self + self.context.alphabet_without_quotes = self.config.alphabet.replace('"', '') + + self.num_consecutive_whitespaces = num_consecutive_whitespaces + if existing_stack is None: + self.object_stack = [get_parser(self, self.context.model_class)] + else: + self.object_stack = existing_stack + self.last_parsed_string = "" + self.last_non_whitespace_character = "" + + def add_character(self, new_character: str) -> CharacterLevelParser: + self.context.active_parser = self + # Assumption: The top-most parser that can accept the character is the one that should accept it. + # This is different from the SequenceParser, in which we need to split (union) into all options. + receiving_idx = len(self.object_stack) - 1 + last_parsed_string = self.last_parsed_string + while receiving_idx >= 0 and new_character not in self.object_stack[receiving_idx].get_allowed_characters(): + finished_receiver = self.object_stack[receiving_idx] + if isinstance(finished_receiver, StringParsingState): + last_parsed_string = finished_receiver.parsed_string + receiving_idx -= 1 + + updated_stack = self.object_stack[:receiving_idx + 1] + updated_parser = JsonSchemaParser(self.context, self.config, updated_stack, self.num_consecutive_whitespaces) + updated_parser.context.active_parser = updated_parser + updated_parser.last_parsed_string = last_parsed_string + if receiving_idx >= 0: + updated_parser.object_stack[receiving_idx] = updated_parser.object_stack[receiving_idx].add_character(new_character) + if new_character in WHITESPACE_CHARACTERS: + updated_parser.num_consecutive_whitespaces += 1 + updated_parser.last_non_whitespace_character = self.last_non_whitespace_character + else: + updated_parser.num_consecutive_whitespaces = 0 + updated_parser.last_non_whitespace_character = new_character + + if updated_parser.object_stack and isinstance(updated_parser.object_stack[-1], UnionParser) and \ + any(isinstance(parser, (ObjectParsingState, ListParsingState)) for parser in updated_parser.object_stack[-1].parsers): + # If the top parser is a union parser with "advanced" (=parsers that modify the object stack) parsers inside, + # we need to split the top level parser into the different options, + # as each "fork" can live with a different object stack, and we need to make sure they have their own ones. 
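+ # e.g. a schema with anyOf: [{"type": "object"}, {"type": "array"}] reaches this point as a UnionParser, and the '{' and '[' branches mutate the object stack differently, so each option receives its own copy of the stack.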
+ option_json_schema_parsers = [] + for option_parser in updated_parser.object_stack[-1].parsers: + option_stack = updated_parser.object_stack[:-1] + [option_parser] + option_parser = JsonSchemaParser(self.context, self.config, option_stack, updated_parser.num_consecutive_whitespaces) + option_parser.context.active_parser = option_parser + option_parser.last_parsed_string = last_parsed_string + option_parser.last_non_whitespace_character = updated_parser.last_non_whitespace_character + option_json_schema_parsers.append(option_parser) + return UnionParser(option_json_schema_parsers) + + # For some performance optimizations to work, we want to make sure we don't leave irrelevant + # objects at the top of the stack, which we know will be passed over in the next timestep + new_object_stack = updated_parser.object_stack + while new_object_stack and new_object_stack[-1].can_end() and new_object_stack[-1].get_allowed_characters() == '': + finished_receiver = new_object_stack[-1] + if isinstance(finished_receiver, StringParsingState): + updated_parser.last_parsed_string = finished_receiver.parsed_string + del new_object_stack[-1] + if new_object_stack: + new_top_parser = new_object_stack[-1] + if isinstance(new_top_parser, ListParsingState): + new_top_parser = new_top_parser._clone() + new_top_parser.num_items_seen += 1 + new_object_stack[-1] = new_top_parser + + + return updated_parser + + def get_allowed_characters(self) -> str: + self.context.active_parser = self + + allowed_character_strs = [] + for parser in reversed(self.object_stack): + # Similar to SequenceParser, if the top object can end, we need to know to accept the next character of the parser below, etc. + allowed_character_strs.append(parser.get_allowed_characters()) + if not parser.can_end(): + break + if len(allowed_character_strs) > 0: + allowed_characters = "".join(allowed_character_strs) + else: + # In certain cases, beam search / sample crashes when there are fewer legal + # continuation tokens than there are beams. Therefore, we allow whitespace + # characters when the object stack is empty (= we are done parsing) + allowed_characters = WHITESPACE_CHARACTERS + + if self.num_consecutive_whitespaces >= self.config.max_consecutive_whitespaces: + # print("Filtering whitespace characters") + allowed_characters = "".join(c for c in allowed_characters if c not in WHITESPACE_CHARACTERS) + return allowed_characters + + def can_end(self) -> bool: + return all(parser.can_end() for parser in self.object_stack) + + def shortcut_key(self) -> Optional[Hashable]: + if self.object_stack: + current_parser = self.object_stack[-1] + if isinstance(current_parser, StringParsingState): + if not current_parser.allowed_strings and current_parser.seen_opening_quote and not current_parser.seen_closing_quote and not current_parser.regex_parser: + # Performance optimization: When we are parsing a string that is not from a list of allowed strings, most tokens + # are legal. The exploration can be more costly than the LM itself for large tokenizers (because this is pure Python), + # so we signal that we are in a "freetext" mode, and reuse the allowed token list throughout the run. 
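+ # The key also carries the current and min/max lengths, so the token-level layer can bound how many more characters a single token may add before the closing quote becomes legal.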
+ cur_len = len(current_parser.parsed_string) + min_len = current_parser.min_length or 0 + max_len = current_parser.max_length or sys.maxsize + assert min_len <= max_len, "Invalid schema for str: min length is larger than max length" + if cur_len < max_len: + return ('json_freetext', cur_len, min_len, max_len) + return None + + +class BaseParsingState(CharacterLevelParser): + def __init__(self, root: JsonSchemaParser): + self.root = root + + +def _merge_object_schemas(base_schema: JsonSchemaObject, option_schema: JsonSchemaObject) -> JsonSchemaObject: + base_schema_properties = base_schema.properties or {} + for property_name, property_value in base_schema_properties.items(): + # We assume that if a property exists in both base and option, the option version will be + # more specific, therefore we only take missing entries + if property_name not in option_schema.properties: + option_schema.properties[property_name] = property_value + for required_property in base_schema.required: + if required_property not in option_schema.required: + option_schema.required.append(required_property) + return option_schema + + +def get_parser( + parsing_state: JsonSchemaParser, + value_schema: JsonSchemaObject +) -> CharacterLevelParser: + if value_schema is None: + raise Exception("JsonSchemaParser: Value schema is None") + if value_schema.anyOf: + parsers = [get_parser(parsing_state, schema) for schema in value_schema.anyOf] + return UnionParser(parsers) + if value_schema.allOf: + merged_schema = value_schema.allOf[0] + for schema in value_schema.allOf[1:]: + merged_schema = _merge_object_schemas(merged_schema, schema) + return get_parser(parsing_state, merged_schema) + if value_schema.extras and 'const' in value_schema.extras: + allowed_value = value_schema.extras['const'] + is_string = type(allowed_value) == str + return StringParsingState(parsing_state, + [allowed_value], + require_opening_quote=is_string, + require_closing_quote=is_string) + if value_schema.type == "string": + return StringParsingState( + parsing_state, + value_schema.enum, + require_opening_quote=True, + min_length=value_schema.minLength, + max_length=value_schema.maxLength, + pattern=value_schema.pattern, + ) + if value_schema.oneOf: + # We create a combined object schema for each option that includes the information from the parent + # And then create a UnionParser based on the combined options + merged_schemas = [_merge_object_schemas(value_schema, option_schema) for option_schema in value_schema.oneOf] + object_parsing_options = [ObjectParsingState(merged_schema, parsing_state) for merged_schema in merged_schemas] + return UnionParser(object_parsing_options) + elif value_schema.type == "object": + return ObjectParsingState(value_schema, parsing_state) + elif value_schema.type == None and value_schema.ref: + value_class_name = value_schema.ref.split('/')[-1] + extras = parsing_state.context.model_class.extras + # Pydantic V1 and V2 have different names for the definitions field + if 'definitions' in extras: + definitions = extras['definitions'] + elif '$defs' in extras: + definitions = extras['$defs'] + else: + raise ValueError("No definitions found in schema") + class_dict = definitions[value_class_name] + value_schema = JsonSchemaObject(**class_dict) + return get_parser(parsing_state, value_schema) + elif value_schema.enum: + is_numeric = all(isinstance(i, (int, float)) for i in value_schema.enum) + is_string = all(isinstance(i, (str)) for i in value_schema.enum) + if is_string: + return StringParsingState( + parsing_state, + 
value_schema.enum, + require_opening_quote=True, + ) + elif is_numeric: + return StringParsingState( + parsing_state, + [str(i) for i in value_schema.enum], + require_opening_quote=False, + require_closing_quote=False, + ) + else: + raise Exception("Unsupported enum type " + str(value_schema.enum)) + elif value_schema.type == "integer": + return NumberParsingState(parsing_state, False) + elif value_schema.type == "boolean": + return StringParsingState( + parsing_state, + ["true", "false"], + require_opening_quote=False, + require_closing_quote=False, + ) + elif value_schema.type == "null": + return StringParsingState( + parsing_state, + ["null"], + require_opening_quote=False, + require_closing_quote=False, + ) + elif value_schema.type == "number": + return NumberParsingState(parsing_state, True) + elif value_schema.type == "array": + item_schema = value_schema.items or JsonSchemaParser.ANY_JSON_OBJECT_SCHEMA + return ListParsingState(parsing_state, item_schema, value_schema.minItems, value_schema.maxItems) + else: + raise Exception("Unsupported type " + str(value_schema.type)) + + +class ObjectParsingStage(enum.Enum): + START_OBJECT = "StartObject" + PARSING_KEY_OR_END = "ParsingKey" + PARSING_KEY_VALUE_SEPARATOR = "ParsingKeyValueSeparator" + PARSING_VALUE = "ParsingValue" + PARSING_SEPARATOR_OR_END = "ParsingSeparatorOrEnd" + END_OBJECT = "EndObject" + + +class ObjectParsingState(BaseParsingState): + schema_object: JsonSchemaObject + current_stage: ObjectParsingStage + existing_keys: List[str] + current_key: Optional[str] + is_dictionary: bool + + def __init__(self, schema_object: JsonSchemaObject, root: JsonSchemaParser): + super().__init__(root) + self.schema_object = schema_object + self.current_stage = ObjectParsingStage.START_OBJECT + self.root = root + self.existing_keys = [] + self.current_key = None + # Javascript objects represent both classes and dictionaries, so we need to know which one we are parsing + self.is_dictionary = self.schema_object.properties is None + + def clone(self) -> 'ObjectParsingState': + clone = ObjectParsingState(self.schema_object, self.root) + clone.current_stage = self.current_stage + clone.existing_keys = self.existing_keys[:] + clone.current_key = self.current_key + clone.is_dictionary = self.is_dictionary + return clone + + def add_character(self, new_character: str) -> CharacterLevelParser: + if new_character.strip() == "": + # In object scope, whitespaces can be ignored + return self + self = self.clone() # Immutability requirement + if ( + self.current_stage == ObjectParsingStage.START_OBJECT + and new_character == "{" + ): + self.current_stage = ObjectParsingStage.PARSING_KEY_OR_END + elif self.current_stage == ObjectParsingStage.PARSING_KEY_OR_END: + if new_character == "}": + self.current_stage = ObjectParsingStage.END_OBJECT + if new_character == '"': + possible_keys = None + if not self.is_dictionary: + required_keys = self.schema_object.required or [] + next_required_key = next((key for key in required_keys if key not in self.existing_keys), None) + if self.root.config.force_json_field_order and next_required_key: + possible_keys = [next_required_key] + else: + possible_keys = list(self.schema_object.properties.keys()) + possible_keys = list( + set(possible_keys).difference(self.existing_keys) + ) + # We send require_opening_quote=True and then add_character('"') instead of require_opening_quote=False + # Because there is a difference between "don't need a quote" and "received it before creating the parser" + key_parser = 
StringParsingState( + self.root, possible_keys, require_opening_quote=True, require_closing_quote=True + ) + key_parser = key_parser.add_character('"') + self.root.context.active_parser.object_stack.append(key_parser) + self.current_stage = ObjectParsingStage.PARSING_KEY_VALUE_SEPARATOR + elif self.current_stage == ObjectParsingStage.PARSING_KEY_VALUE_SEPARATOR: + if new_character == ":": + self.current_stage = ObjectParsingStage.PARSING_VALUE + self.current_key = self.root.context.active_parser.last_parsed_string + self.existing_keys.append(self.current_key) + if self.is_dictionary: + if self.schema_object.additionalProperties: + value_schema = self.schema_object.additionalProperties + else: + value_schema = JsonSchemaParser.ANY_JSON_OBJECT_SCHEMA + else: + value_schema = self.schema_object.properties[self.current_key] + self.current_key_parser = get_parser( + self.root, value_schema + ) + self.root.context.active_parser.object_stack.append(self.current_key_parser) + self.current_key_parser = None + elif self.current_stage == ObjectParsingStage.PARSING_VALUE: + # If we receive a character during value parsing, it means that it's the finishing character + # of the value parser + if new_character == '"': + self.current_stage = ObjectParsingStage.PARSING_SEPARATOR_OR_END + elif new_character == ",": + self.current_stage = ObjectParsingStage.PARSING_KEY_OR_END + elif new_character == "}": + self.current_stage = ObjectParsingStage.END_OBJECT + elif self.current_stage == ObjectParsingStage.PARSING_SEPARATOR_OR_END: + if new_character == ",": + self.current_stage = ObjectParsingStage.PARSING_KEY_OR_END + elif new_character == "}": + self.current_stage = ObjectParsingStage.END_OBJECT + return self + + def get_allowed_characters(self) -> str: + possible_keys = ( + list(self.schema_object.properties.keys()) + if not self.is_dictionary + else None + ) + required_keys = self.schema_object.required or [] + can_end = set(self.existing_keys).issuperset(required_keys) + can_parse_key = self.is_dictionary or set(possible_keys).difference( + self.existing_keys + ) + + possible_characters = [c for c in WHITESPACE_CHARACTERS] + if self.current_stage == ObjectParsingStage.START_OBJECT: + possible_characters.append('{') + elif self.current_stage == ObjectParsingStage.PARSING_KEY_OR_END: + if can_end: + possible_characters.append('}') + if can_parse_key: + possible_characters.append('"') + elif self.current_stage == ObjectParsingStage.PARSING_KEY_VALUE_SEPARATOR: + possible_characters.append(':') + elif self.current_stage == ObjectParsingStage.PARSING_VALUE: + # Sometimes the value parser considers finishing, so it needs to know which continuations are possible + if can_end: + possible_characters.append('}') + if can_parse_key: + possible_characters.append(',') + elif self.current_stage == ObjectParsingStage.PARSING_SEPARATOR_OR_END: + if can_end: + possible_characters.append('}') + if can_parse_key: + possible_characters.append(',') + return "".join(possible_characters) + + def can_end(self) -> bool: + return self.current_stage == ObjectParsingStage.END_OBJECT + + +class StringParsingStage: + START_TOKEN = "StartToken" + PARSING_STRING = "ParsingString" + END_TOKEN = "EndToken" + + +class PrimitiveParsingState(BaseParsingState): + def __init__(self, root: JsonSchemaParser): + super().__init__(root) + self.stage = StringParsingStage.START_TOKEN + self.parsed_string = "" + + def _clone(self) -> "PrimitiveParsingState": + raise NotImplementedError() + + def add_character(self, new_character: str) -> 
"PrimitiveParsingState": + new = self._clone() + new.parsed_string += new_character + return new + + def can_end(self) -> bool: + return True + + +class NumberParsingState(PrimitiveParsingState): + def __init__( + self, + root: JsonSchemaParser, + allow_floating_point: bool, + ): + super().__init__(root) + self.allow_floating_point = allow_floating_point + self.seen_decimal_point = False + self.seen_whitespace_after_digits = False + self.seen_exponent = False + self.seen_digit = False + + def _clone(self) -> "NumberParsingState": + clone = NumberParsingState(self.root, self.allow_floating_point) + clone.parsed_string = self.parsed_string + clone.seen_decimal_point = self.seen_decimal_point + clone.seen_whitespace_after_digits = self.seen_whitespace_after_digits + clone.seen_exponent = self.seen_exponent + clone.seen_digit = self.seen_digit + return clone + + def add_character(self, new_character: str) -> CharacterLevelParser: + if not self.parsed_string and new_character in WHITESPACE_CHARACTERS: + return self + self = cast(NumberParsingState, super().add_character(new_character)) + if new_character in WHITESPACE_CHARACTERS: + if self.parsed_string: + self.seen_whitespace_after_digits = True + return self + if new_character == ".": + if not self.parsed_string or len(self.parsed_string) == 1: + raise LMFormatEnforcerException("Numbers cannot start with a decimal point.") + if self.seen_decimal_point: + raise LMFormatEnforcerException("Numbers cannot contain more than two decimal points.") + self.seen_decimal_point = True + elif new_character in "eE": + if self.seen_exponent or not self.seen_digit: + raise LMFormatEnforcerException("Invalid number format") + self.seen_exponent = True + elif new_character.isdigit(): + self.seen_digit = True + return self + + def get_allowed_characters(self) -> str: + if self.seen_whitespace_after_digits: + return WHITESPACE_CHARACTERS + allowed_characters = "0123456789" + if not self.parsed_string: + allowed_characters += "-" + WHITESPACE_CHARACTERS + if self.parsed_string and len(self.parsed_string) == 1 and self.parsed_string[0] == "0": + allowed_characters = WHITESPACE_CHARACTERS + if self.parsed_string and len(self.parsed_string) == 2 and self.parsed_string == "-0": + allowed_characters = "." + WHITESPACE_CHARACTERS + if self.parsed_string and self.parsed_string[-1] in "eE": + allowed_characters += "-+" + if self.seen_digit and not self.seen_exponent: + allowed_characters += "eE" + if self.allow_floating_point and not self.seen_decimal_point and self.seen_digit and not self.seen_exponent: + allowed_characters += "." 
+ if self.parsed_string and self.parsed_string[-1].isdigit(): + allowed_characters += WHITESPACE_CHARACTERS + return allowed_characters + + def can_end(self) -> bool: + if self.seen_exponent and self.parsed_string[-1] in "eE+-": + return False + return bool(self.parsed_string) and (self.parsed_string[-1].isdigit() or self.seen_whitespace_after_digits) + + +class StringParsingState(PrimitiveParsingState): + allowed_strings: List[str] + parsed_string: str + seen_closing_quote: bool + seen_opening_quote: bool + min_length: Optional[int] + max_length: Optional[int] + pattern: Optional[str] + regex_parser: Optional[RegexParser] + + def __init__( + self, + root: JsonSchemaParser, + allowed_strings: List[str], + require_opening_quote: bool, + require_closing_quote: bool = True, + min_length: Optional[int]=None, + max_length: Optional[int]=None, + pattern: Optional[str]=None, + regex_parser: Optional[RegexParser]=None, + ): + super().__init__(root) + self.allowed_strings = allowed_strings + self.seen_closing_quote = False + self.seen_opening_quote = not require_opening_quote + self.require_closing_quote = require_closing_quote + self.require_opening_quote = require_opening_quote + self.min_length = min_length + self.max_length = max_length + self.pattern = pattern + if self.pattern and (self.min_length or self.max_length): + raise LMFormatEnforcerException("String schema contains both a pattern and a min/max length, which is not currently supported") + self.regex_parser = regex_parser + if self.pattern and not regex_parser: + if self.pattern not in self.root.context.regex_parser_cache: + self.root.context.regex_parser_cache[self.pattern] = RegexParser(self.pattern, self.root.config) + self.regex_parser = self.root.context.regex_parser_cache[self.pattern] + + + def _clone(self) -> "StringParsingState": + clone = StringParsingState( + self.root, + self.allowed_strings, + self.require_opening_quote, + self.require_closing_quote, + self.min_length, + self.max_length, + self.pattern, + self.regex_parser + ) + clone.parsed_string = self.parsed_string + clone.seen_closing_quote = self.seen_closing_quote + clone.seen_opening_quote = self.seen_opening_quote + return clone + + def add_character(self, new_character: str): + if (not self.parsed_string or self.seen_closing_quote) and new_character in WHITESPACE_CHARACTERS: + return self + self = cast(StringParsingState, super().add_character(new_character)) + if new_character == '"': + if not self.seen_opening_quote: + self.seen_opening_quote = True + self.parsed_string = "" + else: + self.seen_closing_quote = True + self.parsed_string = self.parsed_string[:-1] + if self.regex_parser and new_character != '"' and self.seen_opening_quote and not self.seen_closing_quote: + self.regex_parser = self.regex_parser.add_character(new_character) + if new_character == BACKSLASH: + # After a backslash we immediately have the escaping character, and if it's 'u', we have 4 hex digits + escaping_character_parsers: List[CharacterLevelParser] = [StringParser(c) for c in BACKSLASH_ESCAPING_CHARACTERS] + hex_digit_parser: CharacterLevelParser = UnionParser([StringParser(c) for c in "0123456789abcdefABCDEF"]) + unicode_components: List[CharacterLevelParser] = list([StringParser("u")] + [hex_digit_parser] * 4) + unicode_escape_parser: CharacterLevelParser = SequenceParser(unicode_components) + json_escaping_parser = UnionParser(escaping_character_parsers + [unicode_escape_parser]) + self.root.context.active_parser.object_stack.append(json_escaping_parser) + return self + + def 
get_allowed_characters(self) -> str: + if not self.seen_opening_quote: + return '"' + WHITESPACE_CHARACTERS + if self.seen_closing_quote: + return WHITESPACE_CHARACTERS + if self.regex_parser: + regex_chars = self.regex_parser.get_allowed_characters() + # We don't currently support regexes with quotes or escaping backslashes, so we remove them from the allowed characters + regex_chars = regex_chars.replace('"', '').replace(BACKSLASH, '') + if self.regex_parser.can_end(): + regex_chars += '"' + return regex_chars + if self.allowed_strings: + allowed_continuations = [ + s[len(self.parsed_string) :] + for s in self.allowed_strings + if s.startswith(self.parsed_string) + ] + allowed_next_characters = [allowed_continuation[0] for allowed_continuation in allowed_continuations if len(allowed_continuation) > 0] + allowed_next_characters = list(set(allowed_next_characters)) + if self.parsed_string in self.allowed_strings and self.require_closing_quote: + allowed_next_characters.append('"') + if (not self.parsed_string) and (not self.seen_opening_quote or not self.require_opening_quote): + allowed_next_characters.extend(WHITESPACE_CHARACTERS) + return "".join(allowed_next_characters) + else: + if self.min_length is not None and len(self.parsed_string) < self.min_length: + return self.root.context.alphabet_without_quotes + BACKSLASH + if self.max_length is not None and len(self.parsed_string) >= self.max_length: + return '"' + return self.root.config.alphabet + BACKSLASH + + def can_end(self) -> bool: + if self.require_closing_quote: + return self.seen_closing_quote + else: + if self.allowed_strings: + return self.parsed_string in self.allowed_strings + else: + return bool(self.parsed_string) + + +class ListParsingState(PrimitiveParsingState): + list_member_type: JsonSchemaObject + seen_list_opener: bool = False + seen_list_closer: bool = False + num_items_seen: int = 0 + + def __init__( + self, + root: JsonSchemaParser, + list_member_type: JsonSchemaObject, + min_items: Optional[int], + max_items: Optional[int], + ): + super().__init__(root) + self.list_member_type = list_member_type + self.min_items = min_items + self.max_items = max_items + default_max = root.config.max_json_array_length + if self.max_items is None and default_max > 0 and (min_items is None or min_items < default_max): + self.max_items = default_max + + def _clone(self) -> PrimitiveParsingState: + new = ListParsingState(self.root, self.list_member_type, self.min_items, self.max_items) + new.parsed_string = self.parsed_string + new.num_items_seen = self.num_items_seen + new.seen_list_opener = self.seen_list_opener + new.seen_list_closer = self.seen_list_closer + return new + + def add_character(self, new_character: str) -> "ListParsingState": + self = cast(ListParsingState, super().add_character(new_character)) + if new_character == "[": + self.seen_list_opener = True + item_parser = get_parser(self.root, self.list_member_type) + requires_items = self.min_items is not None and self.min_items > 0 + if requires_items: + parser_to_push = item_parser + else: + # If we don't require items, we can also end immediately, the Union + ForceStopParser combination achieves this + empty_list_parser = ForceStopParser(allow_whitespace=True) + if isinstance(item_parser, UnionParser): + item_parser.parsers.append(empty_list_parser) + parser_to_push = item_parser + else: + parser_to_push = UnionParser([item_parser, empty_list_parser]) + self.root.context.active_parser.object_stack.append(parser_to_push) + elif new_character == "]": + 
self.seen_list_closer = True + elif new_character == ",": + if not self.seen_list_closer: + self.num_items_seen += 1 + + self.root.context.active_parser.object_stack.append( + get_parser( + self.root, + self.list_member_type, + ) + ) + return self + + def get_allowed_characters(self) -> str: + if not self.seen_list_opener: + return "[" + WHITESPACE_CHARACTERS + elif not self.seen_list_closer: + return self.get_allowed_control_characters() + WHITESPACE_CHARACTERS + else: + return "" + + def can_end(self) -> bool: + return self.seen_list_closer + + def get_allowed_control_characters(self): + num_items = self.num_items_seen + top_parser = self.root.context.active_parser.object_stack[-1] + is_on_top = top_parser == self or isinstance(top_parser, UnionParser) and self in top_parser.parsers + if (not is_on_top) and self.root.context.active_parser.last_non_whitespace_character != "[": + # If there is an active parser above us, and the last character is not [, + # there is an active item parser on the stack that we did not count yet. + num_items += 1 + control_characters = "" + has_enough_items = self.min_items is None or num_items >= self.min_items + can_add_another_item = self.max_items is None or num_items < self.max_items + + if num_items > 0 and can_add_another_item: + control_characters += "," + if has_enough_items: + control_characters += "]" + return control_characters + diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/tokenenforcer.py b/deepseek/lib/python3.10/site-packages/lmformatenforcer/tokenenforcer.py new file mode 100644 index 0000000000000000000000000000000000000000..6b2534ed494ee517bf14cf2fe1b353dc76f23726 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lmformatenforcer/tokenenforcer.py @@ -0,0 +1,166 @@ +from dataclasses import dataclass, field +import sys +from typing import Callable, Dict, Hashable, List, Optional, Tuple, Union +import logging + +from .exceptions import LMFormatEnforcerException +from .characterlevelparser import CharacterLevelParser, ForceStopParser, CharacterLevelParserConfig +from .tokenizerprefixtree import TokenizerPrefixTree, TokenizerPrefixTreeNode + + +class TokenEnforcerTokenizerData: + """TokenEnforcerTokenizerData contains all of the preprocessing for preparing the TokenEnforcer to work with a + specific tokenizer. It does some calculations, so it is recommended to reuse it for multiple TokenEnforcers""" + def __init__(self, + regular_tokens: List[Tuple[int, str, bool]], + decoder: Callable[[List[int]], str], + eos_token_id: Union[int, List[int]]): + """ + Create the tokenizer data that the TokenEnforcer needs. This can be reused for multiple TokenEnforcers if they work with the same tokenizer. + :param regular_tokens: A list of tuples (token_id, token_string, is_new_word_token) for all the regular (not special) tokens in the tokenizer vocabulary. + Note that token_string is expected to include leading / trailing whitespaces if relevant. + :param decoder: A function that decodes a list of token ids into a string. + :param eos_token_id: The token id(s) of the end-of-string token(s). + """ + self.regular_tokens = regular_tokens + self.tokenizer_tree = TokenizerPrefixTree(regular_tokens) + self.decoder = decoder + self.eos_token_id = eos_token_id + self.tokenizer_alphabet = "".join(token_str for token_str in self.tokenizer_tree.root.children.keys() if len(token_str) == 1) + + +class TokenEnforcer: + """TokenEnforcer provides a token filtering mechanism, given a CharacterLevelParser and some information about the tokenizer. 
+ It is the main entry point for extending lm-format-enforcer to new inference libraries. See __init__() and get_allowed_tokens()""" + @dataclass + class OutputTensorState: + parser: CharacterLevelParser + allowed_tokens: List[int] = field(default_factory=list) + current_word_tokens: List[int] = field(default_factory=list) + + def __init__(self, tokenizer_data: TokenEnforcerTokenizerData, parser: CharacterLevelParser): + """ + Create a new TokenEnforcer. + :param tokenizer_data: Per tokenizer data that the token enforcer needs in order to operate. + :param parser: A CharacterLevelParser that defines the allowed strings. + """ + self.prefix_states: Dict[Tuple, TokenEnforcer.OutputTensorState] = {} + self.root_parser = parser + self.tokenizer_tree = tokenizer_data.tokenizer_tree + self.decoder = tokenizer_data.decoder + self.eos_token_id = tokenizer_data.eos_token_id + self.regular_tokens = tokenizer_data.regular_tokens + self.allowed_token_cache: Dict[Hashable, List[int]] = {} + + config = CharacterLevelParserConfig(alphabet=tokenizer_data.tokenizer_alphabet) + parser.config = config + + def get_allowed_tokens(self, token_sequence: List[int]) -> List[int]: + """ + Get a list of allowed tokens, given a list of tokens that were already generated. + :param token_sequence: The tokens that were already generated, and the next token will be generated for. + :return: A list of token ids that are allowed to be selected next. + """ + # In order to elegantly support beam search and batching, we don't store per-batch information. + # Instead, we store a hash of all the states (unique token tensors) we encountered so far. + # When we encounter a new unique token tensor, we find the token tensor that led to it, and continue from there. + sent_tuple = tuple(token_sequence) + prev_step_tuple = sent_tuple[:-1] + + if sent_tuple in self.prefix_states: + # We already calculated for this node, return cached list + return self.prefix_states[sent_tuple].allowed_tokens + elif prev_step_tuple not in self.prefix_states: + # We have not encountered the tensor up to the before-last entry. This means that this is the first call - the instruction / prompt tensor. + # Initialize the root node + state = TokenEnforcer.OutputTensorState(parser=self.root_parser) + self.prefix_states[sent_tuple] = state + self._compute_allowed_tokens(sent_tuple, state) + return state.allowed_tokens + else: + # Find the state that led to this node. 
We explicitly don't use the concept of "timestep" because of beam search + prev_step_state = self.prefix_states[prev_step_tuple] + new_state = self._apply_new_characters(prev_step_state, token_sequence) + self.prefix_states[sent_tuple] = new_state + self._compute_allowed_tokens(sent_tuple, new_state) + return new_state.allowed_tokens + + def _compute_allowed_tokens(self, state_tokens: Tuple, state: 'TokenEnforcer.OutputTensorState'): + try: + allowed_tokens: List[int] = [] + cache_key = state.parser.cache_key() + if cache_key is not None and cache_key in self.allowed_token_cache: + state.allowed_tokens = self.allowed_token_cache[cache_key] + return + shortcut_key = state.parser.shortcut_key() + self._collect_allowed_tokens(state.parser, self.tokenizer_tree.root, allowed_tokens, shortcut_key) + if state.parser.can_end(): + allowed_tokens.extend(self.eos_token_id if isinstance(self.eos_token_id, list) else [self.eos_token_id]) + if not allowed_tokens: + raise ValueError(f"Parser reached state with no allowed tokens") + # root_state = next(state for state in self.prefix_states.values() if state.parser == self.root_parser) + # print(f"Allowing {len(allowed_tokens)} tokens after {state.str_so_far[len(root_state.str_so_far):]}") + state.allowed_tokens = allowed_tokens + if cache_key is not None: + self.allowed_token_cache[cache_key] = allowed_tokens + except LMFormatEnforcerException: + # Getting an LMFormatEnforcerException means that we know what the user did wrong, + # and we can give a nice error message for them to fix. + raise + except Exception: + # Other exceptions are potential bugs and should be reported + logging.basicConfig(level=logging.ERROR) # Initialize if no loggers + prefix = self.decoder(list(state_tokens)) + logging.exception(f"Unknown LMFormatEnforcer Problem. Prefix: '{prefix}'\n" + "Terminating the parser. Please open an issue at \n" + "https://github.com/noamgat/lm-format-enforcer/issues with the prefix and " + "CharacterLevelParser parameters") + state.allowed_tokens = self.eos_token_id if isinstance(self.eos_token_id, list) else [self.eos_token_id] + + def _collect_allowed_tokens(self, parser: CharacterLevelParser, tree_node: TokenizerPrefixTreeNode, allowed_tokens: List[int], shortcut_key: Optional[Hashable]): + allowed_tokens.extend(tree_node.tokens) + allowed_characters = parser.get_allowed_characters() + relevant_characters = tree_node.children.keys() + # This next line is the heart of the traversal algorithm. We only explore paths that are shared by both the parser and the tokenizer. + characters_to_explore = set(relevant_characters).intersection(allowed_characters) + + # Performance optimization: If we are in JSON freetext, all of the tokens that don't contain quote, or end with quote, are legal, so we take + # their cached list. If the quote character is allowed, we only need to dynamically explore the cases where the string starts with a quote. + # This breaks the elegance of the API, but otherwise it is a huge performance hit. 
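+ # min_remaining / max_allowed_len below convert the schema's min/max string length into character budgets for a single appended token; the precomputed json_freetext cache is then queried with these bounds.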
+ if isinstance(shortcut_key, tuple) and shortcut_key[0] == 'json_freetext': + assert len(shortcut_key) == 4 + _, cur_len, min_len, max_len = shortcut_key + cache = self.tokenizer_tree.json_freetext_tokens + + min_remaining = min(cache.max_token_len, max(0, min_len - cur_len)) # no " allowed before this many chars + max_allowed_len = min(cache.max_token_len, max_len - cur_len) # max new characters allowed (before ") + + allowed_tokens.extend(cache.lookup_allowed_tokens(min_remaining, max_allowed_len)) + characters_to_explore = characters_to_explore.intersection(['"']) + + for character in characters_to_explore: + next_parser = parser.add_character(character) + next_tree_node = tree_node.children[character] + self._collect_allowed_tokens(next_parser, next_tree_node, allowed_tokens, None) + + def _apply_new_characters(self, state: 'TokenEnforcer.OutputTensorState', token_sequence: List[int]): + new_state = TokenEnforcer.OutputTensorState(parser=state.parser) + new_token = token_sequence[-1] + if new_token in self.tokenizer_tree.new_word_tokens: + new_state.current_word_tokens = [new_token] + new_characters = self.tokenizer_tree.tokens_to_strs[new_token] + else: + new_state.current_word_tokens = state.current_word_tokens + [new_token] + prev_decoded = self.decoder(state.current_word_tokens) + new_decoded = self.decoder(new_state.current_word_tokens) + new_characters = new_decoded[len(prev_decoded):] + for character in new_characters: + try: + new_state.parser = new_state.parser.add_character(character) + except Exception as e: + # This can happen in beam / batch scenarios, when some of the batches finished but others are continuing. + logging.debug(f"Received an invalid character '{character}', switching to ForceStopParser (Exception:{e})") + new_state.parser = ForceStopParser() + return new_state + + diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest1.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest1.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fb949af8bb9d83415bcee334a005cb2a6efecba8 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest1.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest10.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest10.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..89308384049d9fcb63a08a77cc076b243cf51f9e Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest10.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest11.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest11.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cb16e2a1e6d871fd434be5aa720c0ea6c32aabd4 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest11.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest12.cpython-310.pyc 
b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest12.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b458a167e69b893c2403dafb5dbe8bdb5b52377d Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest12.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest2.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5888c58dfee9bc99ff605978b4ee5be19e829aa0 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest2.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest3.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest3.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8bd03fd883dd8981fcfb2276f6087cf55d704d7c Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest3.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest4.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest4.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a16681c5f47e144ab917228c865a3ce8f7f75945 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest4.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest5.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest5.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..84da9498ae32e71f0d847396d71b9fdccd70994e Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest5.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest6.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest6.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..25c4634cfded652bf796874ae1607e7cf9009d3c Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest6.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest7.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest7.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ceec9a7d19b6a56e2359967b022798252787942d Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest7.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest8.cpython-310.pyc 
b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest8.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9cccfb44f389cae140fc5d2512d983dd0dc3a31a Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest8.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest9.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest9.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ae171cf5f2b257a9c257917f12b792bf4d8ec3e1 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/__pycache__/ruletest9.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/chaos_pendulum.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/chaos_pendulum.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b599f5cd2d23c33c6e51c8456679dad6f093c372 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/chaos_pendulum.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/double_pendulum.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/double_pendulum.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..765417d9fca8c1993433a29b62fa8401f9000642 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/double_pendulum.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/mass_spring_damper.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/mass_spring_damper.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9d98a86bcbd0b924420bbcd883b8eda97f5ff54d Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/mass_spring_damper.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/non_min_pendulum.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/non_min_pendulum.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bfc17141f05ae8000dd64e3ade33a2f4e1faabca Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/__pycache__/non_min_pendulum.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/chaos_pendulum.al b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/chaos_pendulum.al new file mode 100644 index 
0000000000000000000000000000000000000000..3bbb4d51b853bfd759df38d666a42adc1cbea190 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/chaos_pendulum.al @@ -0,0 +1,33 @@ +CONSTANTS G,LB,W,H +MOTIONVARIABLES' THETA'',PHI'',OMEGA',ALPHA' +NEWTONIAN N +BODIES A,B +SIMPROT(N,A,2,THETA) +SIMPROT(A,B,3,PHI) +POINT O +LA = (LB-H/2)/2 +P_O_AO> = LA*A3> +P_O_BO> = LB*A3> +OMEGA = THETA' +ALPHA = PHI' +W_A_N> = OMEGA*N2> +W_B_A> = ALPHA*A3> +V_O_N> = 0> +V2PTS(N, A, O, AO) +V2PTS(N, A, O, BO) +MASS A=MA, B=MB +IAXX = 1/12*MA*(2*LA)^2 +IAYY = IAXX +IAZZ = 0 +IBXX = 1/12*MB*H^2 +IBYY = 1/12*MB*(W^2+H^2) +IBZZ = 1/12*MB*W^2 +INERTIA A, IAXX, IAYY, IAZZ +INERTIA B, IBXX, IBYY, IBZZ +GRAVITY(G*N3>) +ZERO = FR() + FRSTAR() +KANE() +INPUT LB=0.2,H=0.1,W=0.2,MA=0.01,MB=0.1,G=9.81 +INPUT THETA = 90 DEG, PHI = 0.5 DEG, OMEGA=0, ALPHA=0 +INPUT TFINAL=10, INTEGSTP=0.02 +CODE DYNAMICS() some_filename.c diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/chaos_pendulum.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/chaos_pendulum.py new file mode 100644 index 0000000000000000000000000000000000000000..4435635720bb38f40366f55bb3ace0f6f6899284 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/chaos_pendulum.py @@ -0,0 +1,55 @@ +import sympy.physics.mechanics as _me +import sympy as _sm +import math as m +import numpy as _np + +g, lb, w, h = _sm.symbols('g lb w h', real=True) +theta, phi, omega, alpha = _me.dynamicsymbols('theta phi omega alpha') +theta_d, phi_d, omega_d, alpha_d = _me.dynamicsymbols('theta_ phi_ omega_ alpha_', 1) +theta_dd, phi_dd = _me.dynamicsymbols('theta_ phi_', 2) +frame_n = _me.ReferenceFrame('n') +body_a_cm = _me.Point('a_cm') +body_a_cm.set_vel(frame_n, 0) +body_a_f = _me.ReferenceFrame('a_f') +body_a = _me.RigidBody('a', body_a_cm, body_a_f, _sm.symbols('m'), (_me.outer(body_a_f.x,body_a_f.x),body_a_cm)) +body_b_cm = _me.Point('b_cm') +body_b_cm.set_vel(frame_n, 0) +body_b_f = _me.ReferenceFrame('b_f') +body_b = _me.RigidBody('b', body_b_cm, body_b_f, _sm.symbols('m'), (_me.outer(body_b_f.x,body_b_f.x),body_b_cm)) +body_a_f.orient(frame_n, 'Axis', [theta, frame_n.y]) +body_b_f.orient(body_a_f, 'Axis', [phi, body_a_f.z]) +point_o = _me.Point('o') +la = (lb-h/2)/2 +body_a_cm.set_pos(point_o, la*body_a_f.z) +body_b_cm.set_pos(point_o, lb*body_a_f.z) +body_a_f.set_ang_vel(frame_n, omega*frame_n.y) +body_b_f.set_ang_vel(body_a_f, alpha*body_a_f.z) +point_o.set_vel(frame_n, 0) +body_a_cm.v2pt_theory(point_o,frame_n,body_a_f) +body_b_cm.v2pt_theory(point_o,frame_n,body_a_f) +ma = _sm.symbols('ma') +body_a.mass = ma +mb = _sm.symbols('mb') +body_b.mass = mb +iaxx = 1/12*ma*(2*la)**2 +iayy = iaxx +iazz = 0 +ibxx = 1/12*mb*h**2 +ibyy = 1/12*mb*(w**2+h**2) +ibzz = 1/12*mb*w**2 +body_a.inertia = (_me.inertia(body_a_f, iaxx, iayy, iazz, 0, 0, 0), body_a_cm) +body_b.inertia = (_me.inertia(body_b_f, ibxx, ibyy, ibzz, 0, 0, 0), body_b_cm) +force_a = body_a.mass*(g*frame_n.z) +force_b = body_b.mass*(g*frame_n.z) +kd_eqs = [theta_d - omega, phi_d - alpha] +forceList = [(body_a.masscenter,body_a.mass*(g*frame_n.z)), (body_b.masscenter,body_b.mass*(g*frame_n.z))] +kane = _me.KanesMethod(frame_n, q_ind=[theta,phi], u_ind=[omega, alpha], kd_eqs = kd_eqs) +fr, frstar = kane.kanes_equations([body_a, body_b], forceList) +zero = fr+frstar +from pydy.system import System +sys = 
System(kane, constants = {g:9.81, lb:0.2, w:0.2, h:0.1, ma:0.01, mb:0.1}, +specifieds={}, +initial_conditions={theta:_np.deg2rad(90), phi:_np.deg2rad(0.5), omega:0, alpha:0}, +times = _np.linspace(0.0, 10, 10/0.02)) + +y=sys.integrate() diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/double_pendulum.al b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/double_pendulum.al new file mode 100644 index 0000000000000000000000000000000000000000..0b6d72a072e093a6cb048a0b7976041ee9c2f4f3 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/double_pendulum.al @@ -0,0 +1,25 @@ +MOTIONVARIABLES' Q{2}', U{2}' +CONSTANTS L,M,G +NEWTONIAN N +FRAMES A,B +SIMPROT(N, A, 3, Q1) +SIMPROT(N, B, 3, Q2) +W_A_N>=U1*N3> +W_B_N>=U2*N3> +POINT O +PARTICLES P,R +P_O_P> = L*A1> +P_P_R> = L*B1> +V_O_N> = 0> +V2PTS(N, A, O, P) +V2PTS(N, B, P, R) +MASS P=M, R=M +Q1' = U1 +Q2' = U2 +GRAVITY(G*N1>) +ZERO = FR() + FRSTAR() +KANE() +INPUT M=1,G=9.81,L=1 +INPUT Q1=.1,Q2=.2,U1=0,U2=0 +INPUT TFINAL=10, INTEGSTP=.01 +CODE DYNAMICS() some_filename.c diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/double_pendulum.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/double_pendulum.py new file mode 100644 index 0000000000000000000000000000000000000000..12c73c3b4b198399f4c45f5e00d556c859caff74 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/double_pendulum.py @@ -0,0 +1,39 @@ +import sympy.physics.mechanics as _me +import sympy as _sm +import math as m +import numpy as _np + +q1, q2, u1, u2 = _me.dynamicsymbols('q1 q2 u1 u2') +q1_d, q2_d, u1_d, u2_d = _me.dynamicsymbols('q1_ q2_ u1_ u2_', 1) +l, m, g = _sm.symbols('l m g', real=True) +frame_n = _me.ReferenceFrame('n') +frame_a = _me.ReferenceFrame('a') +frame_b = _me.ReferenceFrame('b') +frame_a.orient(frame_n, 'Axis', [q1, frame_n.z]) +frame_b.orient(frame_n, 'Axis', [q2, frame_n.z]) +frame_a.set_ang_vel(frame_n, u1*frame_n.z) +frame_b.set_ang_vel(frame_n, u2*frame_n.z) +point_o = _me.Point('o') +particle_p = _me.Particle('p', _me.Point('p_pt'), _sm.Symbol('m')) +particle_r = _me.Particle('r', _me.Point('r_pt'), _sm.Symbol('m')) +particle_p.point.set_pos(point_o, l*frame_a.x) +particle_r.point.set_pos(particle_p.point, l*frame_b.x) +point_o.set_vel(frame_n, 0) +particle_p.point.v2pt_theory(point_o,frame_n,frame_a) +particle_r.point.v2pt_theory(particle_p.point,frame_n,frame_b) +particle_p.mass = m +particle_r.mass = m +force_p = particle_p.mass*(g*frame_n.x) +force_r = particle_r.mass*(g*frame_n.x) +kd_eqs = [q1_d - u1, q2_d - u2] +forceList = [(particle_p.point,particle_p.mass*(g*frame_n.x)), (particle_r.point,particle_r.mass*(g*frame_n.x))] +kane = _me.KanesMethod(frame_n, q_ind=[q1,q2], u_ind=[u1, u2], kd_eqs = kd_eqs) +fr, frstar = kane.kanes_equations([particle_p, particle_r], forceList) +zero = fr+frstar +from pydy.system import System +sys = System(kane, constants = {l:1, m:1, g:9.81}, +specifieds={}, +initial_conditions={q1:.1, q2:.2, u1:0, u2:0}, +times = _np.linspace(0.0, 10, 10/.01)) + +y=sys.integrate() diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/mass_spring_damper.al 
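The `.al`/`.py` pairs in these test examples record an Autolev input alongside the Python code SymPy's Autolev parser is expected to emit for it. A sketch of exercising the parser directly, assuming the optional antlr4 Python runtime that `parse_autolev` requires is installed:

from sympy.parsing.autolev import parse_autolev

al_source = """
MOTIONVARIABLES' Q{2}', U{2}'
CONSTANTS L,M,G
"""
# Returns a string of SymPy-flavored Python, analogous to the .py fixtures here.
print(parse_autolev(al_source, include_numeric=False))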
b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/mass_spring_damper.al new file mode 100644 index 0000000000000000000000000000000000000000..4892e5ca8cb18cad6b14a2a37cbdc1f7fb8217ac --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/mass_spring_damper.al @@ -0,0 +1,19 @@ +CONSTANTS M,K,B,G +MOTIONVARIABLES' POSITION',SPEED' +VARIABLES O +FORCE = O*SIN(T) +NEWTONIAN CEILING +POINTS ORIGIN +V_ORIGIN_CEILING> = 0> +PARTICLES BLOCK +P_ORIGIN_BLOCK> = POSITION*CEILING1> +MASS BLOCK=M +V_BLOCK_CEILING>=SPEED*CEILING1> +POSITION' = SPEED +FORCE_MAGNITUDE = M*G-K*POSITION-B*SPEED+FORCE +FORCE_BLOCK>=EXPLICIT(FORCE_MAGNITUDE*CEILING1>) +ZERO = FR() + FRSTAR() +KANE() +INPUT TFINAL=10.0, INTEGSTP=0.01 +INPUT M=1.0, K=1.0, B=0.2, G=9.8, POSITION=0.1, SPEED=-1.0, O=2 +CODE DYNAMICS() dummy_file.c diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/mass_spring_damper.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/mass_spring_damper.py new file mode 100644 index 0000000000000000000000000000000000000000..8a5baab9642ff140e0ee81027a1e8f9152d7050c --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/mass_spring_damper.py @@ -0,0 +1,31 @@ +import sympy.physics.mechanics as _me +import sympy as _sm +import math as m +import numpy as _np + +m, k, b, g = _sm.symbols('m k b g', real=True) +position, speed = _me.dynamicsymbols('position speed') +position_d, speed_d = _me.dynamicsymbols('position_ speed_', 1) +o = _me.dynamicsymbols('o') +force = o*_sm.sin(_me.dynamicsymbols._t) +frame_ceiling = _me.ReferenceFrame('ceiling') +point_origin = _me.Point('origin') +point_origin.set_vel(frame_ceiling, 0) +particle_block = _me.Particle('block', _me.Point('block_pt'), _sm.Symbol('m')) +particle_block.point.set_pos(point_origin, position*frame_ceiling.x) +particle_block.mass = m +particle_block.point.set_vel(frame_ceiling, speed*frame_ceiling.x) +force_magnitude = m*g-k*position-b*speed+force +force_block = (force_magnitude*frame_ceiling.x).subs({position_d:speed}) +kd_eqs = [position_d - speed] +forceList = [(particle_block.point,(force_magnitude*frame_ceiling.x).subs({position_d:speed}))] +kane = _me.KanesMethod(frame_ceiling, q_ind=[position], u_ind=[speed], kd_eqs = kd_eqs) +fr, frstar = kane.kanes_equations([particle_block], forceList) +zero = fr+frstar +from pydy.system import System +sys = System(kane, constants = {m:1.0, k:1.0, b:0.2, g:9.8}, +specifieds={_me.dynamicsymbols('t'):lambda x, t: t, o:2}, +initial_conditions={position:0.1, speed:-1*1.0}, +times = _np.linspace(0.0, 10.0, 10.0/0.01)) + +y=sys.integrate() diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/non_min_pendulum.al b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/non_min_pendulum.al new file mode 100644 index 0000000000000000000000000000000000000000..74f5062d80926db7acd634a04759abce857087e5 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/non_min_pendulum.al @@ -0,0 +1,20 @@ +MOTIONVARIABLES' Q{2}'' +CONSTANTS L,M,G +NEWTONIAN N +POINT PN +V_PN_N> = 0> +THETA1 = ATAN(Q2/Q1) +FRAMES A +SIMPROT(N, A, 3, THETA1) +PARTICLES P +P_PN_P> = Q1*N1>+Q2*N2> +MASS P=M +V_P_N>=DT(P_P_PN>, N) +F_V = 
DOT(EXPRESS(V_P_N>,A), A1>) +GRAVITY(G*N1>) +DEPENDENT[1] = F_V +CONSTRAIN(DEPENDENT[Q1']) +ZERO=FR()+FRSTAR() +F_C = MAG(P_P_PN>)-L +CONFIG[1]=F_C +ZERO[2]=CONFIG[1] diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/non_min_pendulum.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/non_min_pendulum.py new file mode 100644 index 0000000000000000000000000000000000000000..fc972ebd518e77da5e1902c149f2699979865e7f --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/pydy-example-repo/non_min_pendulum.py @@ -0,0 +1,36 @@ +import sympy.physics.mechanics as _me +import sympy as _sm +import math as m +import numpy as _np + +q1, q2 = _me.dynamicsymbols('q1 q2') +q1_d, q2_d = _me.dynamicsymbols('q1_ q2_', 1) +q1_dd, q2_dd = _me.dynamicsymbols('q1_ q2_', 2) +l, m, g = _sm.symbols('l m g', real=True) +frame_n = _me.ReferenceFrame('n') +point_pn = _me.Point('pn') +point_pn.set_vel(frame_n, 0) +theta1 = _sm.atan(q2/q1) +frame_a = _me.ReferenceFrame('a') +frame_a.orient(frame_n, 'Axis', [theta1, frame_n.z]) +particle_p = _me.Particle('p', _me.Point('p_pt'), _sm.Symbol('m')) +particle_p.point.set_pos(point_pn, q1*frame_n.x+q2*frame_n.y) +particle_p.mass = m +particle_p.point.set_vel(frame_n, (point_pn.pos_from(particle_p.point)).dt(frame_n)) +f_v = _me.dot((particle_p.point.vel(frame_n)).express(frame_a), frame_a.x) +force_p = particle_p.mass*(g*frame_n.x) +dependent = _sm.Matrix([[0]]) +dependent[0] = f_v +velocity_constraints = [i for i in dependent] +u_q1_d = _me.dynamicsymbols('u_q1_d') +u_q2_d = _me.dynamicsymbols('u_q2_d') +kd_eqs = [q1_d-u_q1_d, q2_d-u_q2_d] +forceList = [(particle_p.point,particle_p.mass*(g*frame_n.x))] +kane = _me.KanesMethod(frame_n, q_ind=[q1,q2], u_ind=[u_q2_d], u_dependent=[u_q1_d], kd_eqs = kd_eqs, velocity_constraints = velocity_constraints) +fr, frstar = kane.kanes_equations([particle_p], forceList) +zero = fr+frstar +f_c = point_pn.pos_from(particle_p.point).magnitude()-l +config = _sm.Matrix([[0]]) +config[0] = f_c +zero = zero.row_insert(zero.shape[0], _sm.Matrix([[0]])) +zero[zero.shape[0]-1] = config[0] diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest1.al b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest1.al new file mode 100644 index 0000000000000000000000000000000000000000..457e79fd646677c0decdc69f921bc05e9e0dcf51 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest1.al @@ -0,0 +1,8 @@ +% ruletest1.al +CONSTANTS F = 3, G = 9.81 +CONSTANTS A, B +CONSTANTS S, S1, S2+, S3+, S4- +CONSTANTS K{4}, L{1:3}, P{1:2,1:3} +CONSTANTS C{2,3} +E1 = A*F + S2 - G +E2 = F^2 + K3*K2*G diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest1.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest1.py new file mode 100644 index 0000000000000000000000000000000000000000..8466392ac930f13f2419c9c04eef9dcc2884e9bd --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest1.py @@ -0,0 +1,15 @@ +import sympy.physics.mechanics as _me +import sympy as _sm +import math as m +import numpy as _np + +f = _sm.S(3) +g = _sm.S(9.81) +a, b = _sm.symbols('a b', real=True) +s, s1 = _sm.symbols('s s1', real=True) +s2, s3 = _sm.symbols('s2 s3', real=True, nonnegative=True) +s4 = _sm.symbols('s4', 
real=True, nonpositive=True) +k1, k2, k3, k4, l1, l2, l3, p11, p12, p13, p21, p22, p23 = _sm.symbols('k1 k2 k3 k4 l1 l2 l3 p11 p12 p13 p21 p22 p23', real=True) +c11, c12, c13, c21, c22, c23 = _sm.symbols('c11 c12 c13 c21 c22 c23', real=True) +e1 = a*f+s2-g +e2 = f**2+k3*k2*g diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest10.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest10.py new file mode 100644 index 0000000000000000000000000000000000000000..2b9674e47d5f6132c5a79a33b9d8d55a131942d6 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest10.py @@ -0,0 +1,64 @@ +import sympy.physics.mechanics as _me +import sympy as _sm +import math as m +import numpy as _np + +x, y = _me.dynamicsymbols('x y') +a, b = _sm.symbols('a b', real=True) +e = a*(b*x+y)**2 +m = _sm.Matrix([e,e]).reshape(2, 1) +e = e.expand() +m = _sm.Matrix([i.expand() for i in m]).reshape((m).shape[0], (m).shape[1]) +e = _sm.factor(e, x) +m = _sm.Matrix([_sm.factor(i,x) for i in m]).reshape((m).shape[0], (m).shape[1]) +eqn = _sm.Matrix([[0]]) +eqn[0] = a*x+b*y +eqn = eqn.row_insert(eqn.shape[0], _sm.Matrix([[0]])) +eqn[eqn.shape[0]-1] = 2*a*x-3*b*y +print(_sm.solve(eqn,x,y)) +rhs_y = _sm.solve(eqn,x,y)[y] +e = (x+y)**2+2*x**2 +e.collect(x) +a, b, c = _sm.symbols('a b c', real=True) +m = _sm.Matrix([a,b,c,0]).reshape(2, 2) +m2 = _sm.Matrix([i.subs({a:1,b:2,c:3}) for i in m]).reshape((m).shape[0], (m).shape[1]) +eigvalue = _sm.Matrix([i.evalf() for i in (m2).eigenvals().keys()]) +eigvec = _sm.Matrix([i[2][0].evalf() for i in (m2).eigenvects()]).reshape(m2.shape[0], m2.shape[1]) +frame_n = _me.ReferenceFrame('n') +frame_a = _me.ReferenceFrame('a') +frame_a.orient(frame_n, 'Axis', [x, frame_n.x]) +frame_a.orient(frame_n, 'Axis', [_sm.pi/2, frame_n.x]) +c1, c2, c3 = _sm.symbols('c1 c2 c3', real=True) +v = c1*frame_a.x+c2*frame_a.y+c3*frame_a.z +point_o = _me.Point('o') +point_p = _me.Point('p') +point_o.set_pos(point_p, c1*frame_a.x) +v = (v).express(frame_n) +point_o.set_pos(point_p, (point_o.pos_from(point_p)).express(frame_n)) +frame_a.set_ang_vel(frame_n, c3*frame_a.z) +print(frame_n.ang_vel_in(frame_a)) +point_p.v2pt_theory(point_o,frame_n,frame_a) +particle_p1 = _me.Particle('p1', _me.Point('p1_pt'), _sm.Symbol('m')) +particle_p2 = _me.Particle('p2', _me.Point('p2_pt'), _sm.Symbol('m')) +particle_p2.point.v2pt_theory(particle_p1.point,frame_n,frame_a) +point_p.a2pt_theory(particle_p1.point,frame_n,frame_a) +body_b1_cm = _me.Point('b1_cm') +body_b1_cm.set_vel(frame_n, 0) +body_b1_f = _me.ReferenceFrame('b1_f') +body_b1 = _me.RigidBody('b1', body_b1_cm, body_b1_f, _sm.symbols('m'), (_me.outer(body_b1_f.x,body_b1_f.x),body_b1_cm)) +body_b2_cm = _me.Point('b2_cm') +body_b2_cm.set_vel(frame_n, 0) +body_b2_f = _me.ReferenceFrame('b2_f') +body_b2 = _me.RigidBody('b2', body_b2_cm, body_b2_f, _sm.symbols('m'), (_me.outer(body_b2_f.x,body_b2_f.x),body_b2_cm)) +g = _sm.symbols('g', real=True) +force_p1 = particle_p1.mass*(g*frame_n.x) +force_p2 = particle_p2.mass*(g*frame_n.x) +force_b1 = body_b1.mass*(g*frame_n.x) +force_b2 = body_b2.mass*(g*frame_n.x) +z = _me.dynamicsymbols('z') +v = x*frame_a.x+y*frame_a.z +point_o.set_pos(point_p, x*frame_a.x+y*frame_a.y) +v = (v).subs({x:2*z, y:z}) +point_o.set_pos(point_p, (point_o.pos_from(point_p)).subs({x:2*z, y:z})) +force_o = -1*(x*y*frame_a.x) +force_p1 = particle_p1.mass*(g*frame_n.x)+ x*y*frame_a.x diff --git 
a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest11.al b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest11.al new file mode 100644 index 0000000000000000000000000000000000000000..60934c1ca563024828110bfe984a90d5686b89e4 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest11.al @@ -0,0 +1,6 @@ +VARIABLES X, Y +CONSTANTS A{1:2, 1:2}, B{1:2} +EQN[1] = A11*x + A12*y - B1 +EQN[2] = A21*x + A22*y - B2 +INPUT A11=2, A12=5, A21=3, A22=4, B1=7, B2=6 +CODE ALGEBRAIC(EQN, X, Y) some_filename.c diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest2.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest2.py new file mode 100644 index 0000000000000000000000000000000000000000..31c1d9974c2292466b805b91f8254bffaa94e2ac --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest2.py @@ -0,0 +1,22 @@ +import sympy.physics.mechanics as _me +import sympy as _sm +import math as m +import numpy as _np + +x1, x2 = _me.dynamicsymbols('x1 x2') +f1 = x1*x2+3*x1**2 +f2 = x1*_me.dynamicsymbols._t+x2*_me.dynamicsymbols._t**2 +x, y = _me.dynamicsymbols('x y') +x_d, y_d = _me.dynamicsymbols('x_ y_', 1) +y_dd = _me.dynamicsymbols('y_', 2) +q1, q2, q3, u1, u2 = _me.dynamicsymbols('q1 q2 q3 u1 u2') +p1, p2 = _me.dynamicsymbols('p1 p2') +p1_d, p2_d = _me.dynamicsymbols('p1_ p2_', 1) +w1, w2, w3, r1, r2 = _me.dynamicsymbols('w1 w2 w3 r1 r2') +w1_d, w2_d, w3_d, r1_d, r2_d = _me.dynamicsymbols('w1_ w2_ w3_ r1_ r2_', 1) +r1_dd, r2_dd = _me.dynamicsymbols('r1_ r2_', 2) +c11, c12, c21, c22 = _me.dynamicsymbols('c11 c12 c21 c22') +d11, d12, d13 = _me.dynamicsymbols('d11 d12 d13') +j1, j2 = _me.dynamicsymbols('j1 j2') +n = _sm.symbols('n') +n = _sm.I diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest3.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest3.py new file mode 100644 index 0000000000000000000000000000000000000000..23f79aa571337f200b3ff4d56b5747f7704985c0 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest3.py @@ -0,0 +1,37 @@ +import sympy.physics.mechanics as _me +import sympy as _sm +import math as m +import numpy as _np + +frame_a = _me.ReferenceFrame('a') +frame_b = _me.ReferenceFrame('b') +frame_n = _me.ReferenceFrame('n') +x1, x2, x3 = _me.dynamicsymbols('x1 x2 x3') +l = _sm.symbols('l', real=True) +v1 = x1*frame_a.x+x2*frame_a.y+x3*frame_a.z +v2 = x1*frame_b.x+x2*frame_b.y+x3*frame_b.z +v3 = x1*frame_n.x+x2*frame_n.y+x3*frame_n.z +v = v1+v2+v3 +point_c = _me.Point('c') +point_d = _me.Point('d') +point_po1 = _me.Point('po1') +point_po2 = _me.Point('po2') +point_po3 = _me.Point('po3') +particle_l = _me.Particle('l', _me.Point('l_pt'), _sm.Symbol('m')) +particle_p1 = _me.Particle('p1', _me.Point('p1_pt'), _sm.Symbol('m')) +particle_p2 = _me.Particle('p2', _me.Point('p2_pt'), _sm.Symbol('m')) +particle_p3 = _me.Particle('p3', _me.Point('p3_pt'), _sm.Symbol('m')) +body_s_cm = _me.Point('s_cm') +body_s_cm.set_vel(frame_n, 0) +body_s_f = _me.ReferenceFrame('s_f') +body_s = _me.RigidBody('s', body_s_cm, body_s_f, _sm.symbols('m'), (_me.outer(body_s_f.x,body_s_f.x),body_s_cm)) +body_r1_cm = _me.Point('r1_cm') +body_r1_cm.set_vel(frame_n, 0) +body_r1_f = _me.ReferenceFrame('r1_f') +body_r1 = _me.RigidBody('r1', body_r1_cm, body_r1_f, 
_sm.symbols('m'), (_me.outer(body_r1_f.x,body_r1_f.x),body_r1_cm)) +body_r2_cm = _me.Point('r2_cm') +body_r2_cm.set_vel(frame_n, 0) +body_r2_f = _me.ReferenceFrame('r2_f') +body_r2 = _me.RigidBody('r2', body_r2_cm, body_r2_f, _sm.symbols('m'), (_me.outer(body_r2_f.x,body_r2_f.x),body_r2_cm)) +v4 = x1*body_s_f.x+x2*body_s_f.y+x3*body_s_f.z +body_s_cm.set_pos(point_c, l*frame_n.x) diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest5.al b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest5.al new file mode 100644 index 0000000000000000000000000000000000000000..a859dc8bb1f0251af14809681d995c59b31377ba --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest5.al @@ -0,0 +1,32 @@ +% ruletest5.al +VARIABLES X', Y' + +E1 = (X+Y)^2 + (X-Y)^3 +E2 = (X-Y)^2 +E3 = X^2 + Y^2 + 2*X*Y + +M1 = [E1;E2] +M2 = [(X+Y)^2,(X-Y)^2] +M3 = M1 + [X;Y] + +AM = EXPAND(M1) +CM = EXPAND([(X+Y)^2,(X-Y)^2]) +EM = EXPAND(M1 + [X;Y]) +F = EXPAND(E1) +G = EXPAND(E2) + +A = FACTOR(E3, X) +BM = FACTOR(M1, X) +CM = FACTOR(M1 + [X;Y], X) + +A = D(E3, X) +B = D(E3, Y) +CM = D(M2, X) +DM = D(M1 + [X;Y], X) +FRAMES A, B +A_B = [1,0,0;1,0,0;1,0,0] +V1> = X*A1> + Y*A2> + X*Y*A3> +E> = D(V1>, X, B) +FM = DT(M1) +GM = DT([(X+Y)^2,(X-Y)^2]) +H> = DT(V1>, B) diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest6.al b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest6.al new file mode 100644 index 0000000000000000000000000000000000000000..7ec3ba61590e77772ae631237df048b932fe778c --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest6.al @@ -0,0 +1,41 @@ +% ruletest6.al +VARIABLES Q{2} +VARIABLES X,Y,Z +Q1 = X^2 + Y^2 +Q2 = X-Y +E = Q1 + Q2 +A = EXPLICIT(E) +E2 = COS(X) +E3 = COS(X*Y) +A = TAYLOR(E2, 0:2, X=0) +B = TAYLOR(E3, 0:2, X=0, Y=0) + +E = EXPAND((X+Y)^2) +A = EVALUATE(E, X=1, Y=Z) +BM = EVALUATE([E;2*E], X=1, Y=Z) + +E = Q1 + Q2 +A = EVALUATE(E, X=2, Y=Z^2) + +CONSTANTS J,K,L +P1 = POLYNOMIAL([J,K,L],X) +P2 = POLYNOMIAL(J*X+K,X,1) + +ROOT1 = ROOTS(P1, X, 2) +ROOT2 = ROOTS([1;2;3]) + +M = [1,2,3,4;5,6,7,8;9,10,11,12;13,14,15,16] + +AM = TRANSPOSE(M) + M +BM = EIG(M) +C1 = DIAGMAT(4, 1) +C2 = DIAGMAT(3, 4, 2) +DM = INV(M+C1) +E = DET(M+C1) + TRACE([1,0;0,1]) +F = ELEMENT(M, 2, 3) + +A = COLS(M) +BM = COLS(M, 1) +CM = COLS(M, 1, 2:4, 3) +DM = ROWS(M, 1) +EM = ROWS(M, 1, 2:4, 3) diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest6.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest6.py new file mode 100644 index 0000000000000000000000000000000000000000..85f1a0b49518bb0ae5766cbe91b9c24a1b8e9c20 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest6.py @@ -0,0 +1,36 @@ +import sympy.physics.mechanics as _me +import sympy as _sm +import math as m +import numpy as _np + +q1, q2 = _me.dynamicsymbols('q1 q2') +x, y, z = _me.dynamicsymbols('x y z') +e = q1+q2 +a = (e).subs({q1:x**2+y**2, q2:x-y}) +e2 = _sm.cos(x) +e3 = _sm.cos(x*y) +a = (e2).series(x, 0, 2).removeO() +b = (e3).series(x, 0, 2).removeO().series(y, 0, 2).removeO() +e = ((x+y)**2).expand() +a = (e).subs({q1:x**2+y**2,q2:x-y}).subs({x:1,y:z}) +bm = _sm.Matrix([i.subs({x:1,y:z}) for i in _sm.Matrix([e,2*e]).reshape(2, 1)]).reshape((_sm.Matrix([e,2*e]).reshape(2, 1)).shape[0], 
(_sm.Matrix([e,2*e]).reshape(2, 1)).shape[1]) +e = q1+q2 +a = (e).subs({q1:x**2+y**2,q2:x-y}).subs({x:2,y:z**2}) +j, k, l = _sm.symbols('j k l', real=True) +p1 = _sm.Poly(_sm.Matrix([j,k,l]).reshape(1, 3), x) +p2 = _sm.Poly(j*x+k, x) +root1 = [i.evalf() for i in _sm.solve(p1, x)] +root2 = [i.evalf() for i in _sm.solve(_sm.Poly(_sm.Matrix([1,2,3]).reshape(3, 1), x),x)] +m = _sm.Matrix([1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]).reshape(4, 4) +am = (m).T+m +bm = _sm.Matrix([i.evalf() for i in (m).eigenvals().keys()]) +c1 = _sm.diag(1,1,1,1) +c2 = _sm.Matrix([2 if i==j else 0 for i in range(3) for j in range(4)]).reshape(3, 4) +dm = (m+c1)**(-1) +e = (m+c1).det()+(_sm.Matrix([1,0,0,1]).reshape(2, 2)).trace() +f = (m)[1,2] +a = (m).cols +bm = (m).col(0) +cm = _sm.Matrix([(m).T.row(0),(m).T.row(1),(m).T.row(2),(m).T.row(3),(m).T.row(2)]) +dm = (m).row(0) +em = _sm.Matrix([(m).row(0),(m).row(1),(m).row(2),(m).row(3),(m).row(2)]) diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest9.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest9.py new file mode 100644 index 0000000000000000000000000000000000000000..09d8ae4ee8385bde5c38b946458a43c8ffdaa9b8 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/autolev/test-examples/ruletest9.py @@ -0,0 +1,55 @@ +import sympy.physics.mechanics as _me +import sympy as _sm +import math as m +import numpy as _np + +frame_n = _me.ReferenceFrame('n') +frame_a = _me.ReferenceFrame('a') +a = 0 +d = _me.inertia(frame_a, 1, 1, 1) +point_po1 = _me.Point('po1') +point_po2 = _me.Point('po2') +particle_p1 = _me.Particle('p1', _me.Point('p1_pt'), _sm.Symbol('m')) +particle_p2 = _me.Particle('p2', _me.Point('p2_pt'), _sm.Symbol('m')) +c1, c2, c3 = _me.dynamicsymbols('c1 c2 c3') +c1_d, c2_d, c3_d = _me.dynamicsymbols('c1_ c2_ c3_', 1) +body_r_cm = _me.Point('r_cm') +body_r_cm.set_vel(frame_n, 0) +body_r_f = _me.ReferenceFrame('r_f') +body_r = _me.RigidBody('r', body_r_cm, body_r_f, _sm.symbols('m'), (_me.outer(body_r_f.x,body_r_f.x),body_r_cm)) +point_po2.set_pos(particle_p1.point, c1*frame_a.x) +v = 2*point_po2.pos_from(particle_p1.point)+c2*frame_a.y +frame_a.set_ang_vel(frame_n, c3*frame_a.z) +v = 2*frame_a.ang_vel_in(frame_n)+c2*frame_a.y +body_r_f.set_ang_vel(frame_n, c3*frame_a.z) +v = 2*body_r_f.ang_vel_in(frame_n)+c2*frame_a.y +frame_a.set_ang_acc(frame_n, (frame_a.ang_vel_in(frame_n)).dt(frame_a)) +v = 2*frame_a.ang_acc_in(frame_n)+c2*frame_a.y +particle_p1.point.set_vel(frame_a, c1*frame_a.x+c3*frame_a.y) +body_r_cm.set_acc(frame_n, c2*frame_a.y) +v_a = _me.cross(body_r_cm.acc(frame_n), particle_p1.point.vel(frame_a)) +x_b_c = v_a +x_b_d = 2*x_b_c +a_b_c_d_e = x_b_d*2 +a_b_c = 2*c1*c2*c3 +a_b_c += 2*c1 +a_b_c = 3*c1 +q1, q2, u1, u2 = _me.dynamicsymbols('q1 q2 u1 u2') +q1_d, q2_d, u1_d, u2_d = _me.dynamicsymbols('q1_ q2_ u1_ u2_', 1) +x, y = _me.dynamicsymbols('x y') +x_d, y_d = _me.dynamicsymbols('x_ y_', 1) +x_dd, y_dd = _me.dynamicsymbols('x_ y_', 2) +yy = _me.dynamicsymbols('yy') +yy = x*x_d**2+1 +m = _sm.Matrix([[0]]) +m[0] = 2*x +m = m.row_insert(m.shape[0], _sm.Matrix([[0]])) +m[m.shape[0]-1] = 2*y +a = 2*m[0] +m = _sm.Matrix([1,2,3,4,5,6,7,8,9]).reshape(3, 3) +m[0,1] = 5 +a = m[0, 1]*2 +force_ro = q1*frame_n.x +torque_a = q2*frame_n.z +force_ro = q1*frame_n.x + q2*frame_n.y +f = force_ro*2 diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/__init__.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/__init__.py new 
file mode 100644 index 0000000000000000000000000000000000000000..c65e37cf3de2dddbcee0fa5c7eeac2fdc9f685db --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/__init__.py @@ -0,0 +1 @@ +"""Used for translating Fortran source code into a SymPy expression. """ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/__pycache__/__init__.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..04636f613f7bcd65f8af1651cf10f22d5d0a7120 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/__pycache__/__init__.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/__pycache__/fortran_parser.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/__pycache__/fortran_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c6390829f5fa7d3154638960aecc9bc6a9f23ec Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/__pycache__/fortran_parser.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/fortran_parser.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/fortran_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..504249f6119a59a90d91c5e989f893cffe20e643 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/fortran/fortran_parser.py @@ -0,0 +1,347 @@ +from sympy.external import import_module + +lfortran = import_module('lfortran') + +if lfortran: + from sympy.codegen.ast import (Variable, IntBaseType, FloatBaseType, String, + Return, FunctionDefinition, Assignment) + from sympy.core import Add, Mul, Integer, Float + from sympy.core.symbol import Symbol + + asr_mod = lfortran.asr + asr = lfortran.asr.asr + src_to_ast = lfortran.ast.src_to_ast + ast_to_asr = lfortran.semantic.ast_to_asr.ast_to_asr + + """ + This module contains all the classes and functions needed to parse + Fortran code into a SymPy expression. + + The module and its API are currently under development and experimental. + It also depends on LFortran, itself under development, for the ASR that is + converted to SymPy syntax. + The module only supports the features currently supported by the LFortran ASR, + and will be updated as the development of LFortran and this module progresses. + + You might find unexpected bugs and exceptions while using the module; feel free + to report them to the SymPy Issue Tracker. + + The API of the module might also change while in development if better and + more effective approaches are discovered. + + Features Supported + ================== + + - Variable Declarations (integers and reals) + - Function Definitions + - Assignments and Basic Binary Operations + + + Notes + ===== + + The module has one external dependency + + LFortran : Required to parse Fortran source code into ASR + + + References + ========== + + .. [1] https://github.com/sympy/sympy/issues + .. [2] https://gitlab.com/lfortran/lfortran + .. 
[3] https://docs.lfortran.org/ + + """ + + + class ASR2PyVisitor(asr.ASTVisitor): # type: ignore + """ + Visitor Class for LFortran ASR + + It is a Visitor class derived from asr.ASTVisitor which visits all the + nodes of the LFortran ASR and creates a corresponding AST node for each + ASR node + + """ + + def __init__(self): + """Initialize the Parser""" + self._py_ast = [] + + def visit_TranslationUnit(self, node): + """ + Function to visit all the elements of the Translation Unit + created by LFortran ASR + """ + for s in node.global_scope.symbols: + sym = node.global_scope.symbols[s] + self.visit(sym) + for item in node.items: + self.visit(item) + + def visit_Assignment(self, node): + """Visitor Function for Assignment + + Visits each Assignment in the LFortran ASR and creates the corresponding + assignment for SymPy. + + Notes + ===== + + The function currently only supports variable assignments and binary + operation assignments of arbitrary nesting. Numeric and array + assignments are not supported. + + Raises + ====== + + NotImplementedError() when called for Numeric assignments or Arrays + + """ + # TODO: Arithmetic Assignment + if isinstance(node.target, asr.Variable): + target = node.target + value = node.value + if isinstance(value, asr.Variable): + new_node = Assignment( + Variable( + target.name + ), + Variable( + value.name + ) + ) + elif (type(value) == asr.BinOp): + exp_ast = call_visitor(value) + for expr in exp_ast: + new_node = Assignment( + Variable(target.name), + expr + ) + else: + raise NotImplementedError("Numeric assignments not supported") + else: + raise NotImplementedError("Arrays not supported") + self._py_ast.append(new_node) + + def visit_BinOp(self, node): + """Visitor Function for Binary Operations + + Visits each binary operation present in the LFortran ASR (addition, + subtraction, multiplication, division) and creates the corresponding + operation node in SymPy's AST + + When binary operations are nested, the function calls call_visitor() + on the child nodes of the binary operations recursively until all the + operations have been processed. + + Notes + ===== + + The function currently only supports binary operations with Variables + or other binary operations. Numeric operands are not yet supported. 
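For instance, a Fortran expression like ``a + b*c`` arrives as nested operations, conceptually ``BinOp(Add, a, BinOp(Mul, b, c))``, and is folded bottom-up into ``Add(Symbol('a'), Mul(Symbol('b'), Symbol('c')))`` (the shape shown is illustrative; the exact node layout depends on the LFortran ASR version).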
+ + Raises + ====== + + NotImplementedError() when called for Numeric assignments + + """ + # TODO: Integer Binary Operations + op = node.op + lhs = node.left + rhs = node.right + + if (type(lhs) == asr.Variable): + left_value = Symbol(lhs.name) + elif (type(lhs) == asr.BinOp): + l_exp_ast = call_visitor(lhs) + for exp in l_exp_ast: + left_value = exp + else: + raise NotImplementedError("Numbers Currently not supported") + + if (type(rhs) == asr.Variable): + right_value = Symbol(rhs.name) + elif (type(rhs) == asr.BinOp): + r_exp_ast = call_visitor(rhs) + for exp in r_exp_ast: + right_value = exp + else: + raise NotImplementedError("Numbers Currently not supported") + + if isinstance(op, asr.Add): + new_node = Add(left_value, right_value) + elif isinstance(op, asr.Sub): + new_node = Add(left_value, -right_value) + elif isinstance(op, asr.Div): + new_node = Mul(left_value, 1/right_value) + elif isinstance(op, asr.Mul): + new_node = Mul(left_value, right_value) + + self._py_ast.append(new_node) + + def visit_Variable(self, node): + """Visitor Function for Variable Declaration + + Visits each variable declaration present in the ASR and creates a + Symbol declaration for each variable + + Notes + ===== + + The function currently only supports declarations of integer and + real variables. Other data types are still under development. + + Raises + ====== + + NotImplementedError() when called for unsupported data types + + """ + if isinstance(node.type, asr.Integer): + var_type = IntBaseType(String('integer')) + value = Integer(0) + elif isinstance(node.type, asr.Real): + var_type = FloatBaseType(String('real')) + value = Float(0.0) + else: + raise NotImplementedError("Data type not supported") + + if not (node.intent == 'in'): + new_node = Variable( + node.name + ).as_Declaration( + type = var_type, + value = value + ) + self._py_ast.append(new_node) + + def visit_Sequence(self, seq): + """Visitor Function for code sequence + + Visits a code sequence/block and calls the visitor function on all the + children of the code block to create the corresponding code in Python + + """ + if seq is not None: + for node in seq: + self._py_ast.append(call_visitor(node)) + + def visit_Num(self, node): + """Visitor Function for Numbers in ASR + + This function is currently under development and will be updated + with improvements in the LFortran ASR + + """ + # TODO: Numbers when the LFortran ASR is updated + # self._py_ast.append(Integer(node.n)) + pass + + def visit_Function(self, node): + """Visitor Function for Function Definitions + + Visits each function definition present in the ASR and creates a + function definition node in the Python AST with all the elements of the + given function + + The function declares all the variables required as SymPy symbols + before the function definition + + It also calls call_visitor() to parse the contents of + the function body + + """ + # TODO: Return statement, variable declaration + fn_args = [Variable(arg_iter.name) for arg_iter in node.args] + fn_body = [] + fn_name = node.name + for i in node.body: + fn_ast = call_visitor(i) + try: + fn_body_expr = fn_ast + except UnboundLocalError: + fn_body_expr = [] + for sym in node.symtab.symbols: + decl = call_visitor(node.symtab.symbols[sym]) + for symbols in decl: + fn_body.append(symbols) + for elem in fn_body_expr: + fn_body.append(elem) + fn_body.append( + Return( + Variable( + node.return_var.name + ) + ) + ) + if isinstance(node.return_var.type, asr.Integer): + ret_type = 
IntBaseType(String('integer')) + elif isinstance(node.return_var.type, asr.Real): + ret_type = FloatBaseType(String('real')) + else: + raise NotImplementedError("Data type not supported") + new_node = FunctionDefinition( + return_type = ret_type, + name = fn_name, + parameters = fn_args, + body = fn_body + ) + self._py_ast.append(new_node) + + def ret_ast(self): + """Returns the AST nodes""" + return self._py_ast +else: + class ASR2PyVisitor(): # type: ignore + def __init__(self, *args, **kwargs): + raise ImportError('lfortran not available') + +def call_visitor(fort_node): + """Calls the AST Visitor on the Module + + This function is used to call the AST visitor for a program or module + It imports all the required modules and calls the visit() function + on the given node + + Parameters + ========== + + fort_node : LFortran ASR object + Node for the operation for which the NodeVisitor is called + + Returns + ======= + + res_ast : list + list of SymPy AST Nodes + + """ + v = ASR2PyVisitor() + v.visit(fort_node) + res_ast = v.ret_ast() + return res_ast + + +def src_to_sympy(src): + """Wrapper function to convert the given Fortran source code to SymPy Expressions + + Parameters + ========== + + src : string + A string with the Fortran source code + + Returns + ======= + + py_src : string + A string with the Python source code compatible with SymPy + + """ + a_ast = src_to_ast(src, translation_unit=False) + a = ast_to_asr(a_ast) + py_src = call_visitor(a) + return py_src diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/__init__.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bb4c5aa8afe6fd818e136ec0797b7429e2da76cf --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/__init__.py @@ -0,0 +1,50 @@ +""" Rewrite Rules + +DISCLAIMER: This module is experimental. The interface is subject to change. + +A rule is a function that transforms one expression into another + + Rule :: Expr -> Expr + +A strategy is a function that says how a rule should be applied to a syntax +tree. In general strategies take rules and produce a new rule + + Strategy :: [Rules], Other-stuff -> Rule + +This allows developers to separate a mathematical transformation from the +algorithmic details of applying that transformation. The goal is to separate +the work of mathematical programming from algorithmic programming. + +Submodules + +strategies.rl - some fundamental rules +strategies.core - generic non-SymPy specific strategies +strategies.traverse - strategies that traverse a SymPy tree +strategies.tools - some conglomerate strategies that do depend on SymPy +""" + +from . import rl +from . import traverse +from .rl import rm_id, unpack, flatten, sort, glom, distribute, rebuild +from .util import new +from .core import ( + condition, debug, chain, null_safe, do_one, exhaust, minimize, tryit) +from .tools import canon, typed +from . 
import branch + +__all__ = [ + 'rl', + + 'traverse', + + 'rm_id', 'unpack', 'flatten', 'sort', 'glom', 'distribute', 'rebuild', + + 'new', + + 'condition', 'debug', 'chain', 'null_safe', 'do_one', 'exhaust', + 'minimize', 'tryit', + + 'canon', 'typed', + + 'branch', +] diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__init__.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fec5afe84a58f3d887a8c762692a3673a2b6d4c8 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__init__.py @@ -0,0 +1,14 @@ +from . import traverse +from .core import ( + condition, debug, multiplex, exhaust, notempty, + chain, onaction, sfilter, yieldify, do_one, identity) +from .tools import canon + +__all__ = [ + 'traverse', + + 'condition', 'debug', 'multiplex', 'exhaust', 'notempty', 'chain', + 'onaction', 'sfilter', 'yieldify', 'do_one', 'identity', + + 'canon', +] diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__pycache__/core.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__pycache__/core.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..34bc468d5585a37b1c700d727ac7a36ccc182894 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__pycache__/core.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__pycache__/tools.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__pycache__/tools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9564d0c6a68123f1ea06dbad9a88dc97ef745434 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__pycache__/tools.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__pycache__/traverse.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__pycache__/traverse.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f673a415a7ad84d9d886eefe928bb2e00157a55e Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/__pycache__/traverse.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/core.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/core.py new file mode 100644 index 0000000000000000000000000000000000000000..2dabaef69b60d994799f71414699223f84e1809b --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/core.py @@ -0,0 +1,116 @@ +""" Generic SymPy-Independent Strategies """ + + +def identity(x): + yield x + + +def exhaust(brule): + """ Apply a branching rule repeatedly until it has no effect """ + def exhaust_brl(expr): + seen = {expr} + for nexpr in brule(expr): + if nexpr not in seen: + seen.add(nexpr) + yield from exhaust_brl(nexpr) + if seen == {expr}: + yield expr + return exhaust_brl + + +def onaction(brule, fn): + def onaction_brl(expr): + for result in brule(expr): + if result != expr: + fn(brule, expr, result) + yield result + return onaction_brl + + +def debug(brule, file=None): + """ Print the input and output expressions at each rule application """ + if not file: + from sys import stdout + file = stdout + + def write(brl, expr, result): + file.write("Rule: %s\n" % brl.__name__) 
+ file.write("In: %s\nOut: %s\n\n" % (expr, result)) + + return onaction(brule, write) + + +def multiplex(*brules): + """ Multiplex many branching rules into one """ + def multiplex_brl(expr): + seen = set() + for brl in brules: + for nexpr in brl(expr): + if nexpr not in seen: + seen.add(nexpr) + yield nexpr + return multiplex_brl + + +def condition(cond, brule): + """ Only apply branching rule if condition is true """ + def conditioned_brl(expr): + if cond(expr): + yield from brule(expr) + else: + pass + return conditioned_brl + + +def sfilter(pred, brule): + """ Yield only those results which satisfy the predicate """ + def filtered_brl(expr): + yield from filter(pred, brule(expr)) + return filtered_brl + + +def notempty(brule): + def notempty_brl(expr): + yielded = False + for nexpr in brule(expr): + yielded = True + yield nexpr + if not yielded: + yield expr + return notempty_brl + + +def do_one(*brules): + """ Execute one of the branching rules """ + def do_one_brl(expr): + yielded = False + for brl in brules: + for nexpr in brl(expr): + yielded = True + yield nexpr + if yielded: + return + return do_one_brl + + +def chain(*brules): + """ + Compose a sequence of brules so that they apply to the expr sequentially + """ + def chain_brl(expr): + if not brules: + yield expr + return + + head, tail = brules[0], brules[1:] + for nexpr in head(expr): + yield from chain(*tail)(nexpr) + + return chain_brl + + +def yieldify(rl): + """ Turn a rule into a branching rule """ + def brl(expr): + yield rl(expr) + return brl diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__init__.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/__init__.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..19b58cc045a9d4236d3615fae1fadbd4427fc3b8 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/test_core.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/test_core.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2668bcf9c0daac3eec89a88f539da4fa31a2604f Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/test_core.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/test_tools.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/test_tools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2b4b56cec4bf29a55bf58b0ea2c89f8a7d47ae9e Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/test_tools.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/test_traverse.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/test_traverse.cpython-310.pyc new file mode 
100644 index 0000000000000000000000000000000000000000..69c9bee3d4a342ff5c49470e625fa99140f78ba9 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/__pycache__/test_traverse.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/test_core.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/test_core.py new file mode 100644 index 0000000000000000000000000000000000000000..ac620b0afb6dbadc4d97b29ddbb341cd920b6588 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/test_core.py @@ -0,0 +1,117 @@ +from sympy.strategies.branch.core import ( + exhaust, debug, multiplex, condition, notempty, chain, onaction, sfilter, + yieldify, do_one, identity) + + +def posdec(x): + if x > 0: + yield x - 1 + else: + yield x + + +def branch5(x): + if 0 < x < 5: + yield x - 1 + elif 5 < x < 10: + yield x + 1 + elif x == 5: + yield x + 1 + yield x - 1 + else: + yield x + + +def even(x): + return x % 2 == 0 + + +def inc(x): + yield x + 1 + + +def one_to_n(n): + yield from range(n) + + +def test_exhaust(): + brl = exhaust(branch5) + assert set(brl(3)) == {0} + assert set(brl(7)) == {10} + assert set(brl(5)) == {0, 10} + + +def test_debug(): + from io import StringIO + file = StringIO() + rl = debug(posdec, file) + list(rl(5)) + log = file.getvalue() + file.close() + + assert posdec.__name__ in log + assert '5' in log + assert '4' in log + + +def test_multiplex(): + brl = multiplex(posdec, branch5) + assert set(brl(3)) == {2} + assert set(brl(7)) == {6, 8} + assert set(brl(5)) == {4, 6} + + +def test_condition(): + brl = condition(even, branch5) + assert set(brl(4)) == set(branch5(4)) + assert set(brl(5)) == set() + + +def test_sfilter(): + brl = sfilter(even, one_to_n) + assert set(brl(10)) == {0, 2, 4, 6, 8} + + +def test_notempty(): + def ident_if_even(x): + if even(x): + yield x + + brl = notempty(ident_if_even) + assert set(brl(4)) == {4} + assert set(brl(5)) == {5} + + +def test_chain(): + assert list(chain()(2)) == [2] # identity + assert list(chain(inc, inc)(2)) == [4] + assert list(chain(branch5, inc)(4)) == [4] + assert set(chain(branch5, inc)(5)) == {5, 7} + assert list(chain(inc, branch5)(5)) == [7] + + +def test_onaction(): + L = [] + + def record(fn, input, output): + L.append((input, output)) + + list(onaction(inc, record)(2)) + assert L == [(2, 3)] + + list(onaction(identity, record)(2)) + assert L == [(2, 3)] + + +def test_yieldify(): + yinc = yieldify(lambda x: x + 1) + assert list(yinc(3)) == [4] + + +def test_do_one(): + def bad(expr): + raise ValueError + + assert list(do_one(inc)(3)) == [4] + assert list(do_one(inc, bad)(3)) == [4] + assert list(do_one(inc, posdec)(3)) == [4] diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/test_tools.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/test_tools.py new file mode 100644 index 0000000000000000000000000000000000000000..c2bd224030c337f0a000d94f6e7e65f3b8bd118f --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/test_tools.py @@ -0,0 +1,42 @@ +from sympy.strategies.branch.tools import canon +from sympy.core.basic import Basic +from sympy.core.numbers import Integer +from sympy.core.singleton import S + + +def posdec(x): + if isinstance(x, Integer) and x > 0: + yield x - 1 + else: + yield x + + +def branch5(x): + if isinstance(x, Integer): + if 0 < x < 5: + yield x - 1 + elif 5 < x < 10: + yield 
x + 1 + elif x == 5: + yield x + 1 + yield x - 1 + else: + yield x + + +def test_zero_ints(): + expr = Basic(S(2), Basic(S(5), S(3)), S(8)) + expected = {Basic(S(0), Basic(S(0), S(0)), S(0))} + + brl = canon(posdec) + assert set(brl(expr)) == expected + + +def test_split5(): + expr = Basic(S(2), Basic(S(5), S(3)), S(8)) + expected = { + Basic(S(0), Basic(S(0), S(0)), S(10)), + Basic(S(0), Basic(S(10), S(0)), S(10))} + + brl = canon(branch5) + assert set(brl(expr)) == expected diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/test_traverse.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/test_traverse.py new file mode 100644 index 0000000000000000000000000000000000000000..e051928210981223004de28b8c617d0438e11ac6 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tests/test_traverse.py @@ -0,0 +1,53 @@ +from sympy.core.basic import Basic +from sympy.core.numbers import Integer +from sympy.core.singleton import S +from sympy.strategies.branch.traverse import top_down, sall +from sympy.strategies.branch.core import do_one, identity + + +def inc(x): + if isinstance(x, Integer): + yield x + 1 + + +def test_top_down_easy(): + expr = Basic(S(1), S(2)) + expected = Basic(S(2), S(3)) + brl = top_down(inc) + + assert set(brl(expr)) == {expected} + + +def test_top_down_big_tree(): + expr = Basic(S(1), Basic(S(2)), Basic(S(3), Basic(S(4)), S(5))) + expected = Basic(S(2), Basic(S(3)), Basic(S(4), Basic(S(5)), S(6))) + brl = top_down(inc) + + assert set(brl(expr)) == {expected} + + +def test_top_down_harder_function(): + def split5(x): + if x == 5: + yield x - 1 + yield x + 1 + + expr = Basic(Basic(S(5), S(6)), S(1)) + expected = {Basic(Basic(S(4), S(6)), S(1)), Basic(Basic(S(6), S(6)), S(1))} + brl = top_down(split5) + + assert set(brl(expr)) == expected + + +def test_sall(): + expr = Basic(S(1), S(2)) + expected = Basic(S(2), S(3)) + brl = sall(inc) + + assert list(brl(expr)) == [expected] + + expr = Basic(S(1), S(2), Basic(S(3), S(4))) + expected = Basic(S(2), S(3), Basic(S(3), S(4))) + brl = sall(do_one(inc, identity)) + + assert list(brl(expr)) == [expected] diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tools.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..a6c9097323a7962080ae4497ead976818e386518 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/tools.py @@ -0,0 +1,12 @@ +from .core import exhaust, multiplex +from .traverse import top_down + + +def canon(*rules): + """ Strategy for canonicalization + + Apply each branching rule in a top-down fashion through the tree. + Multiplex through all branching rule traversals. + Keep doing this until there is no change. + """ + return exhaust(multiplex(*map(top_down, rules)))
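The branching ``canon`` above has no doctest of its own, so here is a minimal usage sketch (illustrative only, not part of the vendored file; it assumes the vendored ``sympy.strategies.branch.tools`` module and ``sympy`` itself are importable). A rule that branches fans out into every canonical outcome:

from sympy import Basic, S
from sympy.strategies.branch.tools import canon

def split5(x):
    # Branching rule: a 5 may step down to 4 or up to 6.
    if x == 5:
        yield x - 1
        yield x + 1

rule = canon(split5)
# canon() explores both branches at every position in the tree:
assert set(rule(Basic(S(5), S(1)))) == {Basic(S(4), S(1)), Basic(S(6), S(1))}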
+ """ + return exhaust(multiplex(*map(top_down, rules))) diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/traverse.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/traverse.py new file mode 100644 index 0000000000000000000000000000000000000000..28b7098dbda401fc0f0b6d27988d8c37e2f231ae --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/branch/traverse.py @@ -0,0 +1,25 @@ +""" Branching Strategies to Traverse a Tree """ +from itertools import product +from sympy.strategies.util import basic_fns +from .core import chain, identity, do_one + + +def top_down(brule, fns=basic_fns): + """ Apply a rule down a tree running it on the top nodes first """ + return chain(do_one(brule, identity), + lambda expr: sall(top_down(brule, fns), fns)(expr)) + + +def sall(brule, fns=basic_fns): + """ Strategic all - apply rule to args """ + op, new, children, leaf = map(fns.get, ('op', 'new', 'children', 'leaf')) + + def all_rl(expr): + if leaf(expr): + yield expr + else: + myop = op(expr) + argss = product(*map(brule, children(expr))) + for args in argss: + yield new(myop, *args) + return all_rl diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/core.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/core.py new file mode 100644 index 0000000000000000000000000000000000000000..75b75cb5f2e0693eea98a7b1c9b3e7f036ec26f6 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/core.py @@ -0,0 +1,151 @@ +""" Generic SymPy-Independent Strategies """ +from __future__ import annotations +from collections.abc import Callable, Mapping +from typing import TypeVar +from sys import stdout + + +_S = TypeVar('_S') +_T = TypeVar('_T') + + +def identity(x: _T) -> _T: + return x + + +def exhaust(rule: Callable[[_T], _T]) -> Callable[[_T], _T]: + """ Apply a rule repeatedly until it has no effect """ + def exhaustive_rl(expr: _T) -> _T: + new, old = rule(expr), expr + while new != old: + new, old = rule(new), new + return new + return exhaustive_rl + + +def memoize(rule: Callable[[_S], _T]) -> Callable[[_S], _T]: + """Memoized version of a rule + + Notes + ===== + + This cache can grow infinitely, so it is not recommended to use this + than ``functools.lru_cache`` unless you need very heavy computation. 
+ """ + cache: dict[_S, _T] = {} + + def memoized_rl(expr: _S) -> _T: + if expr in cache: + return cache[expr] + else: + result = rule(expr) + cache[expr] = result + return result + return memoized_rl + + +def condition( + cond: Callable[[_T], bool], rule: Callable[[_T], _T] +) -> Callable[[_T], _T]: + """ Only apply rule if condition is true """ + def conditioned_rl(expr: _T) -> _T: + if cond(expr): + return rule(expr) + return expr + return conditioned_rl + + +def chain(*rules: Callable[[_T], _T]) -> Callable[[_T], _T]: + """ + Compose a sequence of rules so that they apply to the expr sequentially + """ + def chain_rl(expr: _T) -> _T: + for rule in rules: + expr = rule(expr) + return expr + return chain_rl + + +def debug(rule, file=None): + """ Print out before and after expressions each time rule is used """ + if file is None: + file = stdout + + def debug_rl(*args, **kwargs): + expr = args[0] + result = rule(*args, **kwargs) + if result != expr: + file.write("Rule: %s\n" % rule.__name__) + file.write("In: %s\nOut: %s\n\n" % (expr, result)) + return result + return debug_rl + + +def null_safe(rule: Callable[[_T], _T | None]) -> Callable[[_T], _T]: + """ Return original expr if rule returns None """ + def null_safe_rl(expr: _T) -> _T: + result = rule(expr) + if result is None: + return expr + return result + return null_safe_rl + + +def tryit(rule: Callable[[_T], _T], exception) -> Callable[[_T], _T]: + """ Return original expr if rule raises exception """ + def try_rl(expr: _T) -> _T: + try: + return rule(expr) + except exception: + return expr + return try_rl + + +def do_one(*rules: Callable[[_T], _T]) -> Callable[[_T], _T]: + """ Try each of the rules until one works. Then stop. """ + def do_one_rl(expr: _T) -> _T: + for rl in rules: + result = rl(expr) + if result != expr: + return result + return expr + return do_one_rl + + +def switch( + key: Callable[[_S], _T], + ruledict: Mapping[_T, Callable[[_S], _S]] +) -> Callable[[_S], _S]: + """ Select a rule based on the result of key called on the function """ + def switch_rl(expr: _S) -> _S: + rl = ruledict.get(key(expr), identity) + return rl(expr) + return switch_rl + + +# XXX Untyped default argument for minimize function +# where python requires SupportsRichComparison type +def _identity(x): + return x + + +def minimize( + *rules: Callable[[_S], _T], + objective=_identity +) -> Callable[[_S], _T]: + """ Select result of rules that minimizes objective + + >>> from sympy.strategies import minimize + >>> inc = lambda x: x + 1 + >>> dec = lambda x: x - 1 + >>> rl = minimize(inc, dec) + >>> rl(4) + 3 + + >>> rl = minimize(inc, dec, objective=lambda x: -x) # maximize + >>> rl(4) + 5 + """ + def minrule(expr: _S) -> _T: + return min([rule(expr) for rule in rules], key=objective) + return minrule diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/rl.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/rl.py new file mode 100644 index 0000000000000000000000000000000000000000..c002f8b9aeb73015c1246b44ab0c11710c4d5a72 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/rl.py @@ -0,0 +1,176 @@ +""" Generic Rules for SymPy + +This file assumes knowledge of Basic and little else. +""" +from sympy.utilities.iterables import sift +from .util import new + + +# Functions that create rules +def rm_id(isid, new=new): + """ Create a rule to remove identities. + + isid - fn :: x -> Bool --- whether or not this element is an identity. 
diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/rl.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/rl.py new file mode 100644 index 0000000000000000000000000000000000000000..c002f8b9aeb73015c1246b44ab0c11710c4d5a72 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/rl.py @@ -0,0 +1,176 @@ +""" Generic Rules for SymPy + +This file assumes knowledge of Basic and little else. +""" +from sympy.utilities.iterables import sift +from .util import new + + +# Functions that create rules +def rm_id(isid, new=new): + """ Create a rule to remove identities. + + isid - fn :: x -> Bool --- whether or not this element is an identity. + + Examples + ======== + + >>> from sympy.strategies import rm_id + >>> from sympy import Basic, S + >>> remove_zeros = rm_id(lambda x: x==0) + >>> remove_zeros(Basic(S(1), S(0), S(2))) + Basic(1, 2) + >>> remove_zeros(Basic(S(0), S(0))) # If only identities then we keep one + Basic(0) + + See Also: + unpack + """ + def ident_remove(expr): + """ Remove identities """ + ids = list(map(isid, expr.args)) + if sum(ids) == 0: # No identities. Common case + return expr + elif sum(ids) != len(ids): # there is at least one non-identity + return new(expr.__class__, + *[arg for arg, x in zip(expr.args, ids) if not x]) + else: + return new(expr.__class__, expr.args[0]) + + return ident_remove + + +def glom(key, count, combine): + """ Create a rule to conglomerate identical args. + + Examples + ======== + + >>> from sympy.strategies import glom + >>> from sympy import Add + >>> from sympy.abc import x + + >>> key = lambda x: x.as_coeff_Mul()[1] + >>> count = lambda x: x.as_coeff_Mul()[0] + >>> combine = lambda cnt, arg: cnt * arg + >>> rl = glom(key, count, combine) + + >>> rl(Add(x, -x, 3*x, 2, 3, evaluate=False)) + 3*x + 5 + + How are ``key``, ``count`` and ``combine`` supposed to work? + + >>> key(2*x) + x + >>> count(2*x) + 2 + >>> combine(2, x) + 2*x + """ + def conglomerate(expr): + """ Conglomerate together identical args x + x -> 2x """ + groups = sift(expr.args, key) + counts = {k: sum(map(count, args)) for k, args in groups.items()} + newargs = [combine(cnt, mat) for mat, cnt in counts.items()] + if set(newargs) != set(expr.args): + return new(type(expr), *newargs) + else: + return expr + + return conglomerate + + +def sort(key, new=new): + """ Create a rule to sort by a key function. + + Examples + ======== + + >>> from sympy.strategies import sort + >>> from sympy import Basic, S + >>> sort_rl = sort(str) + >>> sort_rl(Basic(S(3), S(1), S(2))) + Basic(1, 2, 3) + """ + + def sort_rl(expr): + return new(expr.__class__, *sorted(expr.args, key=key)) + return sort_rl + + +def distribute(A, B): + """ Turns an A containing Bs into a B of As + + where A, B are container types + + >>> from sympy.strategies import distribute + >>> from sympy import Add, Mul, symbols + >>> x, y = symbols('x,y') + >>> dist = distribute(Mul, Add) + >>> expr = Mul(2, x+y, evaluate=False) + >>> expr + 2*(x + y) + >>> dist(expr) + 2*x + 2*y + """ + + def distribute_rl(expr): + for i, arg in enumerate(expr.args): + if isinstance(arg, B): + first, b, tail = expr.args[:i], expr.args[i], expr.args[i + 1:] + return B(*[A(*(first + (arg,) + tail)) for arg in b.args]) + return expr + return distribute_rl + + +def subs(a, b): + """ Replace expressions exactly """ + def subs_rl(expr): + if expr == a: + return b + else: + return expr + return subs_rl + + +# Functions that are rules +def unpack(expr): + """ Rule to unpack singleton args + + >>> from sympy.strategies import unpack + >>> from sympy import Basic, S + >>> unpack(Basic(S(2))) + 2 + """ + if len(expr.args) == 1: + return expr.args[0] + else: + return expr + + +def flatten(expr, new=new): + """ Flatten T(a, b, T(c, d), T2(e)) to T(a, b, c, d, T2(e)) """ + cls = expr.__class__ + args = [] + for arg in expr.args: + if arg.__class__ == cls: + args.extend(arg.args) + else: + args.append(arg) + return new(expr.__class__, *args) + + +def rebuild(expr): + """ Rebuild a SymPy tree. + + Explanation + =========== + + This function recursively calls constructors in the expression tree. + This forces canonicalization and removes ugliness introduced by the use of + Basic.__new__ + """ + if expr.is_Atom: + return expr + else: + return expr.func(*list(map(rebuild, expr.args)))
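``flatten`` in rl.py has no doctest; this sketch (illustrative only, not part of the vendored file, with a hypothetical subclass ``T``) shows that only children of the very same class are merged into their parent:

from sympy import Basic, S
from sympy.strategies.rl import flatten

class T(Basic):
    pass

expr = Basic(S(1), Basic(S(2), S(3)), T(S(4)))
# The inner Basic is spliced in; the T node is a different class and stays intact.
assert flatten(expr) == Basic(S(1), S(2), S(3), T(S(4)))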
diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/tests/test_tree.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/tests/test_tree.py new file mode 100644 index 0000000000000000000000000000000000000000..d5cdde747fe3ab90c8fd181701194403bc526067 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/tests/test_tree.py @@ -0,0 +1,92 @@ +from sympy.strategies.tree import treeapply, greedy, allresults, brute +from functools import partial, reduce + + +def inc(x): + return x + 1 + + +def dec(x): + return x - 1 + + +def double(x): + return 2 * x + + +def square(x): + return x**2 + + +def add(*args): + return sum(args) + + +def mul(*args): + return reduce(lambda a, b: a * b, args, 1) + + +def test_treeapply(): + tree = ([3, 3], [4, 1], 2) + assert treeapply(tree, {list: min, tuple: max}) == 3 + assert treeapply(tree, {list: add, tuple: mul}) == 60 + + +def test_treeapply_leaf(): + assert treeapply(3, {}, leaf=lambda x: x**2) == 9 + tree = ([3, 3], [4, 1], 2) + treep1 = ([4, 4], [5, 2], 3) + assert treeapply(tree, {list: min, tuple: max}, leaf=lambda x: x + 1) == \ + treeapply(treep1, {list: min, tuple: max}) + + +def test_treeapply_strategies(): + from sympy.strategies import chain, minimize + join = {list: chain, tuple: minimize} + + assert treeapply(inc, join) == inc + assert treeapply((inc, dec), join)(5) == minimize(inc, dec)(5) + assert treeapply([inc, dec], join)(5) == chain(inc, dec)(5) + tree = (inc, [dec, double]) # either inc or dec-then-double + assert treeapply(tree, join)(5) == 6 + assert treeapply(tree, join)(1) == 0 + + maximize = partial(minimize, objective=lambda x: -x) + join = {list: chain, tuple: maximize} + fn = treeapply(tree, join) + assert fn(4) == 6 # highest value comes from the dec then double + assert fn(1) == 2 # highest value comes from the inc + + +def test_greedy(): + tree = [inc, (dec, double)] # either inc or dec-then-double + + fn = greedy(tree, objective=lambda x: -x) + assert fn(4) == 6 # highest value comes from the dec then double + assert fn(1) == 2 # highest value comes from the inc + + tree = [inc, dec, [inc, dec, [(inc, inc), (dec, dec)]]] + lowest = greedy(tree) + assert lowest(10) == 8 + + highest = greedy(tree, objective=lambda x: -x) + assert highest(10) == 12 + + +def test_allresults(): + # square = lambda x: x**2 + + assert set(allresults(inc)(3)) == {inc(3)} + assert set(allresults([inc, dec])(3)) == {2, 4} + assert set(allresults((inc, dec))(3)) == {3} + assert set(allresults([inc, (dec, double)])(4)) == {5, 6} + + +def test_brute(): + tree = ([inc, dec], square) + fn = brute(tree, lambda x: -x) + + assert fn(2) == (2 + 1)**2 + assert fn(-2) == (-2 - 1)**2 + + assert brute(inc)(1) == 2
diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/tools.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..e6a94c16db57206d7c83c8a5e13930c4cffdde47 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/tools.py @@ -0,0 +1,53 @@ +from . import rl +from .core import do_one, exhaust, switch +from .traverse import top_down + + +def subs(d, **kwargs): + """ Full simultaneous exact substitution. + + Examples + ======== + + >>> from sympy.strategies.tools import subs + >>> from sympy import Basic, S + >>> mapping = {S(1): S(4), S(4): S(1), Basic(S(5)): Basic(S(6), S(7))} + >>> expr = Basic(S(1), Basic(S(2), S(3)), Basic(S(4), Basic(S(5)))) + >>> subs(mapping)(expr) + Basic(4, Basic(2, 3), Basic(1, Basic(6, 7))) + """ + if d: + return top_down(do_one(*map(rl.subs, *zip(*d.items()))), **kwargs) + else: + return lambda x: x + + +def canon(*rules, **kwargs): + """ Strategy for canonicalization. + + Explanation + =========== + + Apply each rule in a top_down fashion through the tree. + Do each one in turn. + Keep doing this until there is no change. + """ + return exhaust(top_down(exhaust(do_one(*rules)), **kwargs)) + + +def typed(ruletypes): + """ Apply rules based on the expression type + + inputs: + ruletypes -- a dict mapping {Type: rule} + + Examples + ======== + + >>> from sympy.strategies import rm_id, typed + >>> from sympy import Add, Mul + >>> rm_zeros = rm_id(lambda x: x==0) + >>> rm_ones = rm_id(lambda x: x==1) + >>> remove_idents = typed({Add: rm_zeros, Mul: rm_ones}) + """ + return switch(type, ruletypes) diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/traverse.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/traverse.py new file mode 100644 index 0000000000000000000000000000000000000000..869361f443742b5b7346c9c970f103b955e8473e --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/traverse.py @@ -0,0 +1,37 @@ +"""Strategies to Traverse a Tree.""" +from sympy.strategies.util import basic_fns +from sympy.strategies.core import chain, do_one + + +def top_down(rule, fns=basic_fns): + """Apply a rule down a tree running it on the top nodes first.""" + return chain(rule, lambda expr: sall(top_down(rule, fns), fns)(expr)) + + +def bottom_up(rule, fns=basic_fns): + """Apply a rule up a tree running it on the bottom nodes first.""" + return chain(lambda expr: sall(bottom_up(rule, fns), fns)(expr), rule) + + +def top_down_once(rule, fns=basic_fns): + """Apply a rule down a tree - stop on success.""" + return do_one(rule, lambda expr: sall(top_down(rule, fns), fns)(expr)) + + +def bottom_up_once(rule, fns=basic_fns): + """Apply a rule up a tree - stop on success.""" + return do_one(lambda expr: sall(bottom_up(rule, fns), fns)(expr), rule) + + +def sall(rule, fns=basic_fns): + """Strategic all - apply rule to args.""" + op, new, children, leaf = map(fns.get, ('op', 'new', 'children', 'leaf')) + + def all_rl(expr): + if leaf(expr): + return expr + else: + args = map(rule, children(expr)) + return new(op(expr), *args) + + return all_rl
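``bottom_up`` rewrites leaves before their parents, which lets a rule like ``rl.unpack`` collapse arbitrarily deep singleton wrappers in a single pass. A sketch (illustrative only, not part of the vendored file):

from sympy import Basic, S
from sympy.strategies.traverse import bottom_up
from sympy.strategies.rl import unpack

# Basic(Basic(S(2))) is unpacked from the inside out, leaving S(2).
assert bottom_up(unpack)(Basic(S(1), Basic(Basic(S(2))))) == Basic(S(1), S(2))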
diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/tree.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/tree.py new file mode 100644 index 0000000000000000000000000000000000000000..c2006fde4fc5d09f3d38baae4d7335b4cbd971b7 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/tree.py @@ -0,0 +1,140 @@ +from functools import partial +from sympy.strategies import chain, minimize +from sympy.strategies.core import identity +import sympy.strategies.branch as branch +from sympy.strategies.branch import yieldify + + +def treeapply(tree, join, leaf=identity): + """ Apply functions onto recursive containers (tree). + + Explanation + =========== + + join - a dictionary mapping container types to functions + e.g. ``{list: minimize, tuple: chain}`` + + Keys are containers/iterables. Values are functions [a] -> a. + + Examples + ======== + + >>> from sympy.strategies.tree import treeapply + >>> tree = [(3, 2), (4, 1)] + >>> treeapply(tree, {list: max, tuple: min}) + 2 + + >>> add = lambda *args: sum(args) + >>> def mul(*args): + ... total = 1 + ... for arg in args: + ... total *= arg + ... return total + >>> treeapply(tree, {list: mul, tuple: add}) + 25 + """ + for typ in join: + if isinstance(tree, typ): + return join[typ](*map(partial(treeapply, join=join, leaf=leaf), + tree)) + return leaf(tree) + + +def greedy(tree, objective=identity, **kwargs): + """ Execute a strategic tree. Select alternatives greedily + + Trees + ----- + + Nodes in a tree can be either + + function - a leaf + list - a selection among operations + tuple - a sequence of chained operations + + Textual examples + ---------------- + + Text: Run f, then run g, e.g. ``lambda x: g(f(x))`` + Code: ``(f, g)`` + + Text: Run either f or g, whichever minimizes the objective + Code: ``[f, g]`` + + Text: Run either f or g, whichever is better, then run h + Code: ``([f, g], h)`` + + Text: Either expand then simplify or try factor then foosimp. Finally print + Code: ``([(expand, simplify), (factor, foosimp)], print)`` + + Objective + --------- + + "Better" is determined by the objective keyword. This function makes + choices to minimize the objective. It defaults to the identity. + + Examples + ======== + + >>> from sympy.strategies.tree import greedy + >>> inc = lambda x: x + 1 + >>> dec = lambda x: x - 1 + >>> double = lambda x: 2*x + + >>> tree = [inc, (dec, double)] # either inc or dec-then-double + >>> fn = greedy(tree) + >>> fn(4) # lowest value comes from the inc + 5 + >>> fn(1) # lowest value comes from dec then double + 0 + + This function selects between the options in a list; the result that + minimizes the objective function is chosen. + + >>> fn = greedy(tree, objective=lambda x: -x) # maximize + >>> fn(4) # highest value comes from the dec then double + 6 + >>> fn(1) # highest value comes from the inc + 2 + + Greediness + ---------- + + This is a greedy algorithm. In the example: + + ([a, b], c) # do either a or b, then do c + + the choice between running ``a`` or ``b`` is made without foresight into ``c``. + """ + optimize = partial(minimize, objective=objective) + return treeapply(tree, {list: optimize, tuple: chain}, **kwargs) + + +def allresults(tree, leaf=yieldify): + """ Execute a strategic tree. Return all possibilities. + + Returns a lazy iterator of all possible results + + Exhaustiveness + -------------- + + This is an exhaustive algorithm. In the example + + ([a, b], [c, d]) + + All of the results from + + (a, c), (b, c), (a, d), (b, d) + + are returned. This can lead to combinatorial blowup. + + See sympy.strategies.greedy for details on input + """ + return treeapply(tree, {list: branch.multiplex, tuple: branch.chain}, + leaf=leaf) + + +def brute(tree, objective=identity, **kwargs): + """ Execute a strategic tree exhaustively; return the single result that minimizes the objective. """ + return lambda expr: min(tuple(allresults(tree, **kwargs)(expr)), + key=objective)
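The contrast between ``greedy`` and ``brute`` above is worth one concrete case (illustrative only, not part of the vendored file): greedy commits to the locally best branch, while brute inspects every end result.

from sympy.strategies.tree import greedy, brute

inc = lambda x: x + 1
dec = lambda x: x - 1
square = lambda x: x**2

tree = ([inc, dec], square)   # choose inc or dec, then square
assert greedy(tree)(-1) == 4  # dec wins locally (-2 beats 0), squaring to 4
assert brute(tree)(-1) == 0   # brute sees that inc -> 0 -> 0 is best overall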
diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/util.py b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/util.py new file mode 100644 index 0000000000000000000000000000000000000000..13aab5f6a49650c5ded9cd913c23c6682f18d40a --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/strategies/util.py @@ -0,0 +1,19 @@ +from sympy.core.basic import Basic + +new = Basic.__new__ + + +def assoc(d, k, v): + # Return a copy of d with key k bound to v. + d = d.copy() + d[k] = v + return d + + +basic_fns = {'op': type, + 'new': Basic.__new__, + 'leaf': lambda x: not isinstance(x, Basic) or x.is_Atom, + 'children': lambda x: x.args} + +# expr_fns rebuilds nodes through the evaluating constructor instead of Basic.__new__. +expr_fns = assoc(basic_fns, 'new', lambda op, *args: op(*args))
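``expr_fns`` at the end of util.py swaps ``Basic.__new__`` for the evaluating constructor, so traversals built on it re-evaluate whatever they rebuild. A closing sketch (illustrative only, not part of the vendored file):

from sympy import Add, Symbol
from sympy.strategies.traverse import bottom_up
from sympy.strategies.util import expr_fns

x = Symbol('x')
expr = Add(x, x, evaluate=False)  # stays as x + x under basic_fns
# Rebuilding through expr_fns lets Add evaluate: x + x -> 2*x
assert bottom_up(lambda e: e, fns=expr_fns)(expr) == 2*x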