index int64 0 0 | repo_id stringclasses 596 values | file_path stringlengths 31 168 | content stringlengths 1 6.2M |
|---|---|---|---|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chains/test_self_ask_with_search.py | """Integration test for self ask with search."""
from langchain.agents.self_ask_with_search.base import SelfAskWithSearchChain
from langchain_community.llms.openai import OpenAI
from langchain_community.utilities.searchapi import SearchApiAPIWrapper
def test_self_ask_with_search() -> None:
    """Test functionality on a prompt."""
    search_chain = SelfAskWithSearchChain(
        llm=OpenAI(temperature=0),
        search_chain=SearchApiAPIWrapper(),
        input_key="q",
        output_key="a",
    )
    raw_answer = search_chain.run(
        "What is the hometown of the reigning men's U.S. Open champion?"
    )
    # The chain may emit intermediate reasoning lines; only the final line
    # carries the answer.
    assert raw_answer.split("\n")[-1] == "Belgrade, Serbia"
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chains/test_retrieval_qa_with_sources.py | """Test RetrievalQA functionality."""
from pathlib import Path

from langchain.chains import RetrievalQAWithSourcesChain
from langchain.chains.loading import load_chain
from langchain_community.document_loaders import DirectoryLoader
from langchain_community.embeddings.openai import OpenAIEmbeddings
from langchain_community.llms import OpenAI
from langchain_community.vectorstores import FAISS
from langchain_text_splitters.character import CharacterTextSplitter
def test_retrieval_qa_with_sources_chain_saving_loading(tmp_path: Path) -> None:
    """Test saving and loading.

    Builds a FAISS index over local docs, runs a sources-returning QA chain,
    round-trips it through YAML serialization, and checks that the
    chain-type constructor produces the same result keys.
    """
    loader = DirectoryLoader("docs/extras/modules/", glob="*.txt")
    documents = loader.load()
    text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
    texts = text_splitter.split_documents(documents)
    embeddings = OpenAIEmbeddings()
    docsearch = FAISS.from_documents(texts, embeddings)
    qa = RetrievalQAWithSourcesChain.from_llm(
        llm=OpenAI(), retriever=docsearch.as_retriever()
    )
    result = qa("What did the president say about Ketanji Brown Jackson?")
    assert "question" in result.keys()
    assert "answer" in result.keys()
    assert "sources" in result.keys()
    # pytest's tmp_path fixture is a pathlib.Path (the original annotated it
    # as str); join with the / operator instead of string concatenation.
    file_path = str(tmp_path / "RetrievalQAWithSourcesChain.yaml")
    qa.save(file_path=file_path)
    qa_loaded = load_chain(file_path, retriever=docsearch.as_retriever())
    assert qa_loaded == qa
    qa2 = RetrievalQAWithSourcesChain.from_chain_type(
        llm=OpenAI(), retriever=docsearch.as_retriever(), chain_type="stuff"
    )
    result2 = qa2("What did the president say about Ketanji Brown Jackson?")
    assert "question" in result2.keys()
    assert "answer" in result2.keys()
    assert "sources" in result2.keys()
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chains/test_retrieval_qa.py | """Test RetrievalQA functionality."""
from pathlib import Path
from langchain.chains import RetrievalQA
from langchain.chains.loading import load_chain
from langchain_text_splitters.character import CharacterTextSplitter
from langchain_community.document_loaders import TextLoader
from langchain_community.embeddings.openai import OpenAIEmbeddings
from langchain_community.llms import OpenAI
from langchain_community.vectorstores import FAISS
def test_retrieval_qa_saving_loading(tmp_path: Path) -> None:
    """Test saving and loading."""
    docs = TextLoader("docs/extras/modules/state_of_the_union.txt").load()
    chunks = CharacterTextSplitter(
        chunk_size=1000, chunk_overlap=0
    ).split_documents(docs)
    store = FAISS.from_documents(chunks, OpenAIEmbeddings())
    chain = RetrievalQA.from_llm(llm=OpenAI(), retriever=store.as_retriever())
    # Exercise the chain once before serializing it.
    chain.run("What did the president say about Ketanji Brown Jackson?")
    saved_to = tmp_path / "RetrievalQA_chain.yaml"
    chain.save(file_path=saved_to)
    restored = load_chain(saved_to, retriever=store.as_retriever())
    assert restored == chain
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chains/test_ontotext_graphdb_qa.py | from unittest.mock import MagicMock, Mock
import pytest
from langchain.chains import LLMChain
from langchain_community.chains.graph_qa.ontotext_graphdb import OntotextGraphDBQAChain
from langchain_community.graphs import OntotextGraphDBGraph
"""
cd libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb
./start.sh
"""
@pytest.mark.requires("langchain_openai", "rdflib")
@pytest.mark.parametrize("max_fix_retries", [-2, -1, 0, 1, 2])
def test_valid_sparql(max_fix_retries: int) -> None:
    """A valid generated query is executed directly, with no fix retries."""
    from langchain_openai import ChatOpenAI

    question = "What is Luke Skywalker's home planet?"
    answer = "Tatooine"
    graph = OntotextGraphDBGraph(
        query_endpoint="http://localhost:7200/repositories/starwars",
        query_ontology="CONSTRUCT {?s ?p ?o} "
        "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
    )
    chain = OntotextGraphDBQAChain.from_llm(
        Mock(ChatOpenAI), graph=graph, max_fix_retries=max_fix_retries
    )
    # Replace every sub-chain with a mock whose output key matches LLMChain's.
    for name in ("sparql_generation_chain", "sparql_fix_chain", "qa_chain"):
        sub_chain = Mock(LLMChain)
        sub_chain.output_key = "text"
        sub_chain.invoke = MagicMock()
        setattr(chain, name, sub_chain)
    chain.sparql_generation_chain.invoke.return_value = {
        "text": "SELECT * {?s ?p ?o} LIMIT 1",
        "prompt": question,
        "schema": "",
    }
    chain.qa_chain.invoke.return_value = {
        "text": answer,
        "prompt": question,
        "context": [],
    }
    result = chain.invoke({chain.input_key: question})
    assert chain.sparql_generation_chain.invoke.call_count == 1
    assert chain.sparql_fix_chain.invoke.call_count == 0
    assert chain.qa_chain.invoke.call_count == 1
    assert result == {chain.output_key: answer, chain.input_key: question}
@pytest.mark.requires("langchain_openai", "rdflib")
@pytest.mark.parametrize("max_fix_retries", [-2, -1, 0])
def test_invalid_sparql_non_positive_max_fix_retries(
    max_fix_retries: int,
) -> None:
    """With no retries allowed, an invalid generated query raises at once."""
    from langchain_openai import ChatOpenAI

    question = "What is Luke Skywalker's home planet?"
    graph = OntotextGraphDBGraph(
        query_endpoint="http://localhost:7200/repositories/starwars",
        query_ontology="CONSTRUCT {?s ?p ?o} "
        "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
    )
    chain = OntotextGraphDBQAChain.from_llm(
        Mock(ChatOpenAI), graph=graph, max_fix_retries=max_fix_retries
    )
    for name in ("sparql_generation_chain", "sparql_fix_chain", "qa_chain"):
        sub_chain = Mock(LLMChain)
        sub_chain.output_key = "text"
        sub_chain.invoke = MagicMock()
        setattr(chain, name, sub_chain)
    # The markdown fences make the generated query unparseable as SPARQL.
    chain.sparql_generation_chain.invoke.return_value = {
        "text": "```sparql SELECT * {?s ?p ?o} LIMIT 1```",
        "prompt": question,
        "schema": "",
    }
    with pytest.raises(ValueError) as e:
        chain.invoke({chain.input_key: question})
    assert str(e.value) == "The generated SPARQL query is invalid."
    assert chain.sparql_generation_chain.invoke.call_count == 1
    assert chain.sparql_fix_chain.invoke.call_count == 0
    assert chain.qa_chain.invoke.call_count == 0
@pytest.mark.requires("langchain_openai", "rdflib")
@pytest.mark.parametrize("max_fix_retries", [1, 2, 3])
def test_valid_sparql_after_first_retry(max_fix_retries: int) -> None:
    """An invalid first query repaired on the first fix attempt succeeds."""
    from langchain_openai import ChatOpenAI

    question = "What is Luke Skywalker's home planet?"
    answer = "Tatooine"
    generated_invalid_sparql = "```sparql SELECT * {?s ?p ?o} LIMIT 1```"
    parse_error = (
        "pyparsing.exceptions.ParseException: "
        "Expected {SelectQuery | ConstructQuery | DescribeQuery | AskQuery}, "
        "found '`' (at char 0), (line:1, col:1)"
    )
    graph = OntotextGraphDBGraph(
        query_endpoint="http://localhost:7200/repositories/starwars",
        query_ontology="CONSTRUCT {?s ?p ?o} "
        "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
    )
    chain = OntotextGraphDBQAChain.from_llm(
        Mock(ChatOpenAI), graph=graph, max_fix_retries=max_fix_retries
    )
    for name in ("sparql_generation_chain", "sparql_fix_chain", "qa_chain"):
        sub_chain = Mock(LLMChain)
        sub_chain.output_key = "text"
        sub_chain.invoke = MagicMock()
        setattr(chain, name, sub_chain)
    chain.sparql_generation_chain.invoke.return_value = {
        "text": generated_invalid_sparql,
        "prompt": question,
        "schema": "",
    }
    # The fix chain immediately returns a parseable query.
    chain.sparql_fix_chain.invoke.return_value = {
        "text": "SELECT * {?s ?p ?o} LIMIT 1",
        "error_message": parse_error,
        "generated_sparql": generated_invalid_sparql,
        "schema": "",
    }
    chain.qa_chain.invoke.return_value = {
        "text": answer,
        "prompt": question,
        "context": [],
    }
    result = chain.invoke({chain.input_key: question})
    assert chain.sparql_generation_chain.invoke.call_count == 1
    assert chain.sparql_fix_chain.invoke.call_count == 1
    assert chain.qa_chain.invoke.call_count == 1
    assert result == {chain.output_key: answer, chain.input_key: question}
@pytest.mark.requires("langchain_openai", "rdflib")
@pytest.mark.parametrize("max_fix_retries", [1, 2, 3])
def test_invalid_sparql_server_response_400(max_fix_retries: int) -> None:
    """A query that parses but fails server-side is not retried."""
    from langchain_openai import ChatOpenAI

    question = "Who is the oldest character?"
    # Syntactically valid SPARQL that GraphDB rejects with HTTP 400
    # (aggregate used without a matching GROUP BY).
    generated_invalid_sparql = (
        "PREFIX : <https://swapi.co/vocabulary/> "
        "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
        "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
        "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> "
        "SELECT ?character (MAX(?lifespan) AS ?maxLifespan) "
        "WHERE {"
        " ?species a :Species ;"
        " :character ?character ;"
        " :averageLifespan ?lifespan ."
        " FILTER(xsd:integer(?lifespan))"
        "} "
        "ORDER BY DESC(?maxLifespan) "
        "LIMIT 1"
    )
    graph = OntotextGraphDBGraph(
        query_endpoint="http://localhost:7200/repositories/starwars",
        query_ontology="CONSTRUCT {?s ?p ?o} "
        "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
    )
    chain = OntotextGraphDBQAChain.from_llm(
        Mock(ChatOpenAI), graph=graph, max_fix_retries=max_fix_retries
    )
    for name in ("sparql_generation_chain", "sparql_fix_chain", "qa_chain"):
        sub_chain = Mock(LLMChain)
        sub_chain.output_key = "text"
        sub_chain.invoke = MagicMock()
        setattr(chain, name, sub_chain)
    chain.sparql_generation_chain.invoke.return_value = {
        "text": generated_invalid_sparql,
        "prompt": question,
        "schema": "",
    }
    with pytest.raises(ValueError) as e:
        chain.invoke({chain.input_key: question})
    assert str(e.value) == "Failed to execute the generated SPARQL query."
    assert chain.sparql_generation_chain.invoke.call_count == 1
    assert chain.sparql_fix_chain.invoke.call_count == 0
    assert chain.qa_chain.invoke.call_count == 0
@pytest.mark.requires("langchain_openai", "rdflib")
@pytest.mark.parametrize("max_fix_retries", [1, 2, 3])
def test_invalid_sparql_after_all_retries(max_fix_retries: int) -> None:
    """When every fix attempt returns the same bad query, the chain raises."""
    from langchain_openai import ChatOpenAI

    question = "What is Luke Skywalker's home planet?"
    generated_invalid_sparql = "```sparql SELECT * {?s ?p ?o} LIMIT 1```"
    parse_error = (
        "pyparsing.exceptions.ParseException: "
        "Expected {SelectQuery | ConstructQuery | DescribeQuery | AskQuery}, "
        "found '`' (at char 0), (line:1, col:1)"
    )
    graph = OntotextGraphDBGraph(
        query_endpoint="http://localhost:7200/repositories/starwars",
        query_ontology="CONSTRUCT {?s ?p ?o} "
        "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
    )
    chain = OntotextGraphDBQAChain.from_llm(
        Mock(ChatOpenAI), graph=graph, max_fix_retries=max_fix_retries
    )
    for name in ("sparql_generation_chain", "sparql_fix_chain", "qa_chain"):
        sub_chain = Mock(LLMChain)
        sub_chain.output_key = "text"
        sub_chain.invoke = MagicMock()
        setattr(chain, name, sub_chain)
    chain.sparql_generation_chain.invoke.return_value = {
        "text": generated_invalid_sparql,
        "prompt": question,
        "schema": "",
    }
    # The fix chain keeps echoing the same broken query on every attempt.
    chain.sparql_fix_chain.invoke.return_value = {
        "text": generated_invalid_sparql,
        "error_message": parse_error,
        "generated_sparql": generated_invalid_sparql,
        "schema": "",
    }
    with pytest.raises(ValueError) as e:
        chain.invoke({chain.input_key: question})
    assert str(e.value) == "The generated SPARQL query is invalid."
    assert chain.sparql_generation_chain.invoke.call_count == 1
    assert chain.sparql_fix_chain.invoke.call_count == max_fix_retries
    assert chain.qa_chain.invoke.call_count == 0
@pytest.mark.requires("langchain_openai", "rdflib")
@pytest.mark.parametrize(
    "max_fix_retries,number_of_invalid_responses",
    [(1, 0), (2, 0), (2, 1), (10, 6)],
)
def test_valid_sparql_after_some_retries(
    max_fix_retries: int, number_of_invalid_responses: int
) -> None:
    """The fix loop keeps retrying until a valid query appears within budget."""
    from langchain_openai import ChatOpenAI

    question = "What is Luke Skywalker's home planet?"
    answer = "Tatooine"
    generated_invalid_sparql = "```sparql SELECT * {?s ?p ?o} LIMIT 1```"
    generated_valid_sparql_query = "SELECT * {?s ?p ?o} LIMIT 1"
    parse_error = (
        "pyparsing.exceptions.ParseException: "
        "Expected {SelectQuery | ConstructQuery | DescribeQuery | AskQuery}, "
        "found '`' (at char 0), (line:1, col:1)"
    )
    graph = OntotextGraphDBGraph(
        query_endpoint="http://localhost:7200/repositories/starwars",
        query_ontology="CONSTRUCT {?s ?p ?o} "
        "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
    )
    chain = OntotextGraphDBQAChain.from_llm(
        Mock(ChatOpenAI), graph=graph, max_fix_retries=max_fix_retries
    )
    for name in ("sparql_generation_chain", "sparql_fix_chain", "qa_chain"):
        sub_chain = Mock(LLMChain)
        sub_chain.output_key = "text"
        setattr(chain, name, sub_chain)
    chain.sparql_generation_chain.invoke = MagicMock(
        return_value={
            "text": generated_invalid_sparql,
            "prompt": question,
            "schema": "",
        }
    )
    bad_reply = {
        "text": generated_invalid_sparql,
        "error_message": parse_error,
        "generated_sparql": generated_invalid_sparql,
        "schema": "",
    }
    good_reply = {
        "text": generated_valid_sparql_query,
        "error_message": parse_error,
        "generated_sparql": generated_invalid_sparql,
        "schema": "",
    }
    # N invalid replies followed by one valid reply.
    chain.sparql_fix_chain.invoke = Mock(
        side_effect=[bad_reply] * number_of_invalid_responses + [good_reply]
    )
    chain.qa_chain.invoke = MagicMock(
        return_value={
            "text": answer,
            "prompt": question,
            "context": [],
        }
    )
    result = chain.invoke({chain.input_key: question})
    assert chain.sparql_generation_chain.invoke.call_count == 1
    assert chain.sparql_fix_chain.invoke.call_count == number_of_invalid_responses + 1
    assert chain.qa_chain.invoke.call_count == 1
    assert result == {chain.output_key: answer, chain.input_key: question}
@pytest.mark.requires("langchain_openai", "rdflib")
@pytest.mark.parametrize(
    "model_name,question",
    [
        ("gpt-3.5-turbo-1106", "What is the average height of the Wookiees?"),
        ("gpt-3.5-turbo-1106", "What is the climate on Tatooine?"),
        ("gpt-3.5-turbo-1106", "What is Luke Skywalker's home planet?"),
        ("gpt-4-1106-preview", "What is the average height of the Wookiees?"),
        ("gpt-4-1106-preview", "What is the climate on Tatooine?"),
        ("gpt-4-1106-preview", "What is Luke Skywalker's home planet?"),
    ],
)
def test_chain(model_name: str, question: str) -> None:
    """End-to-end smoke test against a live GraphDB instance and OpenAI."""
    from langchain_openai import ChatOpenAI

    chain = OntotextGraphDBQAChain.from_llm(
        ChatOpenAI(temperature=0, model_name=model_name),  # type: ignore[call-arg]
        graph=OntotextGraphDBGraph(
            query_endpoint="http://localhost:7200/repositories/starwars",
            query_ontology="CONSTRUCT {?s ?p ?o} "
            "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
        ),
        verbose=True,  # type: ignore[call-arg]
    )
    try:
        chain.invoke({chain.input_key: question})
    except ValueError:
        # The model may produce an unusable query; only other failures
        # should break this smoke test.
        pass
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chains/test_graph_database_sparql.py | """Test RDF/ SPARQL Graph Database Chain."""
import pathlib
import re
from unittest.mock import MagicMock, Mock
from langchain.chains import LLMChain
from langchain_community.chains.graph_qa.sparql import GraphSparqlQAChain
from langchain_community.graphs import RdfGraph
"""
cd libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb
./start.sh
"""
def test_connect_file_rdf() -> None:
    """
    Test loading online resource.
    """
    graph = RdfGraph(
        source_file="http://www.w3.org/People/Berners-Lee/card",
        standard="rdf",
    )
    rows = graph.query("SELECT ?s ?p ?o\nWHERE { ?s ?p ?o }")
    # The card document is stable and contains exactly 86 triples.
    assert len(rows) == 86
def test_sparql_select() -> None:
    """
    Test for generating and executing simple SPARQL SELECT query.
    """
    from langchain_openai import ChatOpenAI

    question = "What is Tim Berners-Lee's work homepage?"
    answer = "Tim Berners-Lee's work homepage is http://www.w3.org/People/Berners-Lee/."
    graph = RdfGraph(
        source_file="http://www.w3.org/People/Berners-Lee/card",
        standard="rdf",
    )
    chain = GraphSparqlQAChain.from_llm(Mock(ChatOpenAI), graph=graph)
    # Stub out the three generation sub-chains.
    for name in (
        "sparql_intent_chain",
        "sparql_generation_select_chain",
        "sparql_generation_update_chain",
    ):
        setattr(chain, name, Mock(LLMChain))
    chain.sparql_intent_chain.run = Mock(return_value="SELECT")
    chain.sparql_generation_select_chain.run = Mock(
        return_value="""PREFIX foaf: <http://xmlns.com/foaf/0.1/>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
SELECT ?workHomepage
WHERE {
?person rdfs:label "Tim Berners-Lee" .
?person foaf:workplaceHomepage ?workHomepage .
}"""
    )
    chain.qa_chain = MagicMock(
        return_value={"text": answer, "prompt": question, "context": []}
    )
    chain.qa_chain.output_key = "text"
    output = chain.invoke({chain.input_key: question})[chain.output_key]
    assert output == answer
    assert chain.sparql_intent_chain.run.call_count == 1
    assert chain.sparql_generation_select_chain.run.call_count == 1
    assert chain.sparql_generation_update_chain.run.call_count == 0
    assert chain.qa_chain.call_count == 1
def test_sparql_insert(tmp_path: pathlib.Path) -> None:
    """
    Test for generating and executing simple SPARQL INSERT query.
    """
    from langchain_openai import ChatOpenAI

    graph = RdfGraph(
        source_file="http://www.w3.org/People/Berners-Lee/card",
        standard="rdf",
        local_copy=str(tmp_path / "test.ttl"),
    )
    chain = GraphSparqlQAChain.from_llm(Mock(ChatOpenAI), graph=graph)
    for name in (
        "sparql_intent_chain",
        "sparql_generation_select_chain",
        "sparql_generation_update_chain",
        "qa_chain",
    ):
        setattr(chain, name, Mock(LLMChain))
    chain.sparql_intent_chain.run = Mock(return_value="UPDATE")
    chain.sparql_generation_update_chain.run = Mock(
        return_value="""PREFIX foaf: <http://xmlns.com/foaf/0.1/>
INSERT {
?p foaf:workplaceHomepage <http://www.w3.org/foo/bar/> .
}
WHERE {
?p foaf:name "Timothy Berners-Lee" .
}"""
    )
    output = chain.invoke(
        {
            chain.input_key: (
                "Save that the person with the name 'Timothy Berners-Lee' "
                "has a work homepage at 'http://www.w3.org/foo/bar/'"
            )
        }
    )[chain.output_key]
    assert output == "Successfully inserted triples into the graph."
    assert chain.sparql_intent_chain.run.call_count == 1
    assert chain.sparql_generation_select_chain.run.call_count == 0
    assert chain.sparql_generation_update_chain.run.call_count == 1
    assert chain.qa_chain.call_count == 0
    # Verify the triple actually landed in the graph.
    verification_query = (
        "PREFIX foaf: <http://xmlns.com/foaf/0.1/>\n"
        "SELECT ?hp\n"
        "WHERE {\n"
        ' ?person foaf:name "Timothy Berners-Lee" . \n'
        " ?person foaf:workplaceHomepage ?hp .\n"
        "}"
    )
    assert len(graph.query(verification_query)) == 2
def test_sparql_select_return_query() -> None:
    """
    Test for generating and executing simple SPARQL SELECT query
    and returning the generated SPARQL query.
    """
    from langchain_openai import ChatOpenAI

    question = "What is Tim Berners-Lee's work homepage?"
    answer = "Tim Berners-Lee's work homepage is http://www.w3.org/People/Berners-Lee/."
    graph = RdfGraph(
        source_file="http://www.w3.org/People/Berners-Lee/card",
        standard="rdf",
    )
    chain = GraphSparqlQAChain.from_llm(
        Mock(ChatOpenAI),
        graph=graph,
        return_sparql_query=True,
    )
    for name in (
        "sparql_intent_chain",
        "sparql_generation_select_chain",
        "sparql_generation_update_chain",
    ):
        setattr(chain, name, Mock(LLMChain))
    chain.sparql_intent_chain.run = Mock(return_value="SELECT")
    chain.sparql_generation_select_chain.run = Mock(
        return_value="""PREFIX foaf: <http://xmlns.com/foaf/0.1/>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
SELECT ?workHomepage
WHERE {
?person rdfs:label "Tim Berners-Lee" .
?person foaf:workplaceHomepage ?workHomepage .
}"""
    )
    chain.qa_chain = MagicMock(
        return_value={"text": answer, "prompt": question, "context": []}
    )
    chain.qa_chain.output_key = "text"
    output = chain.invoke({chain.input_key: question})
    assert output[chain.output_key] == answer
    # return_sparql_query=True adds the generated query to the output dict.
    assert "sparql_query" in output
    assert chain.sparql_intent_chain.run.call_count == 1
    assert chain.sparql_generation_select_chain.run.call_count == 1
    assert chain.sparql_generation_update_chain.run.call_count == 0
    assert chain.qa_chain.call_count == 1
def test_loading_schema_from_ontotext_graphdb() -> None:
    """Verify the auto-generated schema description for a GraphDB repository."""
    graph = RdfGraph(
        query_endpoint="http://localhost:7200/repositories/langchain",
        graph_kwargs={"bind_namespaces": "none"},
    )
    schema = graph.get_schema
    prefix = (
        "In the following, each IRI is followed by the local name and "
        "optionally its description in parentheses. \n"
        "The RDF graph supports the following node types:"
    )
    infix = "The RDF graph supports the following relationships:"
    assert schema.startswith(prefix)
    assert infix in schema
    # Each entry looks like "<iri> (local name)".
    iri_entry = re.compile(r"<[^>]+> \([^)]+\)")
    classes = schema[len(prefix) : schema.index(infix)]
    relationships = schema[schema.index(infix) + len(infix) :]
    assert len(iri_entry.findall(classes)) == 5
    assert len(iri_entry.findall(relationships)) == 58
def test_graph_qa_chain_with_ontotext_graphdb() -> None:
    """Run the SPARQL QA chain against a graph backed by a GraphDB endpoint."""
    from langchain_openai import ChatOpenAI

    question = "What is Tim Berners-Lee's work homepage?"
    answer = "Tim Berners-Lee's work homepage is http://www.w3.org/People/Berners-Lee/."
    graph = RdfGraph(
        query_endpoint="http://localhost:7200/repositories/langchain",
        graph_kwargs={"bind_namespaces": "none"},
    )
    chain = GraphSparqlQAChain.from_llm(Mock(ChatOpenAI), graph=graph)
    for name in (
        "sparql_intent_chain",
        "sparql_generation_select_chain",
        "sparql_generation_update_chain",
    ):
        setattr(chain, name, Mock(LLMChain))
    chain.sparql_intent_chain.run = Mock(return_value="SELECT")
    chain.sparql_generation_select_chain.run = Mock(
        return_value="""PREFIX foaf: <http://xmlns.com/foaf/0.1/>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
SELECT ?workHomepage
WHERE {
?person rdfs:label "Tim Berners-Lee" .
?person foaf:workplaceHomepage ?workHomepage .
}"""
    )
    chain.qa_chain = MagicMock(
        return_value={"text": answer, "prompt": question, "context": []}
    )
    chain.qa_chain.output_key = "text"
    output = chain.invoke({chain.input_key: question})[chain.output_key]
    assert output == answer
    assert chain.sparql_intent_chain.run.call_count == 1
    assert chain.sparql_generation_select_chain.run.call_count == 1
    assert chain.sparql_generation_update_chain.run.call_count == 0
    assert chain.qa_chain.call_count == 1
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chains/test_graph_database.py | """Test Graph Database Chain."""
import os
from langchain.chains.loading import load_chain
from langchain_community.chains.graph_qa.cypher import GraphCypherQAChain
from langchain_community.graphs import Neo4jGraph
from langchain_community.llms.openai import OpenAI
def test_connect_neo4j() -> None:
    """Test that Neo4j database is correctly instantiated and connected."""
    env = {k: os.environ.get(f"NEO4J_{k}") for k in ("URI", "USERNAME", "PASSWORD")}
    assert env["URI"] is not None
    assert env["USERNAME"] is not None
    assert env["PASSWORD"] is not None
    graph = Neo4jGraph(
        url=env["URI"],
        username=env["USERNAME"],
        password=env["PASSWORD"],
    )
    # A trivial round-trip query proves the connection works.
    assert graph.query('\nRETURN "test" AS output\n') == [{"output": "test"}]
def test_connect_neo4j_env() -> None:
    """Test connecting through NEO4J_* environment variables alone."""
    graph = Neo4jGraph()
    assert graph.query('\nRETURN "test" AS output\n') == [{"output": "test"}]
def test_cypher_generating_run() -> None:
    """Test that Cypher statement is correctly generated and executed."""
    env = {k: os.environ.get(f"NEO4J_{k}") for k in ("URI", "USERNAME", "PASSWORD")}
    assert env["URI"] is not None
    assert env["USERNAME"] is not None
    assert env["PASSWORD"] is not None
    graph = Neo4jGraph(
        url=env["URI"],
        username=env["USERNAME"],
        password=env["PASSWORD"],
    )
    # Reset to a clean graph containing a single actor/movie pair.
    graph.query("MATCH (n) DETACH DELETE n")
    graph.query(
        "CREATE (a:Actor {name:'Bruce Willis'})"
        "-[:ACTED_IN]->(:Movie {title: 'Pulp Fiction'})"
    )
    graph.refresh_schema()
    chain = GraphCypherQAChain.from_llm(OpenAI(temperature=0), graph=graph)
    assert (
        chain.run("Who played in Pulp Fiction?")
        == " Bruce Willis played in Pulp Fiction."
    )
def test_cypher_top_k() -> None:
    """Test top_k parameter correctly limits the number of results in the context."""
    env = {k: os.environ.get(f"NEO4J_{k}") for k in ("URI", "USERNAME", "PASSWORD")}
    assert env["URI"] is not None
    assert env["USERNAME"] is not None
    assert env["PASSWORD"] is not None
    TOP_K = 1
    graph = Neo4jGraph(
        url=env["URI"],
        username=env["USERNAME"],
        password=env["PASSWORD"],
    )
    # Two actors in the same movie, so an unlimited query would return 2 rows.
    graph.query("MATCH (n) DETACH DELETE n")
    graph.query(
        "CREATE (a:Actor {name:'Bruce Willis'})"
        "-[:ACTED_IN]->(:Movie {title: 'Pulp Fiction'})"
        "<-[:ACTED_IN]-(:Actor {name:'Foo'})"
    )
    graph.refresh_schema()
    chain = GraphCypherQAChain.from_llm(
        OpenAI(temperature=0), graph=graph, return_direct=True, top_k=TOP_K
    )
    assert len(chain.run("Who played in Pulp Fiction?")) == TOP_K
def test_cypher_intermediate_steps() -> None:
    """Test the returning of the intermediate steps."""
    env = {k: os.environ.get(f"NEO4J_{k}") for k in ("URI", "USERNAME", "PASSWORD")}
    assert env["URI"] is not None
    assert env["USERNAME"] is not None
    assert env["PASSWORD"] is not None
    graph = Neo4jGraph(
        url=env["URI"],
        username=env["USERNAME"],
        password=env["PASSWORD"],
    )
    graph.query("MATCH (n) DETACH DELETE n")
    graph.query(
        "CREATE (a:Actor {name:'Bruce Willis'})"
        "-[:ACTED_IN]->(:Movie {title: 'Pulp Fiction'})"
    )
    graph.refresh_schema()
    chain = GraphCypherQAChain.from_llm(
        OpenAI(temperature=0), graph=graph, return_intermediate_steps=True
    )
    output = chain("Who played in Pulp Fiction?")
    assert output["result"] == " Bruce Willis played in Pulp Fiction."
    generated_query = output["intermediate_steps"][0]["query"]
    # The LLM can phrase the same Cypher in several equivalent ways.
    assert generated_query in [
        (
            "\n\nMATCH (a:Actor)-[:ACTED_IN]->"
            "(m:Movie {title: 'Pulp Fiction'}) RETURN a.name"
        ),
        (
            "\n\nMATCH (a:Actor)-[:ACTED_IN]->"
            "(m:Movie {title: 'Pulp Fiction'}) RETURN a.name;"
        ),
        (
            "\n\nMATCH (a:Actor)-[:ACTED_IN]->"
            "(m:Movie) WHERE m.title = 'Pulp Fiction' RETURN a.name"
        ),
    ]
    assert output["intermediate_steps"][1]["context"] == [{"a.name": "Bruce Willis"}]
def test_cypher_return_direct() -> None:
    """Test that chain returns direct results."""
    env = {k: os.environ.get(f"NEO4J_{k}") for k in ("URI", "USERNAME", "PASSWORD")}
    assert env["URI"] is not None
    assert env["USERNAME"] is not None
    assert env["PASSWORD"] is not None
    graph = Neo4jGraph(
        url=env["URI"],
        username=env["USERNAME"],
        password=env["PASSWORD"],
    )
    graph.query("MATCH (n) DETACH DELETE n")
    graph.query(
        "CREATE (a:Actor {name:'Bruce Willis'})"
        "-[:ACTED_IN]->(:Movie {title: 'Pulp Fiction'})"
    )
    graph.refresh_schema()
    chain = GraphCypherQAChain.from_llm(
        OpenAI(temperature=0), graph=graph, return_direct=True
    )
    # return_direct=True yields the raw query rows, not an LLM-phrased answer.
    assert chain.run("Who played in Pulp Fiction?") == [{"a.name": "Bruce Willis"}]
def test_cypher_save_load() -> None:
    """Test saving and loading.

    The serialized chain is written to the working directory and removed
    again afterwards, so repeated runs do not leave artifacts behind
    (the original version leaked ``cypher.yaml``).
    """
    FILE_PATH = "cypher.yaml"
    url = os.environ.get("NEO4J_URI")
    username = os.environ.get("NEO4J_USERNAME")
    password = os.environ.get("NEO4J_PASSWORD")
    assert url is not None
    assert username is not None
    assert password is not None
    graph = Neo4jGraph(
        url=url,
        username=username,
        password=password,
    )
    chain = GraphCypherQAChain.from_llm(
        OpenAI(temperature=0), graph=graph, return_direct=True
    )
    chain.save(file_path=FILE_PATH)
    try:
        qa_loaded = load_chain(FILE_PATH, graph=graph)
        assert qa_loaded == chain
    finally:
        # Always clean up the file the test created.
        os.remove(FILE_PATH)
def test_exclude_types() -> None:
    """Test exclude types from schema."""
    env = {k: os.environ.get(f"NEO4J_{k}") for k in ("URI", "USERNAME", "PASSWORD")}
    assert env["URI"] is not None
    assert env["USERNAME"] is not None
    assert env["PASSWORD"] is not None
    graph = Neo4jGraph(
        url=env["URI"],
        username=env["USERNAME"],
        password=env["PASSWORD"],
    )
    graph.query("MATCH (n) DETACH DELETE n")
    # Actor/Movie plus a Person who DIRECTED the movie; the latter pair
    # is excluded below and must not appear in the schema.
    graph.query(
        "CREATE (a:Actor {name:'Bruce Willis'})"
        "-[:ACTED_IN]->(:Movie {title: 'Pulp Fiction'})"
        "<-[:DIRECTED]-(p:Person {name:'John'})"
    )
    graph.refresh_schema()
    chain = GraphCypherQAChain.from_llm(
        OpenAI(temperature=0), graph=graph, exclude_types=["Person", "DIRECTED"]
    )
    assert chain.graph_schema == (
        "Node properties are the following:\n"
        "Movie {title: STRING},Actor {name: STRING}\n"
        "Relationship properties are the following:\n\n"
        "The relationships are the following:\n"
        "(:Actor)-[:ACTED_IN]->(:Movie)"
    )
def test_include_types() -> None:
    """Test include types from schema."""
    env = {k: os.environ.get(f"NEO4J_{k}") for k in ("URI", "USERNAME", "PASSWORD")}
    assert env["URI"] is not None
    assert env["USERNAME"] is not None
    assert env["PASSWORD"] is not None
    graph = Neo4jGraph(
        url=env["URI"],
        username=env["USERNAME"],
        password=env["PASSWORD"],
    )
    graph.query("MATCH (n) DETACH DELETE n")
    graph.query(
        "CREATE (a:Actor {name:'Bruce Willis'})"
        "-[:ACTED_IN]->(:Movie {title: 'Pulp Fiction'})"
        "<-[:DIRECTED]-(p:Person {name:'John'})"
    )
    graph.refresh_schema()
    # Only the whitelisted labels and relationship types survive.
    chain = GraphCypherQAChain.from_llm(
        OpenAI(temperature=0), graph=graph, include_types=["Movie", "Actor", "ACTED_IN"]
    )
    assert chain.graph_schema == (
        "Node properties are the following:\n"
        "Movie {title: STRING},Actor {name: STRING}\n"
        "Relationship properties are the following:\n\n"
        "The relationships are the following:\n"
        "(:Actor)-[:ACTED_IN]->(:Movie)"
    )
def test_include_types2() -> None:
    """Test include types from schema."""
    env = {k: os.environ.get(f"NEO4J_{k}") for k in ("URI", "USERNAME", "PASSWORD")}
    assert env["URI"] is not None
    assert env["USERNAME"] is not None
    assert env["PASSWORD"] is not None
    graph = Neo4jGraph(
        url=env["URI"],
        username=env["USERNAME"],
        password=env["PASSWORD"],
    )
    graph.query("MATCH (n) DETACH DELETE n")
    graph.query(
        "CREATE (a:Actor {name:'Bruce Willis'})"
        "-[:ACTED_IN]->(:Movie {title: 'Pulp Fiction'})"
        "<-[:DIRECTED]-(p:Person {name:'John'})"
    )
    graph.refresh_schema()
    # Including ACTED_IN without Actor prunes the relationship entirely.
    chain = GraphCypherQAChain.from_llm(
        OpenAI(temperature=0), graph=graph, include_types=["Movie", "ACTED_IN"]
    )
    assert chain.graph_schema == (
        "Node properties are the following:\n"
        "Movie {title: STRING}\n"
        "Relationship properties are the following:\n\n"
        "The relationships are the following:\n"
    )
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chains/test_dalle_agent.py | """Integration test for Dall-E image generator agent."""
from langchain.agents import AgentType, initialize_agent
from langchain_community.agent_toolkits.load_tools import load_tools
from langchain_community.llms import OpenAI
def test_call() -> None:
    """Test that the agent runs and returns output."""
    # Wire the Dall-E image generator tool into a zero-shot ReAct agent.
    dalle_tools = load_tools(["dalle-image-generator"])
    agent = initialize_agent(
        dalle_tools,
        OpenAI(temperature=0.9),
        agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
        verbose=True,
    )
    result = agent.run("Create an image of a volcano island")
    # Smoke test only: the agent must produce *some* output.
    assert result is not None
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chains/test_graph_database_arangodb.py | """Test Graph Database Chain."""
from typing import Any
from langchain_community.chains.graph_qa.arangodb import ArangoGraphQAChain
from langchain_community.graphs import ArangoGraph
from langchain_community.graphs.arangodb_graph import get_arangodb_client
from langchain_community.llms.openai import OpenAI
def populate_arangodb_database(db: Any) -> None:
    """Seed the test database with a minimal 'GameOfThrones' graph (idempotent)."""
    # Already seeded by a previous test run: nothing to do.
    if db.has_graph("GameOfThrones"):
        return
    # One edge collection (ChildOf) between Characters vertices.
    db.create_graph(
        "GameOfThrones",
        edge_definitions=[
            {
                "edge_collection": "ChildOf",
                "from_vertex_collections": ["Characters"],
                "to_vertex_collections": ["Characters"],
            },
        ],
    )
    documents = [
        {
            "_key": "NedStark",
            "name": "Ned",
            "surname": "Stark",
            "alive": True,
            "age": 41,
            "gender": "male",
        },
        {
            "_key": "AryaStark",
            "name": "Arya",
            "surname": "Stark",
            "alive": True,
            "age": 11,
            "gender": "female",
        },
    ]
    # Edge direction: _from child -> _to parent (Arya is a child of Ned).
    edges = [{"_to": "Characters/NedStark", "_from": "Characters/AryaStark"}]
    db.collection("Characters").import_bulk(documents)
    db.collection("ChildOf").import_bulk(edges)
def test_connect_arangodb() -> None:
    """Test that the ArangoDB database is correctly instantiated and connected."""
    graph = ArangoGraph(get_arangodb_client())
    sample_aql_result = graph.query("RETURN 'hello world'")
    # Bug fix: the AQL literal is 'hello world' (with a space), so the query
    # returns ["hello world"]; the old expectation ["hello_world"] (underscore)
    # could never match and the test always failed against a live database.
    assert ["hello world"] == sample_aql_result
def test_empty_schema_on_no_data() -> None:
    """Test that the schema is empty for an empty ArangoDB Database"""
    db = get_arangodb_client()
    # Remove any leftover graph/collection from previous runs, then create one
    # empty collection so the database is reachable but contains no data.
    db.delete_graph("GameOfThrones", drop_collections=True, ignore_missing=True)
    db.delete_collection("empty_collection", ignore_missing=True)
    db.create_collection("empty_collection")
    graph = ArangoGraph(db)
    assert graph.schema == {
        "Graph Schema": [],
        "Collection Schema": [],
    }
def test_aql_generation() -> None:
    """Test that AQL statement is correctly generated and executed."""
    db = get_arangodb_client()
    populate_arangodb_database(db)
    graph = ArangoGraph(db)
    chain = ArangoGraphQAChain.from_llm(OpenAI(temperature=0), graph=graph)
    # Surface the raw AQL result alongside the LLM answer so both can be checked.
    chain.return_aql_result = True
    output = chain("Is Ned Stark alive?")
    assert output["aql_result"] == [True]
    assert "Yes" in output["result"]
    output = chain("How old is Arya Stark?")
    assert output["aql_result"] == [11]
    assert "11" in output["result"]
    output = chain("What is the relationship between Arya Stark and Ned Stark?")
    # Exactly one ChildOf edge exists between the two seeded characters.
    assert len(output["aql_result"]) == 1
    assert "child of" in output["result"]
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chains/test_react.py | """Integration test for self ask with search."""
from langchain.agents.react.base import ReActChain
from langchain_community.docstore import Wikipedia
from langchain_community.llms.openai import OpenAI
def test_react() -> None:
    """Test functionality on a prompt."""
    llm = OpenAI(temperature=0, model_name="gpt-3.5-turbo-instruct")  # type: ignore[call-arg]
    # ReAct loop backed by Wikipedia lookups for the intermediate hops.
    react = ReActChain(llm=llm, docstore=Wikipedia())
    question = (
        "Author David Chanoff has collaborated with a U.S. Navy admiral "
        "who served as the ambassador to the United Kingdom under "
        "which President?"
    )
    output = react.run(question)
    # Multi-hop question with a single well-known final answer.
    assert output == "Bill Clinton"
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/adapters/test_openai.py | from typing import Any
from langchain_community.adapters import openai as lcopenai
def _test_no_stream(**kwargs: Any) -> None:
    """Compare a non-streaming ChatCompletion between openai and the adapter."""
    import openai

    native = openai.ChatCompletion.create(**kwargs)  # type: ignore[attr-defined]
    adapted = lcopenai.ChatCompletion.create(**kwargs)
    # Only compare when both backends answered with plain dicts.
    if isinstance(adapted, dict) and isinstance(native, dict):
        native_message = native["choices"][0]["message"].to_dict_recursive()
        adapted_message = adapted["choices"][0]["message"]
        assert native_message == adapted_message
def _test_stream(**kwargs: Any) -> None:
    """Compare streamed deltas between openai and the adapter, chunk by chunk."""
    import openai

    native_deltas = [
        chunk["choices"][0]["delta"].to_dict_recursive()
        for chunk in openai.ChatCompletion.create(**kwargs)  # type: ignore[attr-defined]
    ]
    adapter_deltas = [
        chunk["choices"][0]["delta"]
        for chunk in lcopenai.ChatCompletion.create(**kwargs)
    ]
    assert native_deltas == adapter_deltas
async def _test_async(**kwargs: Any) -> None:
    """Async variant: compare a non-streaming ChatCompletion between backends."""
    import openai

    result = await openai.ChatCompletion.acreate(**kwargs)  # type: ignore[attr-defined]
    lc_result = await lcopenai.ChatCompletion.acreate(**kwargs)
    # Only compare when both backends answered with plain dicts.
    if isinstance(lc_result, dict):
        if isinstance(result, dict):
            result_dict = result["choices"][0]["message"].to_dict_recursive()
            lc_result_dict = lc_result["choices"][0]["message"]
            assert result_dict == lc_result_dict
        return
async def _test_astream(**kwargs: Any) -> None:
    """Async variant: compare streamed deltas between backends, chunk by chunk."""
    import openai

    result = []
    async for c in await openai.ChatCompletion.acreate(**kwargs):  # type: ignore[attr-defined]
        result.append(c["choices"][0]["delta"].to_dict_recursive())
    lc_result = []
    async for c in await lcopenai.ChatCompletion.acreate(**kwargs):
        lc_result.append(c["choices"][0]["delta"])
    assert result == lc_result
# OpenAI function-calling schema shared by the function-calling tests below.
FUNCTIONS = [
    {
        "name": "get_current_weather",
        "description": "Get the current weather in a given location",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state, e.g. San Francisco, CA",
                },
                "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
            },
            "required": ["location"],
        },
    }
]
async def _test_module(**kwargs: Any) -> None:
    """Run all four comparison modes: sync/async x non-streaming/streaming."""
    _test_no_stream(**kwargs)
    await _test_async(**kwargs)
    _test_stream(stream=True, **kwargs)
    await _test_astream(stream=True, **kwargs)
async def test_normal_call() -> None:
    """Plain chat call with no tools/functions involved."""
    await _test_module(
        messages=[{"role": "user", "content": "hi"}],
        model="gpt-3.5-turbo",
        temperature=0,
    )
async def test_function_calling() -> None:
    """Prompt that should trigger a function call via the shared schema."""
    await _test_module(
        messages=[{"role": "user", "content": "whats the weather in boston"}],
        model="gpt-3.5-turbo",
        functions=FUNCTIONS,
        temperature=0,
    )
async def test_answer_with_function_calling() -> None:
    """Prompt mixing a direct answer with a function call in one response."""
    await _test_module(
        messages=[
            {"role": "user", "content": "say hi, then whats the weather in boston"}
        ],
        model="gpt-3.5-turbo",
        functions=FUNCTIONS,
        temperature=0,
    )
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_firestore.py | import json
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict
from langchain_community.chat_message_histories import FirestoreChatMessageHistory
def test_memory_with_message_store() -> None:
    """Test the memory with a message store."""
    message_history = FirestoreChatMessageHistory(
        collection_name="chat_history",
        session_id="my-test-session",
        user_id="my-test-user",
    )
    memory = ConversationBufferMemory(
        memory_key="baz", chat_memory=message_history, return_messages=True
    )
    # add some messages
    memory.chat_memory.add_ai_message("This is me, the AI")
    memory.chat_memory.add_user_message("This is me, the human")
    # get the message history from the memory store
    # and check if the messages are there as expected
    # Re-create the history/memory objects to simulate a fresh process reading
    # the same Firestore-backed conversation.
    message_history = FirestoreChatMessageHistory(
        collection_name="chat_history",
        session_id="my-test-session",
        user_id="my-test-user",
    )
    memory = ConversationBufferMemory(
        memory_key="baz", chat_memory=message_history, return_messages=True
    )
    messages = memory.chat_memory.messages
    messages_json = json.dumps([message_to_dict(msg) for msg in messages])
    assert "This is me, the AI" in messages_json
    assert "This is me, the human" in messages_json
    # remove the record from Firestore, so the next test run won't pick it up
    memory.chat_memory.clear()
    assert memory.chat_memory.messages == []
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_cosmos_db.py | import json
import os
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict
from langchain_community.chat_message_histories import CosmosDBChatMessageHistory
# Azure Cosmos DB connection settings, read from the environment
# (COSMOS_DB_ENDPOINT / COSMOS_DB_KEY); empty strings when unset.
endpoint = os.environ.get("COSMOS_DB_ENDPOINT", "")
credential = os.environ.get("COSMOS_DB_KEY", "")
def test_memory_with_message_store() -> None:
    """Test the memory with a message store."""
    # setup Azure Cosmos DB as a message store
    message_history = CosmosDBChatMessageHistory(
        cosmos_endpoint=endpoint,
        cosmos_database="chat_history",
        cosmos_container="messages",
        credential=credential,
        session_id="my-test-session",
        user_id="my-test-user",
        ttl=10,  # short TTL so test entries expire quickly even without clear()
    )
    message_history.prepare_cosmos()
    memory = ConversationBufferMemory(
        memory_key="baz", chat_memory=message_history, return_messages=True
    )
    # add some messages
    memory.chat_memory.add_ai_message("This is me, the AI")
    memory.chat_memory.add_user_message("This is me, the human")
    # get the message history from the memory store and turn it into a json
    messages = memory.chat_memory.messages
    messages_json = json.dumps([message_to_dict(msg) for msg in messages])
    assert "This is me, the AI" in messages_json
    assert "This is me, the human" in messages_json
    # remove the record from Azure Cosmos DB, so the next test run won't pick it up
    memory.chat_memory.clear()
    assert memory.chat_memory.messages == []
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_singlestoredb.py | import json
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict
from langchain_community.chat_message_histories import SingleStoreDBChatMessageHistory
# Replace this with your SingleStoreDB connection string (host format).
TEST_SINGLESTOREDB_URL = "root:pass@localhost:3306/db"
def test_memory_with_message_store() -> None:
    """Test the memory with a message store."""
    # setup SingleStoreDB as a message store
    message_history = SingleStoreDBChatMessageHistory(
        session_id="test-session",
        host=TEST_SINGLESTOREDB_URL,
    )
    memory = ConversationBufferMemory(
        memory_key="baz", chat_memory=message_history, return_messages=True
    )
    # add some messages
    memory.chat_memory.add_ai_message("This is me, the AI")
    memory.chat_memory.add_user_message("This is me, the human")
    # get the message history from the memory store and turn it into a json
    messages = memory.chat_memory.messages
    messages_json = json.dumps([message_to_dict(msg) for msg in messages])
    assert "This is me, the AI" in messages_json
    assert "This is me, the human" in messages_json
    # remove the record from SingleStoreDB, so the next test run won't pick it up
    memory.chat_memory.clear()
    assert memory.chat_memory.messages == []
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_mongodb.py | import json
import os
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict
from langchain_community.chat_message_histories import MongoDBChatMessageHistory
# MongoDB connection string from the environment; empty string when unset.
connection_string = os.environ.get("MONGODB_CONNECTION_STRING", "")
def test_memory_with_message_store() -> None:
    """Test the memory with a message store."""
    # setup MongoDB as a message store
    message_history = MongoDBChatMessageHistory(
        connection_string=connection_string, session_id="test-session"
    )
    memory = ConversationBufferMemory(
        memory_key="baz", chat_memory=message_history, return_messages=True
    )
    # add some messages
    memory.chat_memory.add_ai_message("This is me, the AI")
    memory.chat_memory.add_user_message("This is me, the human")
    # get the message history from the memory store and turn it into a json
    messages = memory.chat_memory.messages
    messages_json = json.dumps([message_to_dict(msg) for msg in messages])
    assert "This is me, the AI" in messages_json
    assert "This is me, the human" in messages_json
    # remove the record from MongoDB, so the next test run won't pick it up
    memory.chat_memory.clear()
    assert memory.chat_memory.messages == []
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_rockset.py | """Tests RocksetChatMessageHistory by creating a collection
for message history, adding to it, and clearing it.
To run these tests, make sure you have the ROCKSET_API_KEY
and ROCKSET_REGION environment variables set.
"""
import json
import os
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict
from langchain_community.chat_message_histories import RocksetChatMessageHistory
# Rockset collection and session identifiers shared by all tests in this module.
collection_name = "langchain_demo"
session_id = "MySession"
class TestRockset:
    # Shared history instance, created once per class in setup_class.
    memory: RocksetChatMessageHistory

    @classmethod
    def setup_class(cls) -> None:
        """Connect to Rockset using ROCKSET_API_KEY / ROCKSET_REGION env vars."""
        from rockset import DevRegions, Regions, RocksetClient

        assert os.environ.get("ROCKSET_API_KEY") is not None
        assert os.environ.get("ROCKSET_REGION") is not None
        api_key = os.environ.get("ROCKSET_API_KEY")
        region = os.environ.get("ROCKSET_REGION")
        # Map the region name onto the SDK's host constant; unknown strings
        # are passed through unchanged as a custom host.
        if region == "use1a1":
            host = Regions.use1a1
        elif region == "usw2a1" or not region:
            host = Regions.usw2a1
        elif region == "euc1a1":
            host = Regions.euc1a1
        elif region == "dev":
            host = DevRegions.usw2a1
        else:
            host = region
        client = RocksetClient(host, api_key)
        # sync=True waits for writes to be queryable before returning.
        cls.memory = RocksetChatMessageHistory(
            session_id, client, collection_name, sync=True
        )

    def test_memory_with_message_store(self) -> None:
        """Round-trip messages through the Rockset-backed memory, then clear."""
        memory = ConversationBufferMemory(
            memory_key="messages", chat_memory=self.memory, return_messages=True
        )
        memory.chat_memory.add_ai_message("This is me, the AI")
        memory.chat_memory.add_user_message("This is me, the human")
        messages = memory.chat_memory.messages
        messages_json = json.dumps([message_to_dict(msg) for msg in messages])
        assert "This is me, the AI" in messages_json
        assert "This is me, the human" in messages_json
        memory.chat_memory.clear()
        assert memory.chat_memory.messages == []
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_memory_cassandra.py | import os
import time
from typing import Any, Optional
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.chat_message_histories.cassandra import (
CassandraChatMessageHistory,
)
def _chat_message_history(
    session_id: str = "test-session",
    drop: bool = True,
    ttl_seconds: Optional[int] = None,
) -> CassandraChatMessageHistory:
    """Build a CassandraChatMessageHistory against a local or configured cluster.

    Args:
        session_id: chat session identifier stored with each message.
        drop: when True, drop any pre-existing test table for a clean slate.
        ttl_seconds: optional time-to-live forwarded to the history store.
    """
    from cassandra.cluster import Cluster

    keyspace = "cmh_test_keyspace"
    table_name = "cmh_test_table"
    # get db connection
    if "CASSANDRA_CONTACT_POINTS" in os.environ:
        # Bug fix: read the same variable that was tested above (the old code
        # read the non-existent "CONTACT_POINTS" key and raised KeyError
        # whenever CASSANDRA_CONTACT_POINTS was actually set).
        contact_points = os.environ["CASSANDRA_CONTACT_POINTS"].split(",")
        cluster = Cluster(contact_points)
    else:
        cluster = Cluster()
    session = cluster.connect()
    # ensure keyspace exists (single-node replication is fine for tests)
    session.execute(
        (
            f"CREATE KEYSPACE IF NOT EXISTS {keyspace} "
            f"WITH replication = {{'class': 'SimpleStrategy', 'replication_factor': 1}}"
        )
    )
    # drop table if required, so each test starts from an empty history
    if drop:
        session.execute(f"DROP TABLE IF EXISTS {keyspace}.{table_name}")
    kwargs: Any = {} if ttl_seconds is None else {"ttl_seconds": ttl_seconds}
    return CassandraChatMessageHistory(
        session_id=session_id,
        session=session,
        keyspace=keyspace,
        table_name=table_name,
        **kwargs,
    )
def test_memory_with_message_store() -> None:
    """Test the memory with a message store."""
    # setup cassandra as a message store
    message_history = _chat_message_history()
    memory = ConversationBufferMemory(
        memory_key="baz",
        chat_memory=message_history,
        return_messages=True,
    )
    # Fresh table: no messages yet.
    assert memory.chat_memory.messages == []
    # add some messages
    memory.chat_memory.add_ai_message("This is me, the AI")
    memory.chat_memory.add_user_message("This is me, the human")
    messages = memory.chat_memory.messages
    expected = [
        AIMessage(content="This is me, the AI"),
        HumanMessage(content="This is me, the human"),
    ]
    assert messages == expected
    # clear the store
    memory.chat_memory.clear()
    assert memory.chat_memory.messages == []
def test_memory_separate_session_ids() -> None:
    """Test that separate session IDs do not share entries."""
    message_history1 = _chat_message_history(session_id="test-session1")
    memory1 = ConversationBufferMemory(
        memory_key="mk1",
        chat_memory=message_history1,
        return_messages=True,
    )
    message_history2 = _chat_message_history(session_id="test-session2")
    memory2 = ConversationBufferMemory(
        memory_key="mk2",
        chat_memory=message_history2,
        return_messages=True,
    )
    # A message added under session1 must not be visible under session2.
    memory1.chat_memory.add_ai_message("Just saying.")
    assert memory2.chat_memory.messages == []
    memory1.chat_memory.clear()
    memory2.chat_memory.clear()
def test_memory_ttl() -> None:
    """Test time-to-live feature of the memory."""
    message_history = _chat_message_history(ttl_seconds=5)
    memory = ConversationBufferMemory(
        memory_key="baz",
        chat_memory=message_history,
        return_messages=True,
    )
    assert memory.chat_memory.messages == []
    memory.chat_memory.add_ai_message("Nothing special here.")
    # Still inside the 5-second TTL: the message must be visible...
    time.sleep(2)
    assert memory.chat_memory.messages != []
    # ...and gone once total elapsed time (2s + 5s) exceeds the TTL.
    time.sleep(5)
    assert memory.chat_memory.messages == []
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_elasticsearch.py | import json
import os
import uuid
from typing import Generator, Union
import pytest
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict
from langchain_community.chat_message_histories import ElasticsearchChatMessageHistory
"""
cd tests/integration_tests/memory/docker-compose
docker-compose -f elasticsearch.yml up
By default runs against local docker instance of Elasticsearch.
To run against Elastic Cloud, set the following environment variables:
- ES_CLOUD_ID
- ES_USERNAME
- ES_PASSWORD
"""
class TestElasticsearch:
    @pytest.fixture(scope="class", autouse=True)
    def elasticsearch_connection(self) -> Union[dict, Generator[dict, None, None]]:  # type: ignore[return]
        # Run this integration test against Elasticsearch on localhost,
        # or an Elastic Cloud instance
        from elasticsearch import Elasticsearch

        es_url = os.environ.get("ES_URL", "http://localhost:9200")
        es_cloud_id = os.environ.get("ES_CLOUD_ID")
        es_username = os.environ.get("ES_USERNAME", "elastic")
        es_password = os.environ.get("ES_PASSWORD", "changeme")
        if es_cloud_id:
            es = Elasticsearch(
                cloud_id=es_cloud_id,
                basic_auth=(es_username, es_password),
            )
            yield {
                "es_cloud_id": es_cloud_id,
                "es_user": es_username,
                "es_password": es_password,
            }
        else:
            # Running this integration test with local docker instance
            es = Elasticsearch(hosts=es_url)
            yield {"es_url": es_url}
        # Teardown after the whole class: drop every test-created index.
        # Clear all indexes
        index_names = es.indices.get(index="_all").keys()
        for index_name in index_names:
            if index_name.startswith("test_"):
                es.indices.delete(index=index_name)
        es.indices.refresh(index="_all")

    @pytest.fixture(scope="function")
    def index_name(self) -> str:
        """Return the index name."""
        # Unique per test so parallel/repeated runs cannot collide.
        return f"test_{uuid.uuid4().hex}"

    def test_memory_with_message_store(
        self, elasticsearch_connection: dict, index_name: str
    ) -> None:
        """Test the memory with a message store."""
        # setup Elasticsearch as a message store
        message_history = ElasticsearchChatMessageHistory(
            **elasticsearch_connection, index=index_name, session_id="test-session"
        )
        memory = ConversationBufferMemory(
            memory_key="baz", chat_memory=message_history, return_messages=True
        )
        # add some messages
        memory.chat_memory.add_ai_message("This is me, the AI")
        memory.chat_memory.add_user_message("This is me, the human")
        # get the message history from the memory store and turn it into a json
        messages = memory.chat_memory.messages
        messages_json = json.dumps([message_to_dict(msg) for msg in messages])
        assert "This is me, the AI" in messages_json
        assert "This is me, the human" in messages_json
        # remove the record from Elasticsearch, so the next test run won't pick it up
        memory.chat_memory.clear()
        assert memory.chat_memory.messages == []
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_memory_astradb.py | import os
from typing import AsyncIterable, Iterable
import pytest
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.chat_message_histories.astradb import (
AstraDBChatMessageHistory,
)
from langchain_community.utilities.astradb import SetupMode
def _has_env_vars() -> bool:
return all(
[
"ASTRA_DB_APPLICATION_TOKEN" in os.environ,
"ASTRA_DB_API_ENDPOINT" in os.environ,
]
)
@pytest.fixture(scope="function")
def history1() -> Iterable[AstraDBChatMessageHistory]:
history1 = AstraDBChatMessageHistory(
session_id="session-test-1",
collection_name="langchain_cmh_test",
token=os.environ["ASTRA_DB_APPLICATION_TOKEN"],
api_endpoint=os.environ["ASTRA_DB_API_ENDPOINT"],
namespace=os.environ.get("ASTRA_DB_KEYSPACE"),
)
yield history1
history1.collection.astra_db.delete_collection("langchain_cmh_test")
@pytest.fixture(scope="function")
def history2() -> Iterable[AstraDBChatMessageHistory]:
history2 = AstraDBChatMessageHistory(
session_id="session-test-2",
collection_name="langchain_cmh_test",
token=os.environ["ASTRA_DB_APPLICATION_TOKEN"],
api_endpoint=os.environ["ASTRA_DB_API_ENDPOINT"],
namespace=os.environ.get("ASTRA_DB_KEYSPACE"),
)
yield history2
history2.collection.astra_db.delete_collection("langchain_cmh_test")
@pytest.fixture
async def async_history1() -> AsyncIterable[AstraDBChatMessageHistory]:
    """Async-setup history for session 1; drops the collection afterwards."""
    history1 = AstraDBChatMessageHistory(
        session_id="async-session-test-1",
        collection_name="langchain_cmh_test",
        token=os.environ["ASTRA_DB_APPLICATION_TOKEN"],
        api_endpoint=os.environ["ASTRA_DB_API_ENDPOINT"],
        namespace=os.environ.get("ASTRA_DB_KEYSPACE"),
        setup_mode=SetupMode.ASYNC,
    )
    yield history1
    # Teardown: remove the test collection so runs stay isolated.
    await history1.async_collection.astra_db.delete_collection("langchain_cmh_test")
@pytest.fixture(scope="function")
async def async_history2() -> AsyncIterable[AstraDBChatMessageHistory]:
history2 = AstraDBChatMessageHistory(
session_id="async-session-test-2",
collection_name="langchain_cmh_test",
token=os.environ["ASTRA_DB_APPLICATION_TOKEN"],
api_endpoint=os.environ["ASTRA_DB_API_ENDPOINT"],
namespace=os.environ.get("ASTRA_DB_KEYSPACE"),
setup_mode=SetupMode.ASYNC,
)
yield history2
await history2.async_collection.astra_db.delete_collection("langchain_cmh_test")
@pytest.mark.requires("astrapy")
@pytest.mark.skipif(not _has_env_vars(), reason="Missing Astra DB env. vars")
def test_memory_with_message_store(history1: AstraDBChatMessageHistory) -> None:
"""Test the memory with a message store."""
memory = ConversationBufferMemory(
memory_key="baz",
chat_memory=history1,
return_messages=True,
)
assert memory.chat_memory.messages == []
# add some messages
memory.chat_memory.add_messages(
[
AIMessage(content="This is me, the AI"),
HumanMessage(content="This is me, the human"),
]
)
messages = memory.chat_memory.messages
expected = [
AIMessage(content="This is me, the AI"),
HumanMessage(content="This is me, the human"),
]
assert messages == expected
# clear the store
memory.chat_memory.clear()
assert memory.chat_memory.messages == []
@pytest.mark.requires("astrapy")
@pytest.mark.skipif(not _has_env_vars(), reason="Missing Astra DB env. vars")
async def test_memory_with_message_store_async(
async_history1: AstraDBChatMessageHistory,
) -> None:
"""Test the memory with a message store."""
memory = ConversationBufferMemory(
memory_key="baz",
chat_memory=async_history1,
return_messages=True,
)
assert await memory.chat_memory.aget_messages() == []
# add some messages
await memory.chat_memory.aadd_messages(
[
AIMessage(content="This is me, the AI"),
HumanMessage(content="This is me, the human"),
]
)
messages = await memory.chat_memory.aget_messages()
expected = [
AIMessage(content="This is me, the AI"),
HumanMessage(content="This is me, the human"),
]
assert messages == expected
# clear the store
await memory.chat_memory.aclear()
assert await memory.chat_memory.aget_messages() == []
@pytest.mark.requires("astrapy")
@pytest.mark.skipif(not _has_env_vars(), reason="Missing Astra DB env. vars")
def test_memory_separate_session_ids(
history1: AstraDBChatMessageHistory, history2: AstraDBChatMessageHistory
) -> None:
"""Test that separate session IDs do not share entries."""
memory1 = ConversationBufferMemory(
memory_key="mk1",
chat_memory=history1,
return_messages=True,
)
memory2 = ConversationBufferMemory(
memory_key="mk2",
chat_memory=history2,
return_messages=True,
)
memory1.chat_memory.add_messages([AIMessage(content="Just saying.")])
assert memory2.chat_memory.messages == []
memory2.chat_memory.clear()
assert memory1.chat_memory.messages != []
memory1.chat_memory.clear()
assert memory1.chat_memory.messages == []
@pytest.mark.requires("astrapy")
@pytest.mark.skipif(not _has_env_vars(), reason="Missing Astra DB env. vars")
async def test_memory_separate_session_ids_async(
async_history1: AstraDBChatMessageHistory, async_history2: AstraDBChatMessageHistory
) -> None:
"""Test that separate session IDs do not share entries."""
memory1 = ConversationBufferMemory(
memory_key="mk1",
chat_memory=async_history1,
return_messages=True,
)
memory2 = ConversationBufferMemory(
memory_key="mk2",
chat_memory=async_history2,
return_messages=True,
)
await memory1.chat_memory.aadd_messages([AIMessage(content="Just saying.")])
assert await memory2.chat_memory.aget_messages() == []
await memory2.chat_memory.aclear()
assert await memory1.chat_memory.aget_messages() != []
await memory1.chat_memory.aclear()
assert await memory1.chat_memory.aget_messages() == []
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_momento.py | """Test Momento chat message history functionality.
To run tests, set the environment variable MOMENTO_API_KEY to a valid
Momento auth token. This can be obtained by signing up for a free
Momento account at https://gomomento.com/.
"""
import json
import uuid
from datetime import timedelta
from typing import Iterator
import pytest
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict
from langchain_community.chat_message_histories import MomentoChatMessageHistory
def random_string() -> str:
    """Return a fresh UUID4 rendered as a string, for unique cache names."""
    return f"{uuid.uuid4()}"
@pytest.fixture(scope="function")
def message_history() -> Iterator[MomentoChatMessageHistory]:
from momento import CacheClient, Configurations, CredentialProvider
cache_name = f"langchain-test-cache-{random_string()}"
client = CacheClient(
Configurations.Laptop.v1(),
CredentialProvider.from_environment_variable("MOMENTO_API_KEY"),
default_ttl=timedelta(seconds=30),
)
try:
chat_message_history = MomentoChatMessageHistory(
session_id="my-test-session",
cache_client=client,
cache_name=cache_name,
)
yield chat_message_history
finally:
client.delete_cache(cache_name)
def test_memory_empty_on_new_session(
    message_history: MomentoChatMessageHistory,
) -> None:
    """A brand-new session starts with an empty message history."""
    memory = ConversationBufferMemory(
        memory_key="foo", chat_memory=message_history, return_messages=True
    )
    assert memory.chat_memory.messages == []
def test_memory_with_message_store(message_history: MomentoChatMessageHistory) -> None:
    """Round-trip messages through the Momento-backed memory, then clear."""
    memory = ConversationBufferMemory(
        memory_key="baz", chat_memory=message_history, return_messages=True
    )
    # Add some messages to the memory store
    memory.chat_memory.add_ai_message("This is me, the AI")
    memory.chat_memory.add_user_message("This is me, the human")
    # Verify that the messages are in the store
    messages = memory.chat_memory.messages
    messages_json = json.dumps([message_to_dict(msg) for msg in messages])
    assert "This is me, the AI" in messages_json
    assert "This is me, the human" in messages_json
    # Verify clearing the store
    memory.chat_memory.clear()
    assert memory.chat_memory.messages == []
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_redis.py | import json
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict
from langchain_community.chat_message_histories import RedisChatMessageHistory
def test_memory_with_message_store() -> None:
    """Test the memory with a message store."""
    # setup Redis as a message store
    message_history = RedisChatMessageHistory(
        url="redis://localhost:6379/0", ttl=10, session_id="my-test-session"
    )
    memory = ConversationBufferMemory(
        memory_key="baz", chat_memory=message_history, return_messages=True
    )
    # add some messages
    memory.chat_memory.add_ai_message("This is me, the AI")
    memory.chat_memory.add_user_message("This is me, the human")
    # get the message history from the memory store and turn it into a json
    messages = memory.chat_memory.messages
    messages_json = json.dumps([message_to_dict(msg) for msg in messages])
    assert "This is me, the AI" in messages_json
    assert "This is me, the human" in messages_json
    # remove the record from Redis, so the next test run won't pick it up
    memory.chat_memory.clear()
    # Consistency with the sibling chat-history integration tests: verify
    # that clear() actually emptied the store (previously unchecked here).
    assert memory.chat_memory.messages == []
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_upstash_redis.py | import json
import pytest
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict
from langchain_community.chat_message_histories.upstash_redis import (
UpstashRedisChatMessageHistory,
)
URL = "<UPSTASH_REDIS_REST_URL>"
TOKEN = "<UPSTASH_REDIS_REST_TOKEN>"
@pytest.mark.requires("upstash_redis")
def test_memory_with_message_store() -> None:
"""Test the memory with a message store."""
# setup Upstash Redis as a message store
message_history = UpstashRedisChatMessageHistory(
url=URL, token=TOKEN, ttl=10, session_id="my-test-session"
)
memory = ConversationBufferMemory(
memory_key="baz", chat_memory=message_history, return_messages=True
)
# add some messages
memory.chat_memory.add_ai_message("This is me, the AI")
memory.chat_memory.add_user_message("This is me, the human")
# get the message history from the memory store and turn it into a json
messages = memory.chat_memory.messages
messages_json = json.dumps([message_to_dict(msg) for msg in messages])
assert "This is me, the AI" in messages_json
assert "This is me, the human" in messages_json
# remove the record from Redis, so the next test run won't pick it up
memory.chat_memory.clear()
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_neo4j.py | import json
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict
from langchain_community.chat_message_histories import Neo4jChatMessageHistory
def test_memory_with_message_store() -> None:
    """Test the memory with a message store."""
    # setup Neo4j as a message store
    message_history = Neo4jChatMessageHistory(session_id="test-session")
    memory = ConversationBufferMemory(
        memory_key="baz", chat_memory=message_history, return_messages=True
    )
    # add some messages
    memory.chat_memory.add_ai_message("This is me, the AI")
    memory.chat_memory.add_user_message("This is me, the human")
    # get the message history from the memory store and turn it into a json
    messages = memory.chat_memory.messages
    messages_json = json.dumps([message_to_dict(msg) for msg in messages])
    assert "This is me, the AI" in messages_json
    assert "This is me, the human" in messages_json
    # remove the record from Neo4j, so the next test run won't pick it up
    memory.chat_memory.clear()
    assert memory.chat_memory.messages == []
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/memory/test_xata.py | """Test Xata chat memory store functionality.
Before running this test, please create a Xata database.
"""
import json
import os
from langchain.memory import ConversationBufferMemory
from langchain_core.messages import message_to_dict
from langchain_community.chat_message_histories import XataChatMessageHistory
class TestXata:
    """Round-trip chat-memory test against a live Xata database."""

    @classmethod
    def setup_class(cls) -> None:
        # Fail fast if the required Xata credentials are not configured.
        assert os.getenv("XATA_API_KEY"), "XATA_API_KEY environment variable is not set"
        assert os.getenv("XATA_DB_URL"), "XATA_DB_URL environment variable is not set"

    def test_xata_chat_memory(self) -> None:
        """Messages written via ConversationBufferMemory are persisted in Xata."""
        message_history = XataChatMessageHistory(
            api_key=os.getenv("XATA_API_KEY", ""),
            db_url=os.getenv("XATA_DB_URL", ""),
            session_id="integration-test-session",
        )
        memory = ConversationBufferMemory(
            memory_key="baz", chat_memory=message_history, return_messages=True
        )
        # add some messages
        memory.chat_memory.add_ai_message("This is me, the AI")
        memory.chat_memory.add_user_message("This is me, the human")
        # get the message history from the memory store and turn it into a json
        messages = memory.chat_memory.messages
        messages_json = json.dumps([message_to_dict(msg) for msg in messages])
        assert "This is me, the AI" in messages_json
        assert "This is me, the human" in messages_json
        # remove the record from Xata, so the next test run won't pick it up
        memory.chat_memory.clear()
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_octoai.py | from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.chat_models.octoai import ChatOctoAI
def test_chat_octoai() -> None:
    """Smoke test: OctoAI chat returns an AIMessage with string content."""
    llm = ChatOctoAI()
    reply = llm.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_sparkllm.py | from typing import Any
from langchain_core.messages import AIMessage, AIMessageChunk, HumanMessage
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models.sparkllm import ChatSparkLLM
# OpenAI-style function schema used to exercise SparkLLM function calling.
_FUNCTIONS: Any = [
    {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    },
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                },
                "required": ["location"],
            },
        },
    }
]
def test_functions_call_thoughts() -> None:
    """The model should emit a tool call when bound with function schemas."""
    llm = ChatSparkLLM(timeout=30)
    template = ChatPromptTemplate(
        messages=[
            HumanMessagePromptTemplate.from_template(
                "Use the given functions to answer following question: {input}"
            ),
        ]
    )
    pipeline = template | llm.bind(functions=_FUNCTIONS)
    question = HumanMessage(content="What's the weather like in Shanghai today?")
    results = pipeline.batch([{"input": question}])
    first = results[0]
    assert isinstance(first, AIMessage)
    assert "tool_calls" in first.additional_kwargs
def test_initialization() -> None:
    """Test chat model initialization."""
    # Both the canonical kwargs (api_key/timeout) and the alias kwargs
    # (spark_api_key/request_timeout) should populate the same fields.
    # The second instance does not set temperature, so the assertion below
    # presumably relies on 0.5 being the default — TODO confirm.
    for model in [
        ChatSparkLLM(
            api_key="secret",
            temperature=0.5,
            timeout=30,
        ),
        ChatSparkLLM(
            spark_api_key="secret",
            request_timeout=30,
        ),  # type: ignore[call-arg]
    ]:
        assert model.request_timeout == 30
        assert model.spark_api_key == "secret"
        assert model.temperature == 0.5
def test_chat_spark_llm() -> None:
    """Basic invoke returns an AIMessage with string content."""
    llm = ChatSparkLLM()  # type: ignore[call-arg]
    reply = llm.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


def test_chat_spark_llm_streaming() -> None:
    """Streaming mode yields AIMessageChunk instances."""
    llm = ChatSparkLLM(streaming=True)  # type: ignore[call-arg]
    for piece in llm.stream("Hello!"):
        assert isinstance(piece, AIMessageChunk)
        assert isinstance(piece.content, str)


def test_chat_spark_llm_with_domain() -> None:
    """Invoke works when a specific spark_llm_domain is selected."""
    llm = ChatSparkLLM(spark_llm_domain="generalv3")  # type: ignore[call-arg]
    reply = llm.invoke([HumanMessage(content="Hello")])
    print(reply)  # noqa: T201
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


def test_chat_spark_llm_with_temperature() -> None:
    """Invoke works with sampling parameters set."""
    llm = ChatSparkLLM(temperature=0.9, top_k=2)  # type: ignore[call-arg]
    reply = llm.invoke([HumanMessage(content="Hello")])
    print(reply)  # noqa: T201
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


def test_chat_spark_llm_streaming_with_stream_method() -> None:
    """stream() on a default-constructed client still yields chunks."""
    llm = ChatSparkLLM()  # type: ignore[call-arg]
    for piece in llm.stream("Hello!"):
        assert isinstance(piece, AIMessageChunk)
        assert isinstance(piece.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_vertexai.py | """Test Vertex AI API wrapper.
In order to run this test, you need to install VertexAI SDK (that is is the private
preview) and be whitelisted to list the models themselves:
In order to run this test, you need to install VertexAI SDK
pip install google-cloud-aiplatform>=1.35.0
Your end-user credentials would be used to make the calls (make sure you've run
`gcloud auth login` first).
"""
from typing import Optional
from unittest.mock import MagicMock, Mock, patch
import pytest
from langchain_core.messages import (
AIMessage,
AIMessageChunk,
HumanMessage,
SystemMessage,
)
from langchain_core.outputs import LLMResult
from langchain_community.chat_models import ChatVertexAI
from langchain_community.chat_models.vertexai import (
_parse_chat_history,
_parse_examples,
)
model_names_to_test = [None, "codechat-bison", "chat-bison", "gemini-pro"]
@pytest.mark.parametrize("model_name", model_names_to_test)
def test_vertexai_instantiation(model_name: str) -> None:
    """Client construction resolves the expected underlying model id."""
    if model_name:
        model = ChatVertexAI(model_name=model_name)
    else:
        model = ChatVertexAI()
    assert model._llm_type == "vertexai"
    # Some client classes expose _model_id, others only _model_name as a
    # fully-qualified path — hence the AttributeError fallback.
    try:
        assert model.model_name == model.client._model_id
    except AttributeError:
        assert model.model_name == model.client._model_name.split("/")[-1]
@pytest.mark.scheduled
@pytest.mark.parametrize("model_name", model_names_to_test)
def test_vertexai_single_call(model_name: str) -> None:
    """A single human message gets a string-content AIMessage back."""
    model = ChatVertexAI(model_name=model_name) if model_name else ChatVertexAI()
    result = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(result, AIMessage)
    assert isinstance(result.content, str)


# Marked xfail because the Vertex API randomly doesn't respect
# the n/candidate_count parameter.
@pytest.mark.xfail
@pytest.mark.scheduled
def test_candidates() -> None:
    """Requesting n=2 should yield two generations for one prompt."""
    model = ChatVertexAI(model_name="chat-bison@001", temperature=0.3, n=2)
    result = model.generate(messages=[[HumanMessage(content="Hello")]])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 1
    assert len(result.generations[0]) == 2


@pytest.mark.scheduled
@pytest.mark.parametrize("model_name", ["chat-bison@001", "gemini-pro"])
async def test_vertexai_agenerate(model_name: str) -> None:
    """Async generation matches the synchronous result at temperature 0."""
    model = ChatVertexAI(temperature=0, model_name=model_name)
    prompt = HumanMessage(content="Hello")
    async_result = await model.agenerate([[prompt]])
    assert isinstance(async_result, LLMResult)
    assert isinstance(async_result.generations[0][0].message, AIMessage)  # type: ignore
    sync_result = model.generate([[prompt]])
    assert async_result.generations[0][0] == sync_result.generations[0][0]


@pytest.mark.scheduled
@pytest.mark.parametrize("model_name", ["chat-bison@001", "gemini-pro"])
def test_vertexai_stream(model_name: str) -> None:
    """Streaming yields AIMessageChunk items."""
    model = ChatVertexAI(temperature=0, model_name=model_name)
    for chunk in model.stream([HumanMessage(content="Hello")]):
        assert isinstance(chunk, AIMessageChunk)


@pytest.mark.scheduled
def test_vertexai_single_call_with_context() -> None:
    """A SystemMessage context plus a question yields a string answer."""
    model = ChatVertexAI()
    context = SystemMessage(
        content=(
            "My name is Ned. You are my personal assistant. My favorite movies "
            "are Lord of the Rings and Hobbit."
        )
    )
    question = HumanMessage(
        content=(
            "Hello, could you recommend a good movie for me to watch this "
            "evening, please?"
        )
    )
    result = model.invoke([context, question])
    assert isinstance(result, AIMessage)
    assert isinstance(result.content, str)
def test_multimodal() -> None:
    """Gemini vision accepts a mixed text + GCS image content list."""
    model = ChatVertexAI(model_name="gemini-ultra-vision")
    image_part = {
        "type": "image_url",
        "image_url": {
            "url": (
                "gs://cloud-samples-data/generative-ai/image/"
                "320px-Felis_catus-cat_on_snow.jpg"
            )
        },
    }
    text_part = {"type": "text", "text": "What is shown in this image?"}
    reply = model.invoke([HumanMessage(content=[text_part, image_part])])
    assert isinstance(reply.content, str)


def test_multimodal_history() -> None:
    """A multimodal turn followed by text-only history still gets an answer."""
    model = ChatVertexAI(model_name="gemini-ultra-vision")
    image_part = {
        "type": "image_url",
        "image_url": {
            "url": (
                "gs://cloud-samples-data/generative-ai/image/"
                "320px-Felis_catus-cat_on_snow.jpg"
            )
        },
    }
    text_part = {"type": "text", "text": "What is shown in this image?"}
    first_turn = HumanMessage(content=[text_part, image_part])
    model_turn = AIMessage(
        content=(
            "This is a picture of a cat in the snow. The cat is a tabby cat, which is "
            "a type of cat with a striped coat. The cat is standing in the snow, and "
            "its fur is covered in snow."
        )
    )
    followup = HumanMessage(content="What time of day is it?")
    reply = model.invoke([first_turn, model_turn, followup])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
@pytest.mark.scheduled
def test_vertexai_single_call_with_examples() -> None:
    """Few-shot examples can be passed through invoke(examples=...)."""
    model = ChatVertexAI()
    context = SystemMessage(content="My name is Ned. You are my personal assistant.")
    example_question = HumanMessage(content="4+4")
    example_answer = AIMessage(content="8")
    question = HumanMessage(content="2+2")
    reply = model.invoke(
        [context, question], examples=[example_question, example_answer]
    )
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


@pytest.mark.scheduled
@pytest.mark.parametrize("model_name", model_names_to_test)
def test_vertexai_single_call_with_history(model_name: str) -> None:
    """A prior human/AI exchange can precede the new question."""
    model = ChatVertexAI(model_name=model_name) if model_name else ChatVertexAI()
    conversation = [
        HumanMessage(content="How much is 2+2?"),
        AIMessage(content="4"),
        HumanMessage(content="How much is 3+3?"),
    ]
    reply = model.invoke(conversation)
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
def test_parse_chat_history_correct() -> None:
    """_parse_chat_history splits context from alternating user/bot turns."""
    from vertexai.language_models import ChatMessage

    text_context = (
        "My name is Ned. You are my personal assistant. My "
        "favorite movies are Lord of the Rings and Hobbit."
    )
    text_question = (
        "Hello, could you recommend a good movie for me to watch this evening, please?"
    )
    text_answer = (
        "Sure, You might enjoy The Lord of the Rings: The Fellowship of the Ring "
        "(2001): This is the first movie in the Lord of the Rings trilogy."
    )
    context = SystemMessage(content=text_context)
    question = HumanMessage(content=text_question)
    answer = AIMessage(content=text_answer)
    history = _parse_chat_history([context, question, answer, question, answer])
    assert history.context == context.content
    assert len(history.history) == 4
    turn_pair = [
        ChatMessage(content=text_question, author="user"),
        ChatMessage(content=text_answer, author="bot"),
    ]
    assert history.history == turn_pair * 2
def test_vertexai_single_call_fails_no_message() -> None:
    """Invoking with an empty message list raises a clear ValueError."""
    chat = ChatVertexAI()
    with pytest.raises(ValueError) as exc_info:
        _ = chat.invoke([])
    expected = "You should provide at least one message to start the chat!"
    assert str(exc_info.value) == expected
@pytest.mark.parametrize("stop", [None, "stop1"])
def test_vertexai_args_passed(stop: Optional[str]) -> None:
    """Sampling params and stop sequences are forwarded to start_chat()."""
    response_text = "Goodbye"
    user_prompt = "Hello"
    prompt_params = {
        "max_output_tokens": 1,
        "temperature": 10000.0,
        "top_k": 10,
        "top_p": 0.5,
    }
    # Mock the library to ensure the args are passed correctly
    with patch(
        "vertexai.language_models._language_models.ChatModel.start_chat"
    ) as start_chat:
        mock_response = MagicMock()
        mock_response.candidates = [Mock(text=response_text)]
        mock_chat = MagicMock()
        start_chat.return_value = mock_chat
        mock_send_message = MagicMock(return_value=mock_response)
        mock_chat.send_message = mock_send_message
        model = ChatVertexAI(**prompt_params)  # type: ignore[arg-type]
        message = HumanMessage(content=user_prompt)
        if stop:
            response = model.invoke([message], stop=[stop])
        else:
            response = model.invoke([message])
        assert response.content == response_text
        # send_message should receive only the raw prompt plus candidate count.
        mock_send_message.assert_called_once_with(user_prompt, candidate_count=1)
        expected_stop_sequence = [stop] if stop else None
        # All sampling params plus stop_sequences land on start_chat().
        start_chat.assert_called_once_with(
            context=None,
            message_history=[],
            **prompt_params,
            stop_sequences=expected_stop_sequence,
        )
def test_parse_examples_correct() -> None:
    """_parse_examples pairs alternating human/AI messages into text pairs."""
    from vertexai.language_models import InputOutputTextPair

    text_question = (
        "Hello, could you recommend a good movie for me to watch this evening, please?"
    )
    text_answer = (
        "Sure, You might enjoy The Lord of the Rings: The Fellowship of the Ring "
        "(2001): This is the first movie in the Lord of the Rings trilogy."
    )
    question = HumanMessage(content=text_question)
    answer = AIMessage(content=text_answer)
    pairs = _parse_examples([question, answer, question, answer])
    assert len(pairs) == 2
    expected_pair = InputOutputTextPair(
        input_text=text_question, output_text=text_answer
    )
    assert pairs == [expected_pair, expected_pair]


def test_parse_examples_failes_wrong_sequence() -> None:
    """An odd number of example messages is rejected.

    NOTE(review): "failes" in the test name is a typo, kept to preserve
    the public test id.
    """
    with pytest.raises(ValueError) as exc_info:
        _ = _parse_examples([AIMessage(content="a")])
    print(str(exc_info.value))  # noqa: T201
    assert (
        str(exc_info.value)
        == "Expect examples to have an even amount of messages, got 1."
    )
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_baichuan.py | from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.chat_models.baichuan import ChatBaichuan
# For testing, run:
# TEST_FILE=tests/integration_tests/chat_models/test_baichuan.py make test
def test_chat_baichuan_default() -> None:
    """Streaming client answers a Chinese prompt with string content."""
    llm = ChatBaichuan(streaming=True)  # type: ignore[call-arg]
    reply = llm.invoke([HumanMessage(content="请完整背诵将进酒,背诵5遍")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


def test_chat_baichuan_default_non_streaming() -> None:
    """Non-streaming client answers a Chinese prompt with string content."""
    llm = ChatBaichuan()  # type: ignore[call-arg]
    reply = llm.invoke([HumanMessage(content="请完整背诵将进酒,背诵5遍")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


def test_chat_baichuan_turbo() -> None:
    """The Baichuan2-Turbo model works in streaming mode."""
    llm = ChatBaichuan(model="Baichuan2-Turbo", streaming=True)  # type: ignore[call-arg]
    reply = llm.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


def test_chat_baichuan_turbo_non_streaming() -> None:
    """The Baichuan2-Turbo model works without streaming."""
    llm = ChatBaichuan(model="Baichuan2-Turbo")  # type: ignore[call-arg]
    reply = llm.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
def test_chat_baichuan_with_temperature() -> None:
    """Invoke works with a non-default temperature."""
    llm = ChatBaichuan(temperature=1.0)  # type: ignore[call-arg]
    reply = llm.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


def test_chat_baichuan_with_kwargs() -> None:
    """Per-call kwargs (temperature/top_p/search enhance) are accepted."""
    llm = ChatBaichuan()  # type: ignore[call-arg]
    query = HumanMessage(content="百川192K API是什么时候上线的?")
    reply = llm.invoke(
        [query], temperature=0.88, top_p=0.7, with_search_enhance=True
    )
    print(reply)  # noqa: T201
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


def test_extra_kwargs() -> None:
    """Constructor kwargs are stored on the client instance."""
    llm = ChatBaichuan(temperature=0.88, top_p=0.7, with_search_enhance=True)  # type: ignore[call-arg]
    assert llm.temperature == 0.88
    assert llm.top_p == 0.7
    assert llm.with_search_enhance is True
async def test_chat_baichuan_agenerate() -> None:
    """Async invoke returns an AIMessage with string content."""
    llm = ChatBaichuan()  # type: ignore[call-arg]
    reply = await llm.ainvoke("你好呀")
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


async def test_chat_baichuan_astream() -> None:
    """Async streaming yields AIMessage chunks with string content."""
    llm = ChatBaichuan()  # type: ignore[call-arg]
    async for piece in llm.astream("今天天气如何?"):
        assert isinstance(piece, AIMessage)
        assert isinstance(piece.content, str)


def test_chat_baichuan_with_system_role() -> None:
    """A (system, human) tuple conversation is accepted."""
    llm = ChatBaichuan()  # type: ignore[call-arg]
    conversation = [
        ("system", "你是一名专业的翻译家,可以将用户的中文翻译为英文。"),
        ("human", "我喜欢编程。"),
    ]
    reply = llm.invoke(conversation)
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_naver.py | """Test ChatNaver chat model."""
from langchain_core.messages import AIMessage, AIMessageChunk
from langchain_community.chat_models import ChatClovaX
def test_stream() -> None:
    """Synchronous streaming yields AIMessageChunk items."""
    model = ChatClovaX()
    for piece in model.stream("I'm Clova"):
        assert isinstance(piece, AIMessageChunk)
        assert isinstance(piece.content, str)


async def test_astream() -> None:
    """Async streaming yields AIMessageChunk items."""
    model = ChatClovaX()
    async for piece in model.astream("I'm Clova"):
        assert isinstance(piece, AIMessageChunk)
        assert isinstance(piece.content, str)


async def test_abatch() -> None:
    """Async batch returns one AIMessage per prompt."""
    model = ChatClovaX()
    replies = await model.abatch(["I'm Clova", "I'm not Clova"])
    for reply in replies:
        assert isinstance(reply, AIMessage)
        assert isinstance(reply.content, str)


async def test_abatch_tags() -> None:
    """Async batch accepts a config carrying tags."""
    model = ChatClovaX()
    replies = await model.abatch(
        ["I'm Clova", "I'm not Clova"], config={"tags": ["foo"]}
    )
    for reply in replies:
        assert isinstance(reply, AIMessage)
        assert isinstance(reply.content, str)


def test_batch() -> None:
    """Sync batch returns one AIMessage per prompt."""
    model = ChatClovaX()
    replies = model.batch(["I'm Clova", "I'm not Clova"])
    for reply in replies:
        assert isinstance(reply, AIMessage)
        assert isinstance(reply.content, str)


async def test_ainvoke() -> None:
    """Async invoke with tags returns an AIMessage."""
    model = ChatClovaX()
    reply = await model.ainvoke("I'm Clova", config={"tags": ["foo"]})
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


def test_invoke() -> None:
    """Sync invoke with tags returns an AIMessage."""
    model = ChatClovaX()
    reply = model.invoke("I'm Clova", config=dict(tags=["foo"]))
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_zhipuai.py | """Test ZhipuAI Chat Model."""
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, ToolMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_core.tools import tool
from langchain_community.chat_models.zhipuai import ChatZhipuAI
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
def test_default_call() -> None:
    """The default model answers a single human message."""
    llm = ChatZhipuAI()
    reply = llm.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)


def test_model() -> None:
    """The model kwarg selects a specific model."""
    llm = ChatZhipuAI(model="glm-4")
    reply = llm.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)


def test_multiple_history() -> None:
    """A multi-turn conversation history is accepted."""
    llm = ChatZhipuAI()
    history = [
        HumanMessage(content="Hello."),
        AIMessage(content="Hello!"),
        HumanMessage(content="How are you doing?"),
    ]
    reply = llm.invoke(history)
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_stream() -> None:
    """Streaming triggers on_llm_new_token callbacks."""
    llm = ChatZhipuAI(streaming=True)
    handler = FakeCallbackHandler()
    manager = CallbackManager([handler])
    reply = llm.invoke(
        [
            HumanMessage(content="Hello."),
            AIMessage(content="Hello!"),
            HumanMessage(content="Who are you?"),
        ],
        stream=True,
        config={"callbacks": manager},
    )
    assert handler.llm_streams > 0
    assert isinstance(reply.content, str)


def test_multiple_messages() -> None:
    """generate() handles several conversations in one call."""
    llm = ChatZhipuAI()
    prompt = HumanMessage(content="Hi, how are you.")
    result = llm.generate([[prompt], [prompt]])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for conversation in result.generations:
        assert len(conversation) == 1
        for generation in conversation:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
@tool
def add(a: int, b: int) -> int:
    """Adds a and b."""
    # NOTE: the docstring above doubles as the tool description sent to the
    # model, so it must stay short and accurate.
    return a + b


@tool
def multiply(a: int, b: int) -> int:
    """Multiplies a and b."""
    # NOTE: the docstring above doubles as the tool description sent to the
    # model, so it must stay short and accurate.
    return a * b
def test_tool_call() -> None:
    """Test tool calling by ChatZhipuAI"""
    chat = ChatZhipuAI(model="glm-4-long")  # type: ignore[call-arg]
    tools = [add, multiply]
    chat_with_tools = chat.bind_tools(tools)
    query = "What is 3 * 12?"
    messages = [HumanMessage(query)]
    ai_msg = chat_with_tools.invoke(messages)
    # The model is expected to request exactly one tool call for this query.
    assert isinstance(ai_msg, AIMessage)
    assert isinstance(ai_msg.tool_calls, list)
    assert len(ai_msg.tool_calls) == 1
    tool_call = ai_msg.tool_calls[0]
    assert "args" in tool_call
    messages.append(ai_msg)  # type: ignore[arg-type]
    # Execute each requested tool and feed the result back as a ToolMessage.
    for tool_call in ai_msg.tool_calls:
        selected_tool = {"add": add, "multiply": multiply}[tool_call["name"].lower()]
        tool_output = selected_tool.invoke(tool_call["args"])  # type: ignore[attr-defined]
        messages.append(ToolMessage(tool_output, tool_call_id=tool_call["id"]))  # type: ignore[arg-type]
    # With the tool results in history the model produces a final answer.
    response = chat_with_tools.invoke(messages)
    assert isinstance(response, AIMessage)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_kinetica.py | """Test Kinetica Chat API wrapper."""
import logging
from typing import TYPE_CHECKING, Generator
import pandas as pd
import pytest
from langchain_core.messages import (
AIMessage,
HumanMessage,
SystemMessage,
)
from langchain_core.prompts import ChatPromptTemplate
from langchain_community.chat_models.kinetica import (
ChatKinetica,
KineticaSqlOutputParser,
KineticaSqlResponse,
KineticaUtil,
)
if TYPE_CHECKING:
import gpudb
LOG = logging.getLogger(__name__)
@pytest.fixture(scope="module")
def vcr_config() -> dict:
    """VCR settings: replace the Authorization request header with "DUMMY"
    in recorded cassettes."""
    return {"filter_headers": [("authorization", "DUMMY")]}
class TestChatKinetica:
    """Integration tests for `Kinetica` chat models.

    You must have `gpudb`, `typeguard`, and `faker` packages installed to run these
    tests. pytest-vcr cassettes are provided for offline testing.

    For more information see https://docs.kinetica.com/7.1/sql-gpt/concepts/.

    These integration tests follow a workflow:

    1. The `test_setup()` will create a table with fake user profiles and a related
    LLM context for the table.

    2. The LLM context is retrieved from the DB and used to create a chat prompt
    template.

    3. A chain is constructed from the chat prompt template.

    4. The chain is executed to generate the SQL and execute the query.
    """

    # Fixture identifiers/sizes shared by every step of the workflow.
    table_name = "demo.test_profiles"
    context_name = "demo.test_llm_ctx"
    num_records = 100

    @classmethod
    @pytest.mark.vcr()
    def test_setup(cls) -> "gpudb.GPUdb":
        """Create the connection, test table, and LLM context."""
        kdbc = KineticaUtil.create_kdbc()
        cls._create_test_table(kdbc, cls.table_name, cls.num_records)
        cls._create_llm_context(kdbc, cls.context_name)
        return kdbc

    @pytest.mark.vcr()
    def test_create_llm(self) -> None:
        """Create an LLM instance."""
        import gpudb

        kinetica_llm = ChatKinetica()  # type: ignore[call-arg]
        LOG.info(kinetica_llm._identifying_params)
        assert isinstance(kinetica_llm.kdbc, gpudb.GPUdb)
        assert kinetica_llm._llm_type == "kinetica-sqlassist"

    @pytest.mark.vcr()
    def test_load_context(self) -> None:
        """Load the LLM context from the DB."""
        kinetica_llm = ChatKinetica()  # type: ignore[call-arg]
        ctx_messages = kinetica_llm.load_messages_from_context(self.context_name)
        # The context begins with the system prompt; the last two entries are
        # the sample question/answer pair created by test_setup().
        system_message = ctx_messages[0]
        assert isinstance(system_message, SystemMessage)
        last_question = ctx_messages[-2]
        assert isinstance(last_question, HumanMessage)
        assert last_question.content == "How many male users are there?"

    @pytest.mark.vcr()
    def test_generate(self) -> None:
        """Generate SQL from a chain."""
        kinetica_llm = ChatKinetica()  # type: ignore[call-arg]
        # create chain
        ctx_messages = kinetica_llm.load_messages_from_context(self.context_name)
        ctx_messages.append(("human", "{input}"))
        prompt_template = ChatPromptTemplate.from_messages(ctx_messages)
        chain = prompt_template | kinetica_llm
        resp_message = chain.invoke(
            {"input": "What are the female users ordered by username?"}
        )
        LOG.info(f"SQL Response: {resp_message.content}")
        assert isinstance(resp_message, AIMessage)

    @pytest.mark.vcr()
    def test_full_chain(self) -> None:
        """Generate SQL from a chain and execute the query."""
        kinetica_llm = ChatKinetica()  # type: ignore[call-arg]
        # create chain; the output parser also executes the generated SQL
        ctx_messages = kinetica_llm.load_messages_from_context(self.context_name)
        ctx_messages.append(("human", "{input}"))
        prompt_template = ChatPromptTemplate.from_messages(ctx_messages)
        chain = (
            prompt_template
            | kinetica_llm
            | KineticaSqlOutputParser(kdbc=kinetica_llm.kdbc)
        )
        sql_response: KineticaSqlResponse = chain.invoke(
            {"input": "What are the female users ordered by username?"}
        )
        assert isinstance(sql_response, KineticaSqlResponse)
        LOG.info(f"SQL Response: {sql_response.sql}")
        assert isinstance(sql_response.dataframe, pd.DataFrame)
        # The seeded faker data makes the first username deterministic.
        users = sql_response.dataframe["username"]
        assert users[0] == "alexander40"

    @classmethod
    def _create_fake_records(cls, count: int) -> Generator:
        """Generator for fake records."""
        import faker

        # Fixed seed keeps the generated profiles (and assertions) stable.
        faker.Faker.seed(5467)
        faker_inst = faker.Faker(locale="en-US")
        for id in range(0, count):
            rec = dict(id=id, **faker_inst.simple_profile())
            rec["birthdate"] = pd.Timestamp(rec["birthdate"])
            yield rec

    @classmethod
    def _create_test_table(
        cls, kinetica_dbc: "gpudb.GPUdb", table_name: str, num_records: int
    ) -> "gpudb.GPUdbTable":
        """Create a table from the fake records generator."""
        import gpudb

        table_df = pd.DataFrame.from_records(
            data=cls._create_fake_records(num_records), index="id"
        )
        LOG.info(f"Creating test table '{table_name}' with {num_records} records...")
        gpudb_table = gpudb.GPUdbTable.from_df(
            table_df,
            db=kinetica_dbc,
            table_name=table_name,
            clear_table=True,
            load_data=True,
            column_types={},
        )
        return gpudb_table

    @classmethod
    def _check_error(cls, response: dict) -> None:
        """Convert a DB error into an exception."""
        status = response["status_info"]["status"]
        if status != "OK":
            message = response["status_info"]["message"]
            raise Exception("[%s]: %s" % (status, message))

    @classmethod
    def _create_llm_context(
        cls, kinetica_dbc: "gpudb.GPUdb", context_name: str
    ) -> None:
        """Create an LLM context for the table."""
        # Doubled single quotes inside the SAMPLES literal escape quotes in SQL.
        sql = f"""
        CREATE OR REPLACE CONTEXT {context_name}
        (
            TABLE = {cls.table_name}
            COMMENT = 'Contains user profiles.'
        ),
        (
            SAMPLES = (
            'How many male users are there?' =
            'select count(1) as num_users
            from {cls.table_name}
            where sex = ''M'';')
        )
        """
        LOG.info(f"Creating context: {context_name}")
        response = kinetica_dbc.execute_sql(sql)
        cls._check_error(response)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_litellm_router.py | """Test LiteLLM Router API wrapper."""
import asyncio
from copy import deepcopy
from typing import Any, AsyncGenerator, Coroutine, Dict, List, Tuple, Union, cast
import pytest
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models.litellm_router import ChatLiteLLMRouter
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
# Routed model group name and fake Azure deployment config used by the tests.
model_group = "gpt-4"
fake_model_prefix = "azure/fake-deployment-name-"
fake_models_names = [fake_model_prefix + suffix for suffix in ["1", "2"]]
fake_api_key = "fakekeyvalue"
fake_api_version = "XXXX-XX-XX"
fake_api_base = "https://faketesturl/"
# Canned answer returned by FakeCompletion (streamed in two chunks).
fake_chunks = ["This is ", "a fake answer."]
fake_answer = "".join(fake_chunks)
token_usage_key_name = "token_usage"

# Two deployments in the same model group so the router has a real choice.
model_list = [
    {
        "model_name": model_group,
        "litellm_params": {
            "model": fake_models_names[0],
            "api_key": fake_api_key,
            "api_version": fake_api_version,
            "api_base": fake_api_base,
        },
    },
    {
        "model_name": model_group,
        "litellm_params": {
            "model": fake_models_names[1],
            "api_key": fake_api_key,
            "api_version": fake_api_version,
            "api_base": fake_api_base,
        },
    },
]
class FakeCompletion:
    """Stand-in for litellm's completion()/acompletion() that records inputs."""

    def __init__(self) -> None:
        # Every kwargs dict passed to completion()/acompletion(), kept for
        # later inspection via check_inputs().
        self.seen_inputs: List[Any] = []

    @staticmethod
    def _get_new_result_and_choices(
        base_result: Dict[str, Any],
    ) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]:
        """Deep-copy the template result; return it plus its choices list."""
        result = deepcopy(base_result)
        choices = cast(List[Dict[str, Any]], result["choices"])
        return result, choices

    @staticmethod
    def _get_next_result(
        agen: AsyncGenerator[Dict[str, Any], None],
    ) -> Dict[str, Any]:
        """Synchronously pull one item from the async generator."""
        coroutine = cast(Coroutine, agen.__anext__())
        return asyncio.run(coroutine)

    async def _get_fake_results_agenerator(
        self, **kwargs: Any
    ) -> AsyncGenerator[Dict[str, Any], None]:
        """Yield fake OpenAI-shaped chat-completion payloads.

        Streaming mode yields one delta per chunk plus a final stop delta;
        non-streaming mode yields one full message with usage info.
        """
        from litellm import Usage

        self.seen_inputs.append(kwargs)
        base_result = {
            "choices": [
                {
                    "index": 0,
                }
            ],
            "created": 0,
            "id": "",
            "model": model_group,
            "object": "chat.completion",
        }
        if kwargs["stream"]:
            for chunk_index in range(0, len(fake_chunks)):
                result, choices = self._get_new_result_and_choices(base_result)
                choice = choices[0]
                choice["delta"] = {
                    "role": "assistant",
                    "content": fake_chunks[chunk_index],
                    "function_call": None,
                }
                choice["finish_reason"] = None
                # no usage here, since no usage from OpenAI API for streaming yet
                # https://community.openai.com/t/usage-info-in-api-responses/18862
                yield result
            result, choices = self._get_new_result_and_choices(base_result)
            choice = choices[0]
            choice["delta"] = {}
            choice["finish_reason"] = "stop"
            # no usage here, since no usage from OpenAI API for streaming yet
            # https://community.openai.com/t/usage-info-in-api-responses/18862
            yield result
        else:
            result, choices = self._get_new_result_and_choices(base_result)
            choice = choices[0]
            choice["message"] = {
                "content": fake_answer,
                "role": "assistant",
            }
            choice["finish_reason"] = "stop"
            result["usage"] = Usage(
                completion_tokens=1, prompt_tokens=2, total_tokens=3
            )
            yield result

    def completion(self, **kwargs: Any) -> Union[List, Dict[str, Any]]:
        """Sync fake for litellm.completion; drains the async generator."""
        agen = self._get_fake_results_agenerator(**kwargs)
        if kwargs["stream"]:
            results: List[Dict[str, Any]] = []
            while True:
                try:
                    results.append(self._get_next_result(agen))
                except StopAsyncIteration:
                    break
            return results
        else:
            # there is only one result for non-streaming
            return self._get_next_result(agen)

    async def acompletion(
        self, **kwargs: Any
    ) -> Union[AsyncGenerator[Dict[str, Any], None], Dict[str, Any]]:
        """Async fake for litellm.acompletion."""
        agen = self._get_fake_results_agenerator(**kwargs)
        if kwargs["stream"]:
            return agen
        else:
            # there is only one result for non-streaming
            return await agen.__anext__()

    def check_inputs(self, expected_num_calls: int) -> None:
        """Assert call count and that router metadata/credentials were passed."""
        assert len(self.seen_inputs) == expected_num_calls
        for kwargs in self.seen_inputs:
            metadata = kwargs["metadata"]
            assert metadata["model_group"] == model_group
            # LiteLLM router chooses one model name from the model_list
            assert kwargs["model"] in fake_models_names
            assert metadata["deployment"] in fake_models_names
            assert kwargs["api_key"] == fake_api_key
            assert kwargs["api_version"] == fake_api_version
            assert kwargs["api_base"] == fake_api_base
@pytest.fixture
def fake_completion() -> FakeCompletion:
    """Fake AI completion for testing."""
    import litellm

    completion_stub = FakeCompletion()
    # Turn off LiteLLM's built-in telemetry
    litellm.telemetry = False
    # Route both of litellm's completion entry points through the stub.
    litellm.completion = completion_stub.completion
    litellm.acompletion = completion_stub.acompletion
    return completion_stub
@pytest.fixture
def litellm_router() -> Any:
    """LiteLLM router for testing.

    Builds a real ``litellm.Router`` over the module-level ``model_list`` so
    tests exercise the router's model selection against stubbed completions.
    """
    from litellm import Router

    return Router(
        model_list=model_list,
    )
@pytest.mark.scheduled
def test_litellm_router_call(
    fake_completion: FakeCompletion, litellm_router: Any
) -> None:
    """Test valid call to LiteLLM Router."""
    model = ChatLiteLLMRouter(router=litellm_router)
    result = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(result, AIMessage)
    assert isinstance(result.content, str)
    assert result.content == fake_answer
    # no usage check here, since response is only an AIMessage
    fake_completion.check_inputs(expected_num_calls=1)
@pytest.mark.scheduled
def test_litellm_router_generate(
    fake_completion: FakeCompletion, litellm_router: Any
) -> None:
    """Test generate method of LiteLLM Router."""
    from litellm import Usage

    chat = ChatLiteLLMRouter(router=litellm_router)
    chat_messages: List[List[BaseMessage]] = [
        [HumanMessage(content="How many toes do dogs have?")]
    ]
    # Copy inputs up front so we can verify generate() does not mutate them.
    messages_copy = [messages.copy() for messages in chat_messages]
    result: LLMResult = chat.generate(chat_messages)
    assert isinstance(result, LLMResult)
    for generations in result.generations:
        # One generation per input prompt.
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.message.content == generation.text
            assert generation.message.content == fake_answer
    assert chat_messages == messages_copy
    assert result.llm_output is not None
    # Usage must match the fixed values yielded by the fake completion.
    assert result.llm_output[token_usage_key_name] == Usage(
        completion_tokens=1, prompt_tokens=2, total_tokens=3
    )
    fake_completion.check_inputs(expected_num_calls=1)
@pytest.mark.scheduled
def test_litellm_router_streaming(
    fake_completion: FakeCompletion, litellm_router: Any
) -> None:
    """Test streaming tokens from LiteLLM Router."""
    model = ChatLiteLLMRouter(router=litellm_router, streaming=True)
    result = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(result, AIMessage)
    assert isinstance(result.content, str)
    assert result.content == fake_answer
    # no usage check here, since response is only an AIMessage
    fake_completion.check_inputs(expected_num_calls=1)
@pytest.mark.scheduled
def test_litellm_router_streaming_callback(
    fake_completion: FakeCompletion, litellm_router: Any
) -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    chat = ChatLiteLLMRouter(
        router=litellm_router,
        streaming=True,
        callbacks=[callback_handler],
        verbose=True,
    )
    message = HumanMessage(content="Write me a sentence with 10 words.")
    response = chat.invoke([message])
    # The fake streams multiple chunks, so more than one token callback fires.
    assert callback_handler.llm_streams > 1
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
    assert response.content == fake_answer
    # no usage check here, since response is only an AIMessage
    fake_completion.check_inputs(expected_num_calls=1)
@pytest.mark.scheduled
async def test_async_litellm_router(
    fake_completion: FakeCompletion, litellm_router: Any
) -> None:
    """Test async generation."""
    from litellm import Usage

    chat = ChatLiteLLMRouter(router=litellm_router)
    message = HumanMessage(content="Hello")
    response = await chat.agenerate([[message], [message]])
    assert isinstance(response, LLMResult)
    # Two prompts in, two generation lists out.
    assert len(response.generations) == 2
    for generations in response.generations:
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.message.content == generation.text
            assert generation.message.content == fake_answer
    assert response.llm_output is not None
    # Usage is summed across both calls (1+1, 2+2, 3+3 from the fake).
    assert response.llm_output[token_usage_key_name] == Usage(
        completion_tokens=2, prompt_tokens=4, total_tokens=6
    )
    fake_completion.check_inputs(expected_num_calls=2)
@pytest.mark.scheduled
async def test_async_litellm_router_streaming(
    fake_completion: FakeCompletion, litellm_router: Any
) -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    chat = ChatLiteLLMRouter(
        router=litellm_router,
        streaming=True,
        callbacks=[callback_handler],
        verbose=True,
    )
    message = HumanMessage(content="Hello")
    response = await chat.agenerate([[message], [message]])
    # At least one streamed token must have reached the callback.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 2
    for generations in response.generations:
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.message.content == generation.text
            assert generation.message.content == fake_answer
    # no usage check here, since no usage from OpenAI API for streaming yet
    # https://community.openai.com/t/usage-info-in-api-responses/18862
    fake_completion.check_inputs(expected_num_calls=2)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_openai.py | """Test ChatOpenAI wrapper."""
from typing import Any, Optional
import pytest
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage
from langchain_core.outputs import (
ChatGeneration,
ChatResult,
LLMResult,
)
from langchain_core.prompts import ChatPromptTemplate
from pydantic import BaseModel, Field
from langchain_community.chat_models.openai import ChatOpenAI
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
@pytest.mark.scheduled
def test_chat_openai() -> None:
    """Test ChatOpenAI wrapper."""
    model = ChatOpenAI(
        temperature=0.7,
        base_url=None,
        organization=None,
        openai_proxy=None,
        timeout=10.0,
        max_retries=3,
        http_client=None,
        n=1,
        max_tokens=10,
        default_headers=None,
        default_query=None,
    )
    result = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(result, BaseMessage)
    assert isinstance(result.content, str)
def test_chat_openai_model() -> None:
    """Test ChatOpenAI wrapper handles model_name."""
    # Both the canonical `model` kwarg and the `model_name` alias work.
    assert ChatOpenAI(model="foo").model_name == "foo"
    assert ChatOpenAI(model_name="bar").model_name == "bar"  # type: ignore[call-arg]
def test_chat_openai_system_message() -> None:
    """Test ChatOpenAI wrapper with system message."""
    model = ChatOpenAI(max_tokens=10)
    conversation = [
        SystemMessage(content="You are to chat with the user."),
        HumanMessage(content="Hello"),
    ]
    result = model.invoke(conversation)
    assert isinstance(result, BaseMessage)
    assert isinstance(result.content, str)
@pytest.mark.scheduled
def test_chat_openai_generate() -> None:
    """Test ChatOpenAI wrapper with generate."""
    chat = ChatOpenAI(max_tokens=10, n=2)
    message = HumanMessage(content="Hello")
    response = chat.generate([[message], [message]])
    assert isinstance(response, LLMResult)
    # Two prompts in, each producing n=2 candidate completions.
    assert len(response.generations) == 2
    assert response.llm_output
    for generations in response.generations:
        assert len(generations) == 2
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
@pytest.mark.scheduled
def test_chat_openai_multiple_completions() -> None:
    """Test ChatOpenAI wrapper with multiple completions."""
    model = ChatOpenAI(max_tokens=10, n=5)
    result = model._generate([HumanMessage(content="Hello")])
    assert isinstance(result, ChatResult)
    # n=5 yields five candidate generations for the single prompt.
    assert len(result.generations) == 5
    for gen in result.generations:
        assert isinstance(gen.message, BaseMessage)
        assert isinstance(gen.message.content, str)
@pytest.mark.scheduled
def test_chat_openai_streaming() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = ChatOpenAI(
        max_tokens=10,
        streaming=True,
        temperature=0,
        callback_manager=callback_manager,
    verbose=True,
    )
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    # Streaming mode must emit at least one on_llm_new_token callback.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, BaseMessage)
@pytest.mark.scheduled
def test_chat_openai_streaming_generation_info() -> None:
    """Test that generation info is preserved when streaming."""

    class _FakeCallback(FakeCallbackHandler):
        # NOTE(review): class-level mutable default; shared across instances
        # unless the base is a pydantic model (which copies it per instance).
        # Only one instance is created below, so behavior is fine either way.
        saved_things: dict = {}

        def on_llm_end(
            self,
            *args: Any,
            **kwargs: Any,
        ) -> Any:
            # Save the generation
            self.saved_things["generation"] = args[0]

    callback = _FakeCallback()
    callback_manager = CallbackManager([callback])
    chat = ChatOpenAI(
        max_tokens=2,
        temperature=0,
        callback_manager=callback_manager,
    )
    list(chat.stream("hi"))
    generation = callback.saved_things["generation"]
    # `Hello!` is two tokens, assert that is what is returned
    assert generation.generations[0][0].text == "Hello!"
def test_chat_openai_llm_output_contains_model_name() -> None:
    """Test llm_output contains model_name."""
    model = ChatOpenAI(max_tokens=10)
    output = model.generate([[HumanMessage(content="Hello")]])
    assert output.llm_output is not None
    assert output.llm_output["model_name"] == model.model_name
def test_chat_openai_streaming_llm_output_contains_model_name() -> None:
    """Test llm_output contains model_name when streaming."""
    model = ChatOpenAI(max_tokens=10, streaming=True)
    output = model.generate([[HumanMessage(content="Hello")]])
    assert output.llm_output is not None
    assert output.llm_output["model_name"] == model.model_name
def test_chat_openai_invalid_streaming_params() -> None:
    """Test that combining streaming=True with n > 1 is rejected."""
    # Streaming cannot return multiple candidate completions.
    with pytest.raises(ValueError):
        ChatOpenAI(
            max_tokens=10,
            streaming=True,
            temperature=0,
            n=5,
        )
@pytest.mark.scheduled
async def test_async_chat_openai() -> None:
    """Test async generation."""
    chat = ChatOpenAI(max_tokens=10, n=2)
    message = HumanMessage(content="Hello")
    response = await chat.agenerate([[message], [message]])
    assert isinstance(response, LLMResult)
    # Two prompts in, each producing n=2 candidate completions.
    assert len(response.generations) == 2
    assert response.llm_output
    for generations in response.generations:
        assert len(generations) == 2
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
@pytest.mark.scheduled
async def test_async_chat_openai_streaming() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = ChatOpenAI(
        max_tokens=10,
        streaming=True,
        temperature=0,
        callback_manager=callback_manager,
        verbose=True,
    )
    message = HumanMessage(content="Hello")
    response = await chat.agenerate([[message], [message]])
    # Streaming mode must emit at least one on_llm_new_token callback.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 2
    for generations in response.generations:
        # Streaming yields a single generation per prompt.
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
@pytest.mark.scheduled
async def test_async_chat_openai_bind_functions() -> None:
    """Test ChatOpenAI with a function bound via ``bind_functions``."""

    class Person(BaseModel):
        """Identifying information about a person."""

        name: str = Field(..., title="Name", description="The person's name")
        age: int = Field(..., title="Age", description="The person's age")
        fav_food: Optional[str] = Field(
            default=None, title="Fav Food", description="The person's favorite food"
        )

    # function_call="Person" forces the model to call that function.
    chat = ChatOpenAI(
        max_tokens=30,
        n=1,
        streaming=True,
    ).bind_functions(functions=[Person], function_call="Person")
    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", "Use the provided Person function"),
            ("user", "{input}"),
        ]
    )
    chain = prompt | chat
    message = HumanMessage(content="Sally is 13 years old")
    response = await chain.abatch([{"input": message}])
    assert isinstance(response, list)
    assert len(response) == 1
    for generation in response:
        assert isinstance(generation, AIMessage)
def test_chat_openai_extra_kwargs() -> None:
    """Test extra kwargs to chat openai."""
    # Check that foo is saved in extra_kwargs.
    llm = ChatOpenAI(foo=3, max_tokens=10)  # type: ignore[call-arg]
    assert llm.max_tokens == 10
    assert llm.model_kwargs == {"foo": 3}
    # Test that if extra_kwargs are provided, they are added to it.
    llm = ChatOpenAI(foo=3, model_kwargs={"bar": 2})  # type: ignore[call-arg]
    assert llm.model_kwargs == {"foo": 3, "bar": 2}
    # Test that if provided twice it errors
    with pytest.raises(ValueError):
        ChatOpenAI(foo=3, model_kwargs={"foo": 2})  # type: ignore[call-arg]
    # Test that if explicit param is specified in kwargs it errors
    with pytest.raises(ValueError):
        ChatOpenAI(model_kwargs={"temperature": 0.2})
    # Test that "model" cannot be specified in kwargs
    with pytest.raises(ValueError):
        ChatOpenAI(model_kwargs={"model": "gpt-3.5-turbo-instruct"})
@pytest.mark.scheduled
def test_openai_streaming() -> None:
    """Test streaming tokens from OpenAI."""
    model = ChatOpenAI(max_tokens=10)
    for chunk in model.stream("I'm Pickle Rick"):
        assert isinstance(chunk.content, str)
@pytest.mark.scheduled
async def test_openai_astream() -> None:
    """Test async streaming tokens from OpenAI."""
    model = ChatOpenAI(max_tokens=10)
    async for chunk in model.astream("I'm Pickle Rick"):
        assert isinstance(chunk.content, str)
@pytest.mark.scheduled
async def test_openai_abatch() -> None:
    """Test async batching with ChatOpenAI."""
    model = ChatOpenAI(max_tokens=10)
    results = await model.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"])
    for message in results:
        assert isinstance(message.content, str)
@pytest.mark.scheduled
async def test_openai_abatch_tags() -> None:
    """Test batch tokens from ChatOpenAI."""
    model = ChatOpenAI(max_tokens=10)
    prompts = ["I'm Pickle Rick", "I'm not Pickle Rick"]
    results = await model.abatch(prompts, config={"tags": ["foo"]})
    for message in results:
        assert isinstance(message.content, str)
@pytest.mark.scheduled
def test_openai_batch() -> None:
    """Test batch tokens from ChatOpenAI."""
    model = ChatOpenAI(max_tokens=10)
    results = model.batch(["I'm Pickle Rick", "I'm not Pickle Rick"])
    for message in results:
        assert isinstance(message.content, str)
@pytest.mark.scheduled
async def test_openai_ainvoke() -> None:
    """Test async invoke with ChatOpenAI."""
    model = ChatOpenAI(max_tokens=10)
    message = await model.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
    assert isinstance(message.content, str)
@pytest.mark.scheduled
def test_openai_invoke() -> None:
    """Test invoke tokens from ChatOpenAI."""
    llm = ChatOpenAI(max_tokens=10)
    # Dict literal for config, consistent with the sibling tests in this file.
    result = llm.invoke("I'm Pickle Rick", config={"tags": ["foo"]})
    assert isinstance(result.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_edenai.py | """Test EdenAI API wrapper."""
from typing import List
import pytest
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models.edenai import (
ChatEdenAI,
)
@pytest.mark.scheduled
def test_chat_edenai() -> None:
    """Test ChatEdenAI wrapper."""
    chat_model = ChatEdenAI(  # type: ignore[call-arg]
        provider="openai", model="gpt-3.5-turbo", temperature=0, max_tokens=1000
    )
    result = chat_model.invoke([HumanMessage(content="Who are you ?")])
    assert isinstance(result, AIMessage)
    assert isinstance(result.content, str)
@pytest.mark.scheduled
def test_edenai_generate() -> None:
    """Test generate method of edenai."""
    chat = ChatEdenAI(provider="google")  # type: ignore[call-arg]
    chat_messages: List[List[BaseMessage]] = [
        [HumanMessage(content="What is the meaning of life?")]
    ]
    # Copy inputs up front so we can verify generate() does not mutate them.
    messages_copy = [messages.copy() for messages in chat_messages]
    result: LLMResult = chat.generate(chat_messages)
    assert isinstance(result, LLMResult)
    for response in result.generations[0]:
        assert isinstance(response, ChatGeneration)
        assert isinstance(response.text, str)
        assert response.text == response.message.content
    assert chat_messages == messages_copy
@pytest.mark.scheduled
async def test_edenai_async_generate() -> None:
    """Test async generation."""
    chat = ChatEdenAI(provider="google", max_tokens=50)  # type: ignore[call-arg]
    message = HumanMessage(content="Hello")
    # Two identical prompts exercise the batched async path.
    result: LLMResult = await chat.agenerate([[message], [message]])
    assert isinstance(result, LLMResult)
    for response in result.generations[0]:
        assert isinstance(response, ChatGeneration)
        assert isinstance(response.text, str)
        assert response.text == response.message.content
@pytest.mark.scheduled
def test_edenai_streaming() -> None:
    """Test streaming EdenAI chat."""
    model = ChatEdenAI(provider="openai", max_tokens=50)  # type: ignore[call-arg]
    for piece in model.stream("Generate a high fantasy story."):
        assert isinstance(piece.content, str)
@pytest.mark.scheduled
async def test_edenai_astream() -> None:
    """Test async streaming from EdenAI."""
    model = ChatEdenAI(provider="openai", max_tokens=50)  # type: ignore[call-arg]
    async for piece in model.astream("Generate a high fantasy story."):
        assert isinstance(piece.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_litellm.py | """Test Anthropic API wrapper."""
from typing import List
from langchain_core.callbacks import (
CallbackManager,
)
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models.litellm import ChatLiteLLM
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
def test_litellm_call() -> None:
    """Test valid call to litellm."""
    model = ChatLiteLLM(  # type: ignore[call-arg]
        model="test",
    )
    result = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(result, AIMessage)
    assert isinstance(result.content, str)
def test_litellm_generate() -> None:
    """Test generate method of ChatLiteLLM."""
    chat = ChatLiteLLM(model="test")  # type: ignore[call-arg]
    chat_messages: List[List[BaseMessage]] = [
        [HumanMessage(content="How many toes do dogs have?")]
    ]
    # Copy inputs up front so we can verify generate() does not mutate them.
    messages_copy = [messages.copy() for messages in chat_messages]
    result: LLMResult = chat.generate(chat_messages)
    assert isinstance(result, LLMResult)
    for response in result.generations[0]:
        assert isinstance(response, ChatGeneration)
        assert isinstance(response.text, str)
        assert response.text == response.message.content
    assert chat_messages == messages_copy
def test_litellm_streaming() -> None:
    """Test streaming tokens from ChatLiteLLM."""
    model = ChatLiteLLM(model="test", streaming=True)  # type: ignore[call-arg]
    result = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(result, AIMessage)
    assert isinstance(result.content, str)
def test_litellm_streaming_callback() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = ChatLiteLLM(  # type: ignore[call-arg]
        model="test",
        streaming=True,
        callback_manager=callback_manager,
        verbose=True,
    )
    message = HumanMessage(content="Write me a sentence with 10 words.")
    chat.invoke([message])
    # A multi-word completion should stream more than one token.
    assert callback_handler.llm_streams > 1
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_promptlayer_openai.py | """Test PromptLayerChatOpenAI wrapper."""
import pytest
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
from langchain_core.outputs import ChatGeneration, ChatResult, LLMResult
from langchain_community.chat_models.promptlayer_openai import PromptLayerChatOpenAI
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
def test_promptlayer_chat_openai() -> None:
    """Test PromptLayerChatOpenAI wrapper."""
    model = PromptLayerChatOpenAI(max_tokens=10)  # type: ignore[call-arg]
    result = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(result, BaseMessage)
    assert isinstance(result.content, str)
def test_promptlayer_chat_openai_system_message() -> None:
    """Test PromptLayerChatOpenAI wrapper with system message."""
    model = PromptLayerChatOpenAI(max_tokens=10)  # type: ignore[call-arg]
    conversation = [
        SystemMessage(content="You are to chat with the user."),
        HumanMessage(content="Hello"),
    ]
    result = model.invoke(conversation)
    assert isinstance(result, BaseMessage)
    assert isinstance(result.content, str)
def test_promptlayer_chat_openai_generate() -> None:
    """Test PromptLayerChatOpenAI wrapper with generate."""
    chat = PromptLayerChatOpenAI(max_tokens=10, n=2)  # type: ignore[call-arg]
    message = HumanMessage(content="Hello")
    response = chat.generate([[message], [message]])
    assert isinstance(response, LLMResult)
    # Two prompts in, each producing n=2 candidate completions.
    assert len(response.generations) == 2
    for generations in response.generations:
        assert len(generations) == 2
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
def test_promptlayer_chat_openai_multiple_completions() -> None:
    """Test PromptLayerChatOpenAI wrapper with multiple completions."""
    chat = PromptLayerChatOpenAI(max_tokens=10, n=5)  # type: ignore[call-arg]
    message = HumanMessage(content="Hello")
    response = chat._generate([message])
    assert isinstance(response, ChatResult)
    # n=5 yields five candidate generations for the single prompt.
    assert len(response.generations) == 5
    for generation in response.generations:
        assert isinstance(generation.message, BaseMessage)
        assert isinstance(generation.message.content, str)
def test_promptlayer_chat_openai_streaming() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = PromptLayerChatOpenAI(  # type: ignore[call-arg]
        max_tokens=10,
        streaming=True,
        temperature=0,
        callback_manager=callback_manager,
        verbose=True,
    )
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    # Streaming mode must emit at least one on_llm_new_token callback.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, BaseMessage)
def test_promptlayer_chat_openai_invalid_streaming_params() -> None:
    """Test that combining streaming=True with n > 1 is rejected."""
    # Streaming cannot return multiple candidate completions.
    with pytest.raises(ValueError):
        PromptLayerChatOpenAI(  # type: ignore[call-arg]
            max_tokens=10,
            streaming=True,
            temperature=0,
            n=5,
        )
async def test_async_promptlayer_chat_openai() -> None:
    """Test async generation."""
    chat = PromptLayerChatOpenAI(max_tokens=10, n=2)  # type: ignore[call-arg]
    message = HumanMessage(content="Hello")
    response = await chat.agenerate([[message], [message]])
    assert isinstance(response, LLMResult)
    # Two prompts in, each producing n=2 candidate completions.
    assert len(response.generations) == 2
    for generations in response.generations:
        assert len(generations) == 2
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
async def test_async_promptlayer_chat_openai_streaming() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = PromptLayerChatOpenAI(  # type: ignore[call-arg]
        max_tokens=10,
        streaming=True,
        temperature=0,
        callback_manager=callback_manager,
        verbose=True,
    )
    message = HumanMessage(content="Hello")
    response = await chat.agenerate([[message], [message]])
    # Streaming mode must emit at least one on_llm_new_token callback.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 2
    for generations in response.generations:
        # Streaming yields a single generation per prompt.
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_pai_eas_chat_endpoint.py | """Test AliCloud Pai Eas Chat Model."""
import os
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models.pai_eas_endpoint import PaiEasChatEndpoint
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
def test_pai_eas_call() -> None:
    """A basic invoke against the PAI-EAS endpoint returns a text message."""
    model = PaiEasChatEndpoint(
        eas_service_url=os.getenv("EAS_SERVICE_URL"),  # type: ignore[arg-type]
        eas_service_token=os.getenv("EAS_SERVICE_TOKEN"),  # type: ignore[arg-type]
    )
    result = model.invoke([HumanMessage(content="Say foo:")])
    assert isinstance(result, BaseMessage)
    assert isinstance(result.content, str)
def test_multiple_history() -> None:
    """Tests multiple history works."""
    chat = PaiEasChatEndpoint(
        eas_service_url=os.getenv("EAS_SERVICE_URL"),  # type: ignore[arg-type]
        eas_service_token=os.getenv("EAS_SERVICE_TOKEN"),  # type: ignore[arg-type]
    )
    # A human/AI/human sequence exercises the conversation-history path.
    response = chat.invoke(
        [
            HumanMessage(content="Hello."),
            AIMessage(content="Hello!"),
            HumanMessage(content="How are you doing?"),
        ]
    )
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_stream() -> None:
    """Test that stream works."""
    chat = PaiEasChatEndpoint(
        eas_service_url=os.getenv("EAS_SERVICE_URL"),  # type: ignore[arg-type]
        eas_service_token=os.getenv("EAS_SERVICE_TOKEN"),  # type: ignore[arg-type]
        streaming=True,
    )
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    response = chat.invoke(
        [
            HumanMessage(content="Hello."),
            AIMessage(content="Hello!"),
            HumanMessage(content="Who are you?"),
        ],
        stream=True,
        config={"callbacks": callback_manager},
    )
    # Streaming mode must emit at least one on_llm_new_token callback.
    assert callback_handler.llm_streams > 0
    assert isinstance(response.content, str)
def test_multiple_messages() -> None:
    """Tests multiple messages works."""
    chat = PaiEasChatEndpoint(
        eas_service_url=os.getenv("EAS_SERVICE_URL"),  # type: ignore[arg-type]
        eas_service_token=os.getenv("EAS_SERVICE_TOKEN"),  # type: ignore[arg-type]
    )
    message = HumanMessage(content="Hi, how are you.")
    response = chat.generate([[message], [message]])
    assert isinstance(response, LLMResult)
    # Two prompts in, one generation per prompt out.
    assert len(response.generations) == 2
    for generations in response.generations:
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_hunyuan.py | import uuid
from operator import itemgetter
from typing import Any
import pytest
from langchain.prompts.chat import (
ChatPromptTemplate,
HumanMessagePromptTemplate,
SystemMessagePromptTemplate,
)
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.runnables.base import RunnableSerializable
from langchain_community.chat_models.hunyuan import ChatHunyuan
@pytest.mark.requires("tencentcloud-sdk-python")
def test_chat_hunyuan() -> None:
    """Basic invoke returns an AIMessage whose id is a valid request UUID."""
    model = ChatHunyuan()
    result = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(result, AIMessage)
    assert isinstance(result.content, str)
    assert result.id is not None, "request_id is empty"
    assert uuid.UUID(result.id), "Invalid UUID"
@pytest.mark.requires("tencentcloud-sdk-python")
def test_chat_hunyuan_with_temperature() -> None:
    """Invoke with an explicit temperature still returns a valid response."""
    chat = ChatHunyuan(temperature=0.6)
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
    assert response.id is not None, "request_id is empty"
    assert uuid.UUID(response.id), "Invalid UUID"
@pytest.mark.requires("tencentcloud-sdk-python")
def test_chat_hunyuan_with_model_name() -> None:
    """Invoke with an explicit model name still returns a valid response."""
    chat = ChatHunyuan(model="hunyuan-standard")
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
    assert response.id is not None, "request_id is empty"
    assert uuid.UUID(response.id), "Invalid UUID"
@pytest.mark.requires("tencentcloud-sdk-python")
def test_chat_hunyuan_with_stream() -> None:
    """Invoke with streaming enabled still returns a valid response."""
    chat = ChatHunyuan(streaming=True)
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
    assert response.id is not None, "request_id is empty"
    assert uuid.UUID(response.id), "Invalid UUID"
@pytest.mark.requires("tencentcloud-sdk-python")
def test_chat_hunyuan_with_prompt_template() -> None:
    """The model composes into an LCEL chain behind a chat prompt template."""
    system_prompt = SystemMessagePromptTemplate.from_template(
        "You are a helpful assistant! Your name is {name}."
    )
    user_prompt = HumanMessagePromptTemplate.from_template("Question: {query}")
    chat_prompt = ChatPromptTemplate.from_messages([system_prompt, user_prompt])
    # Map the input dict into the prompt variables, then pipe into the model.
    chat: RunnableSerializable[Any, Any] = (
        {"query": itemgetter("query"), "name": itemgetter("name")}
        | chat_prompt
        | ChatHunyuan()
    )
    response = chat.invoke({"query": "Hello", "name": "Tom"})
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
    assert response.id is not None, "request_id is empty"
    assert uuid.UUID(response.id), "Invalid UUID"
def test_extra_kwargs() -> None:
    """Constructor kwargs are stored on the model instance."""
    model = ChatHunyuan(temperature=0.88, top_p=0.7)
    assert model.temperature == 0.88
    assert model.top_p == 0.7
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_ernie.py | import pytest
from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.chat_models.ernie import ErnieBotChat
def test_chat_ernie_bot() -> None:
    """Basic invoke against ErnieBotChat returns a text AIMessage."""
    model = ErnieBotChat()
    result = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(result, AIMessage)
    assert isinstance(result.content, str)
def test_chat_ernie_bot_with_model_name() -> None:
    """Invoke with an explicit model name still returns a valid response."""
    chat = ErnieBotChat(model_name="ERNIE-Bot")
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
def test_chat_ernie_bot_with_temperature() -> None:
    """Temperature at the valid upper bound (1.0) is accepted."""
    chat = ErnieBotChat(model_name="ERNIE-Bot", temperature=1.0)
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
def test_chat_ernie_bot_with_kwargs() -> None:
    """Per-call kwargs (temperature, top_p) are accepted by invoke."""
    chat = ErnieBotChat()
    message = HumanMessage(content="Hello")
    response = chat.invoke([message], temperature=0.88, top_p=0.7)
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
def test_extra_kwargs() -> None:
    """Constructor kwargs are stored on the model instance."""
    chat = ErnieBotChat(temperature=0.88, top_p=0.7)
    assert chat.temperature == 0.88
    assert chat.top_p == 0.7
def test_wrong_temperature_1() -> None:
    """A temperature above 1.0 is rejected by the service."""
    chat = ErnieBotChat()
    with pytest.raises(ValueError) as excinfo:
        chat.invoke([HumanMessage(content="Hello")], temperature=1.2)
    assert "parameter check failed, temperature range is (0, 1.0]" in str(excinfo)
def test_wrong_temperature_2() -> None:
    """A temperature of 0 (outside the half-open range) is rejected."""
    chat = ErnieBotChat()
    with pytest.raises(ValueError) as excinfo:
        chat.invoke([HumanMessage(content="Hello")], temperature=0)
    assert "parameter check failed, temperature range is (0, 1.0]" in str(excinfo)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_javelin_ai_gateway.py | """Test `Javelin AI Gateway` chat models"""
from pydantic import SecretStr
from langchain_community.chat_models import ChatJavelinAIGateway
def test_api_key_is_secret_string() -> None:
    """The Javelin API key is wrapped in a SecretStr but stays recoverable."""
    model = ChatJavelinAIGateway(
        gateway_uri="<javelin-ai-gateway-uri>",
        route="<javelin-ai-gateway-chat-route>",
        javelin_api_key="secret-api-key",
        params={"temperature": 0.1},
    )
    assert isinstance(model.javelin_api_key, SecretStr)
    assert model.javelin_api_key.get_secret_value() == "secret-api-key"
def test_api_key_masked_when_passed_via_constructor() -> None:
    """A constructor-supplied API key must never leak through str()/repr()."""
    model = ChatJavelinAIGateway(
        gateway_uri="<javelin-ai-gateway-uri>",
        route="<javelin-ai-gateway-chat-route>",
        javelin_api_key="secret-api-key",
        params={"temperature": 0.1},
    )
    assert str(model.javelin_api_key) == "**********"
    assert "secret-api-key" not in repr(model.javelin_api_key)
    assert "secret-api-key" not in repr(model)
def test_api_key_alias() -> None:
    """Both the field name and its `api_key` alias populate the secret."""
    via_field = ChatJavelinAIGateway(
        route="<javelin-ai-gateway-chat-route>",
        javelin_api_key="secret-api-key",
    )
    via_alias = ChatJavelinAIGateway(
        route="<javelin-ai-gateway-chat-route>", api_key="secret-api-key"
    )
    for model in (via_field, via_alias):
        assert str(model.javelin_api_key) == "**********"
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_bedrock.py | """Test Bedrock chat model."""
from typing import Any, cast
import pytest
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import (
AIMessageChunk,
BaseMessage,
HumanMessage,
SystemMessage,
)
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models import BedrockChat
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
@pytest.fixture
def chat() -> BedrockChat:
    # Shared deterministic (temperature=0) Claude v2 client for the tests below.
    return BedrockChat(model_id="anthropic.claude-v2", model_kwargs={"temperature": 0})  # type: ignore[call-arg]
@pytest.mark.scheduled
def test_chat_bedrock(chat: BedrockChat) -> None:
    """A system + human exchange yields a text-bearing message."""
    conversation = [
        SystemMessage(content="You are a helpful assistant."),
        HumanMessage(content="Hello"),
    ]
    reply = chat.invoke(conversation)
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
@pytest.mark.scheduled
def test_chat_bedrock_generate(chat: BedrockChat) -> None:
    """generate() over two prompt batches returns two ChatGeneration lists."""
    prompt = [HumanMessage(content="Hello")]
    result = chat.generate([prompt, prompt])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for generation in (g for batch in result.generations for g in batch):
        assert isinstance(generation, ChatGeneration)
        assert isinstance(generation.text, str)
        assert generation.text == generation.message.content
@pytest.mark.scheduled
def test_chat_bedrock_generate_with_token_usage(chat: BedrockChat) -> None:
    """llm_output reports prompt/completion/total token usage."""
    prompt = [HumanMessage(content="Hello")]
    result = chat.generate([prompt, prompt])
    assert isinstance(result, LLMResult)
    assert isinstance(result.llm_output, dict)
    usage = result.llm_output["usage"]
    assert usage["prompt_tokens"] == 20
    assert usage["completion_tokens"] > 0
    assert usage["total_tokens"] > 0
@pytest.mark.scheduled
def test_chat_bedrock_streaming() -> None:
    """Streaming fires on_llm_new_token at least once."""
    handler = FakeCallbackHandler()
    chat = BedrockChat(  # type: ignore[call-arg]
        model_id="anthropic.claude-v2",
        streaming=True,
        callback_manager=CallbackManager([handler]),
        verbose=True,
    )
    reply = chat.invoke([HumanMessage(content="Hello")])
    assert handler.llm_streams > 0
    assert isinstance(reply, BaseMessage)
@pytest.mark.scheduled
def test_chat_bedrock_streaming_generation_info() -> None:
    """Test that generation info is preserved when streaming."""

    class _FakeCallback(FakeCallbackHandler):
        # NOTE: class-level dict is shared across instances; fine here since
        # only one _FakeCallback is created per test run.
        saved_things: dict = {}

        def on_llm_end(
            self,
            *args: Any,
            **kwargs: Any,
        ) -> Any:
            # Save the generation (first positional arg is the LLMResult).
            self.saved_things["generation"] = args[0]

    callback = _FakeCallback()
    callback_manager = CallbackManager([callback])
    chat = BedrockChat(  # type: ignore[call-arg]
        model_id="anthropic.claude-v2",
        callback_manager=callback_manager,
    )
    # Drain the stream; on_llm_end captures the final aggregated generation.
    list(chat.stream("hi"))
    generation = callback.saved_things["generation"]
    # `Hello!` is two tokens, assert that that is what is returned
    assert generation.generations[0][0].text == "Hello!"
@pytest.mark.scheduled
def test_bedrock_streaming(chat: BedrockChat) -> None:
    """Streamed chunks accumulate into a single string-content message."""
    accumulated = None
    for chunk in chat.stream("I'm Pickle Rick"):
        accumulated = chunk if accumulated is None else accumulated + chunk  # type: ignore[operator]
        assert isinstance(chunk.content, str)
    assert isinstance(cast(AIMessageChunk, accumulated).content, str)
@pytest.mark.scheduled
async def test_bedrock_astream(chat: BedrockChat) -> None:
    """Every async-streamed chunk carries string content."""
    async for chunk in chat.astream("I'm Pickle Rick"):
        assert isinstance(chunk.content, str)
@pytest.mark.scheduled
async def test_bedrock_abatch(chat: BedrockChat) -> None:
    """Async batch returns one string-content message per prompt."""
    replies = await chat.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"])
    for reply in replies:
        assert isinstance(reply.content, str)
@pytest.mark.scheduled
async def test_bedrock_abatch_tags(chat: BedrockChat) -> None:
    """Async batch accepts a config carrying tags."""
    replies = await chat.abatch(
        ["I'm Pickle Rick", "I'm not Pickle Rick"], config={"tags": ["foo"]}
    )
    for reply in replies:
        assert isinstance(reply.content, str)
@pytest.mark.scheduled
def test_bedrock_batch(chat: BedrockChat) -> None:
    """batch() returns one string-content message per prompt."""
    for reply in chat.batch(["I'm Pickle Rick", "I'm not Pickle Rick"]):
        assert isinstance(reply.content, str)
@pytest.mark.scheduled
async def test_bedrock_ainvoke(chat: BedrockChat) -> None:
    """Async invoke returns a message with string content."""
    reply = await chat.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
    assert isinstance(reply.content, str)
@pytest.mark.scheduled
def test_bedrock_invoke(chat: BedrockChat) -> None:
    """invoke() returns content plus usage/model_id response metadata."""
    reply = chat.invoke("I'm Pickle Rick", config=dict(tags=["foo"]))
    assert isinstance(reply.content, str)
    assert all(key in reply.response_metadata for key in ("usage", "model_id"))
    assert reply.response_metadata["usage"]["prompt_tokens"] == 13
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_dappier.py | from typing import List
import pytest
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models.dappier import (
ChatDappierAI,
)
@pytest.mark.scheduled
def test_dappier_chat() -> None:
    """ChatDappierAI answers a human message with an AIMessage."""
    chat = ChatDappierAI(  # type: ignore[call-arg]
        dappier_endpoint="https://api.dappier.com/app/datamodelconversation",
        dappier_model="dm_01hpsxyfm2fwdt2zet9cg6fdxt",
    )
    reply = chat.invoke([HumanMessage(content="Who are you ?")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
@pytest.mark.scheduled
def test_dappier_generate() -> None:
    """generate() yields ChatGenerations and must not mutate its input."""
    chat = ChatDappierAI(  # type: ignore[call-arg]
        dappier_endpoint="https://api.dappier.com/app/datamodelconversation",
        dappier_model="dm_01hpsxyfm2fwdt2zet9cg6fdxt",
    )
    chat_messages: List[List[BaseMessage]] = [
        [HumanMessage(content="Who won the last super bowl?")],
    ]
    snapshot = [batch.copy() for batch in chat_messages]
    result: LLMResult = chat.generate(chat_messages)
    assert isinstance(result, LLMResult)
    for generation in result.generations[0]:
        assert isinstance(generation, ChatGeneration)
        assert isinstance(generation.text, str)
        assert generation.text == generation.message.content
    # generate() must leave the caller's message lists untouched.
    assert chat_messages == snapshot
@pytest.mark.scheduled
async def test_dappier_agenerate() -> None:
    """Async generation produces matching text/message content."""
    chat = ChatDappierAI(  # type: ignore[call-arg]
        dappier_endpoint="https://api.dappier.com/app/datamodelconversation",
        dappier_model="dm_01hpsxyfm2fwdt2zet9cg6fdxt",
    )
    message = HumanMessage(content="Who won the last super bowl?")
    result: LLMResult = await chat.agenerate([[message], [message]])
    assert isinstance(result, LLMResult)
    for generation in result.generations[0]:
        assert isinstance(generation, ChatGeneration)
        assert isinstance(generation.text, str)
        assert generation.text == generation.message.content
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_llamacpp.py | from pydantic import BaseModel, Field
from langchain_community.chat_models import ChatLlamaCpp
# Pydantic schema used as the structured-output target in
# test_structured_output below.
class Joke(BaseModel):
    """Joke to tell user."""

    setup: str = Field(description="question to set up a joke")
    punchline: str = Field(description="answer to resolve the joke")
# TODO: replace with standard integration tests
# See example in tests/integration_tests/chat_models/test_litellm.py
def test_structured_output() -> None:
    """with_structured_output parses the model reply into a Joke instance."""
    llm = ChatLlamaCpp(model_path="/path/to/Meta-Llama-3.1-8B-Instruct.Q4_K_M.gguf")
    joke = llm.with_structured_output(Joke).invoke("Tell me a short joke about cats.")
    assert isinstance(joke, Joke)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_snowflake.py | """Test ChatSnowflakeCortex
Note: This test must be run with the following environment variables set:
SNOWFLAKE_ACCOUNT="YOUR_SNOWFLAKE_ACCOUNT",
SNOWFLAKE_USERNAME="YOUR_SNOWFLAKE_USERNAME",
SNOWFLAKE_PASSWORD="YOUR_SNOWFLAKE_PASSWORD",
SNOWFLAKE_DATABASE="YOUR_SNOWFLAKE_DATABASE",
SNOWFLAKE_SCHEMA="YOUR_SNOWFLAKE_SCHEMA",
SNOWFLAKE_WAREHOUSE="YOUR_SNOWFLAKE_WAREHOUSE"
SNOWFLAKE_ROLE="YOUR_SNOWFLAKE_ROLE",
"""
import pytest
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models import ChatSnowflakeCortex
@pytest.fixture
def chat() -> ChatSnowflakeCortex:
    # Default-configured client; credentials come from the SNOWFLAKE_* env vars
    # listed in the module docstring.
    return ChatSnowflakeCortex()
def test_chat_snowflake_cortex(chat: ChatSnowflakeCortex) -> None:
    """A single human message yields a text-bearing reply.

    Uses ``invoke`` rather than the deprecated ``chat([message])`` call style,
    matching the other tests in this module.
    """
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_chat_snowflake_cortex_system_message(chat: ChatSnowflakeCortex) -> None:
    """A system + human exchange yields a text-bearing reply.

    Uses ``invoke`` rather than the deprecated ``chat([...])`` call style,
    matching the other tests in this module.
    """
    system_message = SystemMessage(content="You are to chat with the user.")
    human_message = HumanMessage(content="Hello")
    response = chat.invoke([system_message, human_message])
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_chat_snowflake_cortex_model() -> None:
    """An explicit model name is stored on the instance."""
    assert ChatSnowflakeCortex(model="foo").model == "foo"
def test_chat_snowflake_cortex_generate(chat: ChatSnowflakeCortex) -> None:
    """generate() over two prompt batches yields ChatGenerations."""
    prompt = [HumanMessage(content="Hello")]
    result = chat.generate([prompt, prompt])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for generation in (g for batch in result.generations for g in batch):
        assert isinstance(generation, ChatGeneration)
        assert isinstance(generation.text, str)
        assert generation.text == generation.message.content
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_premai.py | """Test ChatPremAI from PremAI API wrapper.
Note: This test must be run with the PREMAI_API_KEY environment variable set to a valid
API key and a valid project_id.
For this we need to have a project setup in PremAI's platform: https://app.premai.io
"""
import pytest
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models import ChatPremAI
@pytest.fixture
def chat() -> ChatPremAI:
    # Client bound to PremAI project 8; requires PREMAI_API_KEY in the env.
    return ChatPremAI(project_id=8)  # type: ignore[call-arg]
def test_chat_premai() -> None:
    """A single human message yields a text-bearing reply."""
    chat = ChatPremAI(project_id=8)  # type: ignore[call-arg]
    reply = chat.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_chat_prem_system_message() -> None:
    """A system + human exchange yields a text-bearing reply."""
    chat = ChatPremAI(project_id=8)  # type: ignore[call-arg]
    reply = chat.invoke(
        [
            SystemMessage(content="You are to chat with the user."),
            HumanMessage(content="Hello"),
        ]
    )
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_chat_prem_model() -> None:
    """An explicit model name is stored on the instance."""
    chat = ChatPremAI(model="foo", project_id=8)  # type: ignore[call-arg]
    assert chat.model == "foo"
def test_chat_prem_generate() -> None:
    """generate() over two prompt batches yields ChatGenerations."""
    chat = ChatPremAI(project_id=8)  # type: ignore[call-arg]
    prompt = [HumanMessage(content="Hello")]
    result = chat.generate([prompt, prompt])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for generation in (g for batch in result.generations for g in batch):
        assert isinstance(generation, ChatGeneration)
        assert isinstance(generation.text, str)
        assert generation.text == generation.message.content
def test_prem_invoke(chat: ChatPremAI) -> None:
    """Tests chat completion with invoke.

    Note: this was previously declared ``async`` while awaiting nothing —
    the body is entirely synchronous, so a plain test function is correct
    and does not depend on an asyncio test runner.
    """
    result = chat.invoke("How is the weather in New York today?")
    assert isinstance(result.content, str)
def test_prem_streaming() -> None:
    """Every streamed chunk carries string content."""
    chat = ChatPremAI(project_id=8, streaming=True)  # type: ignore[call-arg]
    for chunk in chat.stream("I'm Pickle Rick"):
        assert isinstance(chunk.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_friendli.py | """Test Friendli chat API."""
import pytest
from langchain_core.messages.ai import AIMessage
from langchain_core.messages.human import HumanMessage
from langchain_core.outputs.generation import Generation
from langchain_core.outputs.llm_result import LLMResult
from langchain_community.chat_models.friendli import ChatFriendli
@pytest.fixture
def friendli_chat() -> ChatFriendli:
    """Friendli LLM for chat."""
    # Deterministic, short completions keep these integration tests fast.
    return ChatFriendli(temperature=0, max_tokens=10)
def test_friendli_invoke(friendli_chat: ChatFriendli) -> None:
    """invoke() returns an AIMessage with string content."""
    reply = friendli_chat.invoke("What is generative AI?")
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
async def test_friendli_ainvoke(friendli_chat: ChatFriendli) -> None:
    """Async invoke returns an AIMessage with string content."""
    reply = await friendli_chat.ainvoke("What is generative AI?")
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
def test_friendli_batch(friendli_chat: ChatFriendli) -> None:
    """batch() returns one AIMessage per prompt."""
    prompts = ["What is generative AI?", "What is generative AI?"]
    for reply in friendli_chat.batch(prompts):
        assert isinstance(reply, AIMessage)
        assert isinstance(reply.content, str)
async def test_friendli_abatch(friendli_chat: ChatFriendli) -> None:
    """Async batch returns one AIMessage per prompt."""
    replies = await friendli_chat.abatch(
        ["What is generative AI?", "What is generative AI?"]
    )
    for reply in replies:
        assert isinstance(reply, AIMessage)
        assert isinstance(reply.content, str)
def test_friendli_generate(friendli_chat: ChatFriendli) -> None:
    """generate() yields one Generation per prompt batch."""
    message = HumanMessage(content="What is generative AI?")
    result = friendli_chat.generate([[message], [message]])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for batch in result.generations:
        first = batch[0]
        assert isinstance(first, Generation)
        assert isinstance(first.text, str)
        info = first.generation_info
        if info is not None:
            assert "token" in info
async def test_friendli_agenerate(friendli_chat: ChatFriendli) -> None:
    """Async generate yields one Generation per prompt batch."""
    message = HumanMessage(content="What is generative AI?")
    result = await friendli_chat.agenerate([[message], [message]])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for batch in result.generations:
        first = batch[0]
        assert isinstance(first, Generation)
        assert isinstance(first.text, str)
        info = first.generation_info
        if info is not None:
            assert "token" in info
def test_friendli_stream(friendli_chat: ChatFriendli) -> None:
    """Every streamed chunk is an AIMessage with string content."""
    for chunk in friendli_chat.stream("Say hello world."):
        assert isinstance(chunk, AIMessage)
        assert isinstance(chunk.content, str)
async def test_friendli_astream(friendli_chat: ChatFriendli) -> None:
    """Every async-streamed chunk is an AIMessage with string content."""
    async for chunk in friendli_chat.astream("Say hello world."):
        assert isinstance(chunk, AIMessage)
        assert isinstance(chunk.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_fireworks.py | """Test ChatFireworks wrapper."""
import sys
from typing import cast
import pytest
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
from langchain_core.outputs import ChatGeneration, ChatResult, LLMResult
from langchain_community.chat_models.fireworks import ChatFireworks
if sys.version_info < (3, 9):
pytest.skip("fireworks-ai requires Python > 3.8", allow_module_level=True)
@pytest.fixture
def chat() -> ChatFireworks:
    # Shared deterministic (temperature=0) client with a 512-token cap.
    return ChatFireworks(model_kwargs={"temperature": 0, "max_tokens": 512})
@pytest.mark.scheduled
def test_chat_fireworks(chat: ChatFireworks) -> None:
    """A single human message yields a text-bearing reply."""
    reply = chat.invoke(
        [HumanMessage(content="What is the weather in Redwood City, CA today")]
    )
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
@pytest.mark.scheduled
def test_chat_fireworks_model() -> None:
    """An explicit model name is stored on the instance."""
    assert ChatFireworks(model="foo").model == "foo"
@pytest.mark.scheduled
def test_chat_fireworks_system_message(chat: ChatFireworks) -> None:
    """A system + human exchange yields a text-bearing reply."""
    reply = chat.invoke(
        [
            SystemMessage(content="You are to chat with the user."),
            HumanMessage(content="Hello"),
        ]
    )
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
@pytest.mark.scheduled
def test_chat_fireworks_generate() -> None:
    """With n=2, generate() returns two generations per prompt."""
    chat = ChatFireworks(model_kwargs={"n": 2})
    prompt = [HumanMessage(content="Hello")]
    result = chat.generate([prompt, prompt])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for batch in result.generations:
        assert len(batch) == 2
        for generation in batch:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
@pytest.mark.scheduled
def test_chat_fireworks_multiple_completions() -> None:
    """With n=5, a single prompt yields five completions."""
    chat = ChatFireworks(model_kwargs={"n": 5})
    result = chat._generate([HumanMessage(content="Hello")])
    assert isinstance(result, ChatResult)
    assert len(result.generations) == 5
    for generation in result.generations:
        assert isinstance(generation.message, BaseMessage)
        assert isinstance(generation.message.content, str)
@pytest.mark.scheduled
def test_chat_fireworks_llm_output_contains_model_id(chat: ChatFireworks) -> None:
    """llm_output echoes the model id used for the call."""
    llm_result = chat.generate([[HumanMessage(content="Hello")]])
    assert llm_result.llm_output is not None
    assert llm_result.llm_output["model"] == chat.model
@pytest.mark.scheduled
def test_fireworks_invoke(chat: ChatFireworks) -> None:
    """invoke with a stop sequence terminates the reply on a comma."""
    reply = chat.invoke("How is the weather in New York today?", stop=[","])
    assert isinstance(reply.content, str)
    assert reply.content[-1] == ","
@pytest.mark.scheduled
async def test_fireworks_ainvoke(chat: ChatFireworks) -> None:
    """Async invoke with a stop sequence terminates the reply on a comma."""
    reply = await chat.ainvoke("How is the weather in New York today?", stop=[","])
    assert isinstance(reply.content, str)
    assert reply.content[-1] == ","
@pytest.mark.scheduled
def test_fireworks_batch(chat: ChatFireworks) -> None:
    """batch() with a stop sequence stops every reply on a comma."""
    prompt = "What is the weather in Redwood City, CA today?"
    replies = chat.batch(
        [prompt, prompt, prompt],
        config={"max_concurrency": 2},
        stop=[","],
    )
    for reply in replies:
        assert isinstance(reply.content, str)
        assert reply.content[-1] == ",", reply.content
@pytest.mark.scheduled
async def test_fireworks_abatch(chat: ChatFireworks) -> None:
    """Async batch with a stop sequence stops every reply on a comma."""
    prompt = "What is the weather in Redwood City, CA today?"
    replies = await chat.abatch(
        [prompt, prompt],
        config={"max_concurrency": 5},
        stop=[","],
    )
    for reply in replies:
        assert isinstance(reply.content, str)
        assert reply.content[-1] == ","
@pytest.mark.scheduled
def test_fireworks_streaming(chat: ChatFireworks) -> None:
    """Every streamed chunk carries string content."""
    for chunk in chat.stream("I'm Pickle Rick"):
        assert isinstance(chunk.content, str)
@pytest.mark.scheduled
def test_fireworks_streaming_stop_words(chat: ChatFireworks) -> None:
    """With a stop sequence, the final streamed chunk ends on it."""
    last = ""
    for chunk in chat.stream("I'm Pickle Rick", stop=[","]):
        assert isinstance(chunk.content, str)
        last = cast(str, chunk.content)
    assert last[-1] == ","
@pytest.mark.scheduled
async def test_chat_fireworks_agenerate() -> None:
    """Async generate with n=2 returns two generations per prompt."""
    chat = ChatFireworks(model_kwargs={"n": 2})
    prompt = [HumanMessage(content="Hello")]
    result = await chat.agenerate([prompt, prompt])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for batch in result.generations:
        assert len(batch) == 2
        for generation in batch:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
@pytest.mark.scheduled
async def test_fireworks_astream(chat: ChatFireworks) -> None:
    """Async streaming with a stop sequence ends on that token."""
    last = ""
    async for chunk in chat.astream(
        "Who's the best quarterback in the NFL?", stop=[","]
    ):
        assert isinstance(chunk.content, str)
        last = cast(str, chunk.content)
    assert last[-1] == ","
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_llama_edge.py | import pytest
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langchain_community.chat_models.llama_edge import LlamaEdgeChatService
@pytest.mark.enable_socket
def test_chat_wasm_service() -> None:
    """This test requires the port 8080 is not occupied."""
    chat = LlamaEdgeChatService(
        service_url="https://b008-54-186-154-209.ngrok-free.app"
    )
    conversation = [
        SystemMessage(content="You are an AI assistant"),
        HumanMessage(content="What is the capital of France?"),
    ]
    reply = chat.invoke(conversation)
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
    assert "Paris" in reply.content
@pytest.mark.enable_socket
def test_chat_wasm_service_streaming() -> None:
    """This test requires the port 8080 is not occupied."""
    chat = LlamaEdgeChatService(
        service_url="https://b008-54-186-154-209.ngrok-free.app",
        streaming=True,
    )
    collected = ""
    for chunk in chat.stream([HumanMessage(content="What is the capital of France?")]):
        print(chunk.content, end="", flush=True)  # noqa: T201
        collected += chunk.content  # type: ignore[operator]
    assert "Paris" in collected
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_google_palm.py | """Test Google PaLM Chat API wrapper.
Note: This test must be run with the GOOGLE_API_KEY environment variable set to a
valid API key.
"""
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
from langchain_core.outputs import ChatGeneration, ChatResult, LLMResult
from langchain_community.chat_models import ChatGooglePalm
def test_chat_google_palm() -> None:
    """A single human message yields a text-bearing reply."""
    chat = ChatGooglePalm()  # type: ignore[call-arg]
    reply = chat.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_chat_google_palm_system_message() -> None:
    """A system + human exchange yields a text-bearing reply."""
    chat = ChatGooglePalm()  # type: ignore[call-arg]
    reply = chat.invoke(
        [
            SystemMessage(content="You are to chat with the user."),
            HumanMessage(content="Hello"),
        ]
    )
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_chat_google_palm_generate() -> None:
    """With n=2, generate() returns two generations per prompt."""
    chat = ChatGooglePalm(n=2, temperature=1.0)  # type: ignore[call-arg]
    prompt = [HumanMessage(content="Hello")]
    result = chat.generate([prompt, prompt])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for batch in result.generations:
        assert len(batch) == 2
        for generation in batch:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
def test_chat_google_palm_multiple_completions() -> None:
    """With n=5, a single prompt yields five completions."""
    # The API de-dupes duplicate responses, so a high temperature is used.
    # This could still be a flakey test.
    chat = ChatGooglePalm(n=5, temperature=1.0)  # type: ignore[call-arg]
    result = chat._generate([HumanMessage(content="Hello")])
    assert isinstance(result, ChatResult)
    assert len(result.generations) == 5
    for generation in result.generations:
        assert isinstance(generation.message, BaseMessage)
        assert isinstance(generation.message.content, str)
async def test_async_chat_google_palm() -> None:
    """Async generation with n=2 returns two generations per prompt."""
    chat = ChatGooglePalm(n=2, temperature=1.0)  # type: ignore[call-arg]
    prompt = [HumanMessage(content="Hello")]
    result = await chat.agenerate([prompt, prompt])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for batch in result.generations:
        assert len(batch) == 2
        for generation in batch:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_azureml_endpoint.py | """Test AzureML Chat Endpoint wrapper."""
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models.azureml_endpoint import (
AzureMLChatOnlineEndpoint,
CustomOpenAIChatContentFormatter,
)
def test_llama_call() -> None:
    """A valid call to an Open Source Foundation Model endpoint succeeds."""
    chat = AzureMLChatOnlineEndpoint(
        content_formatter=CustomOpenAIChatContentFormatter()
    )
    reply = chat.invoke([HumanMessage(content="Foo")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_temperature_kwargs() -> None:
    """Test that the per-call temperature kwarg works."""
    # (Original docstring incorrectly said "timeout" — this exercises
    # the `temperature` keyword passed through invoke.)
    chat = AzureMLChatOnlineEndpoint(
        content_formatter=CustomOpenAIChatContentFormatter()
    )
    response = chat.invoke([HumanMessage(content="FOO")], temperature=0.8)
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_message_history() -> None:
    """A multi-turn history is accepted and answered."""
    chat = AzureMLChatOnlineEndpoint(
        content_formatter=CustomOpenAIChatContentFormatter()
    )
    history = [
        HumanMessage(content="Hello."),
        AIMessage(content="Hello!"),
        HumanMessage(content="How are you doing?"),
    ]
    reply = chat.invoke(history)
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_multiple_messages() -> None:
    """generate() over two prompts yields one ChatGeneration each."""
    chat = AzureMLChatOnlineEndpoint(
        content_formatter=CustomOpenAIChatContentFormatter()
    )
    prompt = [HumanMessage(content="Hi!")]
    result = chat.generate([prompt, prompt])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for batch in result.generations:
        assert len(batch) == 1
        for generation in batch:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_jinachat.py | """Test JinaChat wrapper."""
from typing import cast
import pytest
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from pydantic import SecretStr
from pytest import CaptureFixture, MonkeyPatch
from langchain_community.chat_models.jinachat import JinaChat
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
def test_jinachat_api_key_is_secret_string() -> None:
    """The JinaChat API key is wrapped in a SecretStr."""
    llm = JinaChat(jinachat_api_key="secret-api-key")  # type: ignore[arg-type, call-arg]
    assert isinstance(llm.jinachat_api_key, SecretStr)
def test_jinachat_api_key_masked_when_passed_from_env(
    monkeypatch: MonkeyPatch, capsys: CaptureFixture
) -> None:
    """An env-provided API key prints as a masked value."""
    monkeypatch.setenv("JINACHAT_API_KEY", "secret-api-key")
    llm = JinaChat()  # type: ignore[call-arg]
    print(llm.jinachat_api_key, end="")  # noqa: T201
    assert capsys.readouterr().out == "**********"
def test_jinachat_api_key_masked_when_passed_via_constructor(
    capsys: CaptureFixture,
) -> None:
    """A constructor-provided API key prints as a masked value."""
    llm = JinaChat(jinachat_api_key="secret-api-key")  # type: ignore[arg-type, call-arg]
    print(llm.jinachat_api_key, end="")  # noqa: T201
    assert capsys.readouterr().out == "**********"
def test_uses_actual_secret_value_from_secretstr() -> None:
    """``.get_secret_value()`` recovers the raw key from the ``SecretStr`` wrapper."""
    chat = JinaChat(jinachat_api_key="secret-api-key")  # type: ignore[arg-type, call-arg]
    secret = cast(SecretStr, chat.jinachat_api_key)
    assert secret.get_secret_value() == "secret-api-key"
def test_jinachat() -> None:
    """Test JinaChat wrapper."""
    # Minimal round-trip: one human message in, one chat message out.
    chat = JinaChat(max_tokens=10)  # type: ignore[call-arg]
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_jinachat_system_message() -> None:
    """Test JinaChat wrapper with system message."""
    chat = JinaChat(max_tokens=10)  # type: ignore[call-arg]
    # A system message preceding the human turn must be accepted by the API.
    system_message = SystemMessage(content="You are to chat with the user.")
    human_message = HumanMessage(content="Hello")
    response = chat.invoke([system_message, human_message])
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_jinachat_generate() -> None:
    """Test JinaChat wrapper with generate."""
    chat = JinaChat(max_tokens=10)  # type: ignore[call-arg]
    message = HumanMessage(content="Hello")
    # Two single-message conversations -> two generation lists in the result.
    response = chat.generate([[message], [message]])
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 2
    for generations in response.generations:
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            # `text` is a convenience mirror of the message content.
            assert generation.text == generation.message.content
def test_jinachat_streaming() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = JinaChat(  # type: ignore[call-arg]
        max_tokens=10,
        streaming=True,
        temperature=0,
        callback_manager=callback_manager,
        verbose=True,
    )
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    # llm_streams counts on_llm_new_token calls; > 0 proves streaming happened.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, BaseMessage)
async def test_async_jinachat() -> None:
    """Test async generation."""
    chat = JinaChat(max_tokens=102)  # type: ignore[call-arg]
    message = HumanMessage(content="Hello")
    # Two identical prompts batched through the async generate path.
    response = await chat.agenerate([[message], [message]])
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 2
    for generations in response.generations:
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
async def test_async_jinachat_streaming() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = JinaChat(  # type: ignore[call-arg]
        max_tokens=10,
        streaming=True,
        temperature=0,
        callback_manager=callback_manager,
        verbose=True,
    )
    message = HumanMessage(content="Hello")
    response = await chat.agenerate([[message], [message]])
    # Streaming must fire at least one token callback even via the async path.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 2
    for generations in response.generations:
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
def test_jinachat_extra_kwargs() -> None:
    """Test that extra kwargs passed to JinaChat are collected in model_kwargs."""
    # Check that foo is saved in extra_kwargs.
    llm = JinaChat(foo=3, max_tokens=10)  # type: ignore[call-arg]
    assert llm.max_tokens == 10
    assert llm.model_kwargs == {"foo": 3}
    # Test that if extra_kwargs are provided, they are added to it.
    llm = JinaChat(foo=3, model_kwargs={"bar": 2})  # type: ignore[call-arg]
    assert llm.model_kwargs == {"foo": 3, "bar": 2}
    # Test that if provided twice it errors
    with pytest.raises(ValueError):
        JinaChat(foo=3, model_kwargs={"foo": 2})  # type: ignore[call-arg]
    # Test that if explicit param is specified in kwargs it errors
    with pytest.raises(ValueError):
        JinaChat(model_kwargs={"temperature": 0.2})  # type: ignore[call-arg]
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_yuan2.py | """Test ChatYuan2 wrapper."""
from typing import List
import pytest
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
from langchain_core.outputs import (
ChatGeneration,
LLMResult,
)
from langchain_community.chat_models.yuan2 import ChatYuan2
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
@pytest.mark.scheduled
def test_chat_yuan2() -> None:
    """Test ChatYuan2 wrapper."""
    # Targets a locally served yuan2 model exposed via an OpenAI-compatible API.
    chat = ChatYuan2(  # type: ignore[call-arg]
        yuan2_api_key="EMPTY",
        yuan2_api_base="http://127.0.0.1:8001/v1",
        temperature=1.0,
        model_name="yuan2",
        max_retries=3,
        streaming=False,
    )
    messages = [
        HumanMessage(content="Hello"),
    ]
    response = chat.invoke(messages)
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_chat_yuan2_system_message() -> None:
    """Test ChatYuan2 wrapper with system message."""
    chat = ChatYuan2(  # type: ignore[call-arg]
        yuan2_api_key="EMPTY",
        yuan2_api_base="http://127.0.0.1:8001/v1",
        temperature=1.0,
        model_name="yuan2",
        max_retries=3,
        streaming=False,
    )
    # System turn followed by a human turn; the model must accept both roles.
    messages = [
        SystemMessage(content="You are an AI assistant."),
        HumanMessage(content="Hello"),
    ]
    response = chat.invoke(messages)
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
@pytest.mark.scheduled
def test_chat_yuan2_generate() -> None:
    """Test ChatYuan2 wrapper with generate."""
    chat = ChatYuan2(  # type: ignore[call-arg]
        yuan2_api_key="EMPTY",
        yuan2_api_base="http://127.0.0.1:8001/v1",
        temperature=1.0,
        model_name="yuan2",
        max_retries=3,
        streaming=False,
    )
    messages: List = [
        HumanMessage(content="Hello"),
    ]
    # One conversation in -> one generation list out, plus provider metadata.
    response = chat.generate([messages])
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 1
    assert response.llm_output
    generation = response.generations[0]
    for gen in generation:
        assert isinstance(gen, ChatGeneration)
        assert isinstance(gen.text, str)
        assert gen.text == gen.message.content
@pytest.mark.scheduled
def test_chat_yuan2_streaming() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = ChatYuan2(  # type: ignore[call-arg]
        yuan2_api_key="EMPTY",
        yuan2_api_base="http://127.0.0.1:8001/v1",
        temperature=1.0,
        model_name="yuan2",
        max_retries=3,
        streaming=True,
        callbacks=callback_manager,
    )
    messages = [
        HumanMessage(content="Hello"),
    ]
    response = chat.invoke(messages)
    # At least one streamed token must have reached the callback handler.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, BaseMessage)
async def test_async_chat_yuan2() -> None:
    """Test async generation."""
    chat = ChatYuan2(  # type: ignore[call-arg]
        yuan2_api_key="EMPTY",
        yuan2_api_base="http://127.0.0.1:8001/v1",
        temperature=1.0,
        model_name="yuan2",
        max_retries=3,
        streaming=False,
    )
    messages: List = [
        HumanMessage(content="Hello"),
    ]
    # Exercise the async batch path with a single conversation.
    response = await chat.agenerate([messages])
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 1
    generations = response.generations[0]
    for generation in generations:
        assert isinstance(generation, ChatGeneration)
        assert isinstance(generation.text, str)
        assert generation.text == generation.message.content
async def test_async_chat_yuan2_streaming() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = ChatYuan2(  # type: ignore[call-arg]
        yuan2_api_key="EMPTY",
        yuan2_api_base="http://127.0.0.1:8001/v1",
        temperature=1.0,
        model_name="yuan2",
        max_retries=3,
        streaming=True,
        callbacks=callback_manager,
    )
    messages: List = [
        HumanMessage(content="Hello"),
    ]
    response = await chat.agenerate([messages])
    # Streaming must fire token callbacks even through the async generate path.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 1
    generations = response.generations[0]
    for generation in generations:
        assert isinstance(generation, ChatGeneration)
        assert isinstance(generation.text, str)
        assert generation.text == generation.message.content
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/text_mlx.py | """Test MLX Chat Model."""
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_community.chat_models.mlx import ChatMLX
from langchain_community.llms.mlx_pipeline import MLXPipeline
def test_default_call() -> None:
    """Test default model call."""
    # Load a small quantized Gemma model through the MLX pipeline; cap output
    # length so the test stays fast.
    llm = MLXPipeline.from_model_id(
        model_id="mlx-community/quantized-gemma-2b-it",
        pipeline_kwargs={"max_new_tokens": 10},
    )
    chat = ChatMLX(llm=llm)
    response = chat.invoke(input=[HumanMessage(content="Hello")])
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_multiple_history() -> None:
    """Tests multiple history works."""
    llm = MLXPipeline.from_model_id(
        model_id="mlx-community/quantized-gemma-2b-it",
        pipeline_kwargs={"max_new_tokens": 10},
    )
    chat = ChatMLX(llm=llm)
    # Alternating human/AI turns verify the chat-template handling of history.
    response = chat.invoke(
        input=[
            HumanMessage(content="Hello."),
            AIMessage(content="Hello!"),
            HumanMessage(content="How are you doing?"),
        ]
    )
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_coze.py | from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.chat_models.coze import ChatCoze
# For testing, run:
# TEST_FILE=tests/integration_tests/chat_models/test_coze.py make test
def test_chat_coze_default() -> None:
    """Streaming round-trip against the Coze API returns an AIMessage.

    Requires real credentials in place of the ``pat_...``/``7....``
    placeholders below.
    """
    chat = ChatCoze(
        coze_api_base="https://api.coze.com",
        coze_api_key="pat_...",  # type: ignore[arg-type]
        bot_id="7....",
        user="123",
        conversation_id="",
        streaming=True,
    )
    message = HumanMessage(content="请完整背诵将进酒,背诵5遍")
    # Use `.invoke()` rather than the deprecated `__call__` interface, matching
    # the rest of the integration-test suite.
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
def test_chat_coze_default_non_streaming() -> None:
    """Non-streaming round-trip against the Coze API returns an AIMessage.

    Requires real credentials in place of the ``pat_...``/``7....``
    placeholders below.
    """
    chat = ChatCoze(
        coze_api_base="https://api.coze.com",
        coze_api_key="pat_...",  # type: ignore[arg-type]
        bot_id="7....",
        user="123",
        conversation_id="",
        streaming=False,
    )
    message = HumanMessage(content="请完整背诵将进酒,背诵5遍")
    # Use `.invoke()` rather than the deprecated `__call__` interface, matching
    # the rest of the integration-test suite.
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_volcengine_maas.py | """Test volc engine maas chat model."""
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models.volcengine_maas import VolcEngineMaasChat
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
def test_default_call() -> None:
    """Test valid chat call to volc engine."""
    # Credentials are read from the environment by the wrapper's validator.
    chat = VolcEngineMaasChat()  # type: ignore[call-arg]
    response = chat.invoke([HumanMessage(content="Hello")])
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_multiple_history() -> None:
    """Tests multiple history works."""
    chat = VolcEngineMaasChat()  # type: ignore[call-arg]
    # Alternating human/AI turns exercise multi-turn history handling.
    response = chat.invoke(
        [
            HumanMessage(content="Hello"),
            AIMessage(content="Hello!"),
            HumanMessage(content="How are you?"),
        ]
    )
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_stream() -> None:
    """Test that stream works."""
    chat = VolcEngineMaasChat(streaming=True)  # type: ignore[call-arg]
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    response = chat.invoke(
        [
            HumanMessage(content="Hello"),
            AIMessage(content="Hello!"),
            HumanMessage(content="How are you?"),
        ],
        stream=True,
        config={"callbacks": callback_manager},
    )
    # Token callbacks prove the response actually arrived in streamed chunks.
    assert callback_handler.llm_streams > 0
    assert isinstance(response.content, str)
def test_stop() -> None:
    """Test that stop words terminate generation early."""
    chat = VolcEngineMaasChat(  # type: ignore[call-arg]
        model="skylark2-pro-4k", model_version="1.2", streaming=True
    )
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    response = chat.invoke(
        [
            HumanMessage(content="repeat: hello world"),
            AIMessage(content="hello world"),
            HumanMessage(content="repeat: hello world"),
        ],
        stream=True,
        config={"callbacks": callback_manager},
        stop=["world"],
    )
    assert callback_handler.llm_streams > 0
    assert isinstance(response.content, str)
    # Generation must halt before emitting the stop word "world".
    assert response.content.rstrip() == "hello"
def test_multiple_messages() -> None:
    """Tests multiple messages works."""
    chat = VolcEngineMaasChat()  # type: ignore[call-arg]
    message = HumanMessage(content="Hi, how are you?")
    # Two independent single-message conversations in one generate call.
    response = chat.generate([[message], [message]])
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 2
    for generations in response.generations:
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_deepinfra.py | """Test ChatDeepInfra wrapper."""
from typing import List
from langchain_core.messages import BaseMessage, HumanMessage
from langchain_core.messages.ai import AIMessage
from langchain_core.messages.tool import ToolMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_core.runnables.base import RunnableBinding
from pydantic import BaseModel
from langchain_community.chat_models.deepinfra import ChatDeepInfra
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
class GenerateMovieName(BaseModel):
    """Get a movie name from a description"""

    # Free-text description the model should turn into a movie title.
    description: str
def test_chat_deepinfra() -> None:
    """Test valid call to DeepInfra."""
    # Cap output length to keep the round-trip fast and cheap.
    chat = ChatDeepInfra(
        max_tokens=10,
    )
    response = chat.invoke([HumanMessage(content="Hello")])
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_chat_deepinfra_streaming() -> None:
    """Test that streaming invokes the on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    chat = ChatDeepInfra(
        callbacks=[callback_handler],
        streaming=True,
        max_tokens=10,
    )
    response = chat.invoke([HumanMessage(content="Hello")])
    # llm_streams counts on_llm_new_token calls; > 0 proves streaming happened.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, BaseMessage)
async def test_async_chat_deepinfra() -> None:
    """Test async generation."""
    chat = ChatDeepInfra(
        max_tokens=10,
    )
    message = HumanMessage(content="Hello")
    response = await chat.agenerate([[message]])
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 1
    assert len(response.generations[0]) == 1
    generation = response.generations[0][0]
    assert isinstance(generation, ChatGeneration)
    assert isinstance(generation.text, str)
    # `text` is a convenience mirror of the message content.
    assert generation.text == generation.message.content
async def test_async_chat_deepinfra_streaming() -> None:
    """Test that async streaming invokes the on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    chat = ChatDeepInfra(
        # model="meta-llama/Llama-2-7b-chat-hf",
        callbacks=[callback_handler],
        max_tokens=10,
        streaming=True,
        timeout=5,
    )
    message = HumanMessage(content="Hello")
    response = await chat.agenerate([[message]])
    # Streaming must fire token callbacks even through the async path.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 1
    assert len(response.generations[0]) == 1
    generation = response.generations[0][0]
    assert isinstance(generation, ChatGeneration)
    assert isinstance(generation.text, str)
    assert generation.text == generation.message.content
def test_chat_deepinfra_bind_tools() -> None:
    """bind_tools converts a pydantic model into the OpenAI tool schema."""

    class Foo(BaseModel):
        pass

    chat = ChatDeepInfra(
        max_tokens=10,
    )
    tools = [Foo]
    chat_with_tools = chat.bind_tools(tools)
    # bind_tools returns a RunnableBinding carrying the tool kwargs.
    assert isinstance(chat_with_tools, RunnableBinding)
    chat_tools = chat_with_tools.tools
    assert chat_tools
    assert chat_tools == {
        "tools": [
            {
                "function": {
                    "description": "",
                    "name": "Foo",
                    "parameters": {"properties": {}, "type": "object"},
                },
                "type": "function",
            }
        ]
    }
def test_tool_use() -> None:
    """End-to-end tool-calling loop: request, tool result, follow-up call."""
    llm = ChatDeepInfra(model="meta-llama/Meta-Llama-3-70B-Instruct", temperature=0)
    # tool_choice=True forces the model to emit a tool call.
    llm_with_tool = llm.bind_tools(tools=[GenerateMovieName], tool_choice=True)
    msgs: List = [
        HumanMessage(content="It should be a movie explaining humanity in 2133.")
    ]
    ai_msg = llm_with_tool.invoke(msgs)

    assert isinstance(ai_msg, AIMessage)
    assert isinstance(ai_msg.tool_calls, list)
    assert len(ai_msg.tool_calls) == 1
    tool_call = ai_msg.tool_calls[0]
    assert "args" in tool_call

    # Feed a synthetic tool result back; the second invoke must not raise.
    tool_msg = ToolMessage(
        content="Year 2133",
        tool_call_id=ai_msg.additional_kwargs["tool_calls"][0]["id"],
    )
    msgs.extend([ai_msg, tool_msg])
    llm_with_tool.invoke(msgs)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_anthropic.py | """Test Anthropic API wrapper."""
from typing import List
import pytest
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models.anthropic import (
ChatAnthropic,
)
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
@pytest.mark.scheduled
def test_anthropic_call() -> None:
    """Test valid call to anthropic."""
    chat = ChatAnthropic(model="test")  # type: ignore[call-arg]
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
@pytest.mark.scheduled
def test_anthropic_generate() -> None:
    """Test generate method of anthropic."""
    chat = ChatAnthropic(model="test")  # type: ignore[call-arg]
    chat_messages: List[List[BaseMessage]] = [
        [HumanMessage(content="How many toes do dogs have?")]
    ]
    # Keep a copy to verify generate() does not mutate its input lists.
    messages_copy = [messages.copy() for messages in chat_messages]
    result: LLMResult = chat.generate(chat_messages)
    assert isinstance(result, LLMResult)
    for response in result.generations[0]:
        assert isinstance(response, ChatGeneration)
        assert isinstance(response.text, str)
        assert response.text == response.message.content
    # Input messages must be unchanged after the call.
    assert chat_messages == messages_copy
@pytest.mark.scheduled
def test_anthropic_streaming() -> None:
    """Test streaming tokens from anthropic."""
    chat = ChatAnthropic(model="test", streaming=True)  # type: ignore[call-arg]
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
@pytest.mark.scheduled
def test_anthropic_streaming_callback() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = ChatAnthropic(  # type: ignore[call-arg]
        model="test",
        streaming=True,
        callback_manager=callback_manager,
        verbose=True,
    )
    message = HumanMessage(content="Write me a sentence with 10 words.")
    chat.invoke([message])
    # A multi-word completion should arrive as more than one streamed token.
    assert callback_handler.llm_streams > 1
@pytest.mark.scheduled
async def test_anthropic_async_streaming_callback() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = ChatAnthropic(  # type: ignore[call-arg]
        model="test",
        streaming=True,
        callback_manager=callback_manager,
        verbose=True,
    )
    chat_messages: List[BaseMessage] = [
        HumanMessage(content="How many toes do dogs have?")
    ]
    result: LLMResult = await chat.agenerate([chat_messages])
    # Streaming must fire token callbacks through the async generate path too.
    assert callback_handler.llm_streams > 1
    assert isinstance(result, LLMResult)
    for response in result.generations[0]:
        assert isinstance(response, ChatGeneration)
        assert isinstance(response.text, str)
        assert response.text == response.message.content
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_gpt_router.py | """Test GPTRouter API wrapper."""
from typing import List
import pytest
from langchain_core.callbacks import (
CallbackManager,
)
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from pydantic import SecretStr
from pytest import CaptureFixture
from langchain_community.chat_models.gpt_router import GPTRouter, GPTRouterModel
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
def test_api_key_is_string() -> None:
    """The API key passed to the constructor is wrapped in a ``SecretStr``."""
    gpt_router = GPTRouter(  # type: ignore[call-arg]
        gpt_router_api_base="https://example.com",
        gpt_router_api_key="secret-api-key",  # type: ignore[arg-type]
    )
    assert isinstance(gpt_router.gpt_router_api_key, SecretStr)
def test_api_key_masked_when_passed_via_constructor(
    capsys: CaptureFixture,
) -> None:
    """Printing the SecretStr key shows the mask, never the secret itself."""
    gpt_router = GPTRouter(  # type: ignore[call-arg]
        gpt_router_api_base="https://example.com",
        gpt_router_api_key="secret-api-key",  # type: ignore[arg-type]
    )
    print(gpt_router.gpt_router_api_key, end="")  # noqa: T201
    captured = capsys.readouterr()

    assert captured.out == "**********"
def test_gpt_router_call() -> None:
    """Test valid call to GPTRouter."""
    anthropic_claude = GPTRouterModel(
        name="claude-instant-1.2", provider_name="anthropic"
    )
    # The priority list tells GPTRouter which backing model(s) to try, in order.
    chat = GPTRouter(models_priority_list=[anthropic_claude])
    message = HumanMessage(content="Hello World")
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
def test_gpt_router_call_incorrect_model() -> None:
    """Test invalid modelName"""
    anthropic_claude = GPTRouterModel(
        name="model_does_not_exist", provider_name="anthropic"
    )
    chat = GPTRouter(models_priority_list=[anthropic_claude])
    message = HumanMessage(content="Hello World")
    # A nonexistent model name must surface as an exception, not a silent retry.
    with pytest.raises(Exception):
        chat.invoke([message])
def test_gpt_router_generate() -> None:
    """Test generate method of GPTRouter."""
    anthropic_claude = GPTRouterModel(
        name="claude-instant-1.2", provider_name="anthropic"
    )
    chat = GPTRouter(models_priority_list=[anthropic_claude])
    chat_messages: List[List[BaseMessage]] = [
        [HumanMessage(content="If (5 + x = 18), what is x?")]
    ]
    # Keep a copy to verify generate() does not mutate its input lists.
    messages_copy = [messages.copy() for messages in chat_messages]
    result: LLMResult = chat.generate(chat_messages)
    assert isinstance(result, LLMResult)
    for response in result.generations[0]:
        assert isinstance(response, ChatGeneration)
        assert isinstance(response.text, str)
        assert response.text == response.message.content
    assert chat_messages == messages_copy
def test_gpt_router_streaming() -> None:
    """Test streaming tokens from GPTRouter."""
    anthropic_claude = GPTRouterModel(
        name="claude-instant-1.2", provider_name="anthropic"
    )
    chat = GPTRouter(models_priority_list=[anthropic_claude], streaming=True)
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
def test_gpt_router_streaming_callback() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    anthropic_claude = GPTRouterModel(
        name="claude-instant-1.2", provider_name="anthropic"
    )
    chat = GPTRouter(
        models_priority_list=[anthropic_claude],
        streaming=True,
        callback_manager=callback_manager,
        verbose=True,
    )
    message = HumanMessage(content="Write me a 5 line poem.")
    chat.invoke([message])
    # A multi-line completion should arrive as more than one streamed token.
    assert callback_handler.llm_streams > 1
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_reka.py | """Test Reka API wrapper."""
import logging
from typing import List
import pytest
from langchain_core.messages import (
AIMessage,
BaseMessage,
HumanMessage,
SystemMessage,
ToolMessage,
)
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_community.chat_models.reka import ChatReka
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
@pytest.mark.skip(
    reason="Dependency conflict w/ other dependencies for urllib3 versions."
)
def test_reka_call() -> None:
    """Test a simple call to Reka."""
    chat = ChatReka(model="reka-flash", verbose=True)
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
    logger.debug(f"Response content: {response.content}")
@pytest.mark.skip(
    reason="Dependency conflict w/ other dependencies for urllib3 versions."
)
def test_reka_generate() -> None:
    """Test the generate method of Reka."""
    chat = ChatReka(model="reka-flash", verbose=True)
    chat_messages: List[List[BaseMessage]] = [
        [HumanMessage(content="How many toes do dogs have?")]
    ]
    # Keep a copy to verify generate() does not mutate its input lists.
    messages_copy = [messages.copy() for messages in chat_messages]
    result: LLMResult = chat.generate(chat_messages)
    assert isinstance(result, LLMResult)
    for response in result.generations[0]:
        assert isinstance(response, ChatGeneration)
        assert isinstance(response.text, str)
        assert response.text == response.message.content
        logger.debug(f"Generated response: {response.text}")
    assert chat_messages == messages_copy
@pytest.mark.skip(
    reason="Dependency conflict w/ other dependencies for urllib3 versions."
)
def test_reka_streaming() -> None:
    """Test streaming tokens from Reka."""
    chat = ChatReka(model="reka-flash", streaming=True, verbose=True)
    message = HumanMessage(content="Tell me a story.")
    response = chat.invoke([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
    logger.debug(f"Streaming response content: {response.content}")
@pytest.mark.skip(
    reason="Dependency conflict w/ other dependencies for urllib3 versions."
)
def test_reka_streaming_callback() -> None:
    """Test that streaming correctly invokes callbacks."""
    callback_handler = FakeCallbackHandler()
    chat = ChatReka(
        model="reka-flash",
        streaming=True,
        callbacks=[callback_handler],
        verbose=True,
    )
    message = HumanMessage(content="Write me a sentence with 10 words.")
    chat.invoke([message])
    # A multi-word completion should arrive as more than one streamed token.
    assert callback_handler.llm_streams > 1
    logger.debug(f"Number of LLM streams: {callback_handler.llm_streams}")
@pytest.mark.skip(
    reason="Dependency conflict w/ other dependencies for urllib3 versions."
)
async def test_reka_async_streaming_callback() -> None:
    """Test asynchronous streaming with callbacks."""
    callback_handler = FakeCallbackHandler()
    chat = ChatReka(
        model="reka-flash",
        streaming=True,
        callbacks=[callback_handler],
        verbose=True,
    )
    chat_messages: List[BaseMessage] = [
        HumanMessage(content="How many toes do dogs have?")
    ]
    result: LLMResult = await chat.agenerate([chat_messages])
    # Streaming must fire token callbacks through the async generate path too.
    assert callback_handler.llm_streams > 1
    assert isinstance(result, LLMResult)
    for response in result.generations[0]:
        assert isinstance(response, ChatGeneration)
        assert isinstance(response.text, str)
        assert response.text == response.message.content
        logger.debug(f"Async generated response: {response.text}")
@pytest.mark.skip(
    reason="Dependency conflict w/ other dependencies for urllib3 versions."
)
def test_reka_tool_usage_integration() -> None:
    """Test tool usage with Reka API integration.

    Full loop: bind an availability-check tool, let the model request it,
    feed back a synthetic tool result, and assert the follow-up answer.
    """
    # Initialize the ChatReka model with tools and verbose logging
    chat_reka = ChatReka(model="reka-flash", verbose=True)
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_product_availability",
                "description": (
                    "Determine whether a product is currently in stock given "
                    "a product ID."
                ),
                "parameters": {
                    "type": "object",
                    "properties": {
                        "product_id": {
                            "type": "string",
                            "description": (
                                "The unique product ID to check availability for"
                            ),
                        },
                    },
                    "required": ["product_id"],
                },
            },
        },
    ]
    chat_reka_with_tools = chat_reka.bind_tools(tools)

    # Start a conversation
    messages: List[BaseMessage] = [
        HumanMessage(content="Is product A12345 in stock right now?")
    ]

    # Get the initial response
    response = chat_reka_with_tools.invoke(messages)
    assert isinstance(response, AIMessage)
    logger.debug(f"Initial AI message: {response.content}")

    # Check if the model wants to use a tool
    if "tool_calls" in response.additional_kwargs:
        tool_calls = response.additional_kwargs["tool_calls"]
        for tool_call in tool_calls:
            function_name = tool_call["function"]["name"]
            arguments = tool_call["function"]["arguments"]
            logger.debug(
                f"Tool call requested: {function_name} with arguments {arguments}"
            )

            # Simulate executing the tool
            tool_output = "AVAILABLE"

            tool_message = ToolMessage(
                content=tool_output, tool_call_id=tool_call["id"]
            )
            messages.append(response)
            messages.append(tool_message)

            final_response = chat_reka_with_tools.invoke(messages)
            assert isinstance(final_response, AIMessage)
            logger.debug(f"Final AI message: {final_response.content}")

            # Assert that the response message is non-empty
            assert final_response.content, "The final response content is empty."
    else:
        pytest.fail("The model did not request a tool.")
@pytest.mark.skip(
    reason="Dependency conflict w/ other dependencies for urllib3 versions."
)
def test_reka_system_message() -> None:
    """Test Reka with system message."""
    chat = ChatReka(model="reka-flash", verbose=True)
    messages = [
        SystemMessage(content="You are a helpful AI that speaks like Shakespeare."),
        HumanMessage(content="Tell me about the weather today."),
    ]
    response = chat.invoke(messages)
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
    logger.debug(f"Response with system message: {response.content}")
@pytest.mark.skip(
    reason="Dependency conflict w/ other dependencies for urllib3 versions."
)
def test_reka_system_message_multi_turn() -> None:
    """Test multi-turn conversation with system message."""
    chat = ChatReka(model="reka-flash", verbose=True)
    messages = [
        SystemMessage(content="You are a math tutor who explains concepts simply."),
        HumanMessage(content="What is a prime number?"),
    ]

    # First turn
    response1 = chat.invoke(messages)
    assert isinstance(response1, AIMessage)
    # Append the assistant reply so the second turn has full history.
    messages.append(response1)

    # Second turn
    messages.append(HumanMessage(content="Can you give me an example?"))
    response2 = chat.invoke(messages)
    assert isinstance(response2, AIMessage)

    logger.debug(f"First response: {response1.content}")
    logger.debug(f"Second response: {response2.content}")
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_azure_openai.py | """Test AzureChatOpenAI wrapper."""
import os
from typing import Any
import pytest
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, ChatResult, LLMResult
from langchain_community.chat_models import AzureChatOpenAI
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
OPENAI_API_VERSION = os.environ.get("AZURE_OPENAI_API_VERSION", "")
OPENAI_API_BASE = os.environ.get("AZURE_OPENAI_API_BASE", "")
OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY", "")
DEPLOYMENT_NAME = os.environ.get(
"AZURE_OPENAI_DEPLOYMENT_NAME",
os.environ.get("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", ""),
)
def _get_llm(**kwargs: Any) -> AzureChatOpenAI:
    """Build an AzureChatOpenAI client from the env-derived settings above.

    Extra keyword arguments are forwarded to the constructor unchanged.
    """
    return AzureChatOpenAI(  # type: ignore[call-arg]
        deployment_name=DEPLOYMENT_NAME,
        openai_api_version=OPENAI_API_VERSION,
        azure_endpoint=OPENAI_API_BASE,
        openai_api_key=OPENAI_API_KEY,
        **kwargs,
    )
@pytest.fixture
def llm() -> AzureChatOpenAI:
    """Shared AzureChatOpenAI instance capped at 10 output tokens.

    NOTE(review): the ``@pytest.mark.scheduled`` decorator was removed here —
    marks applied to fixtures have no effect, and recent pytest versions
    reject them; the mark belongs on the tests that use the fixture.
    """
    return _get_llm(
        max_tokens=10,
    )
def test_chat_openai(llm: AzureChatOpenAI) -> None:
    """Test AzureChatOpenAI wrapper."""
    # Minimal round-trip using the shared fixture instance.
    message = HumanMessage(content="Hello")
    response = llm.invoke([message])
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
@pytest.mark.scheduled
def test_chat_openai_generate() -> None:
    """generate() with n=2 returns two completions for each of two prompts."""
    model = _get_llm(max_tokens=10, n=2)
    prompt = [HumanMessage(content="Hello")]
    result = model.generate([prompt, prompt])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for per_prompt in result.generations:
        assert len(per_prompt) == 2
        for gen in per_prompt:
            assert isinstance(gen, ChatGeneration)
            assert isinstance(gen.text, str)
            assert gen.text == gen.message.content


@pytest.mark.scheduled
def test_chat_openai_multiple_completions() -> None:
    """_generate() with n=5 yields five chat generations for one prompt."""
    model = _get_llm(max_tokens=10, n=5)
    result = model._generate([HumanMessage(content="Hello")])
    assert isinstance(result, ChatResult)
    assert len(result.generations) == 5
    for gen in result.generations:
        assert isinstance(gen.message, BaseMessage)
        assert isinstance(gen.message.content, str)
@pytest.mark.scheduled
def test_chat_openai_streaming() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = _get_llm(
        max_tokens=10,
        streaming=True,
        temperature=0,
        callback_manager=callback_manager,
        verbose=True,
    )
    message = HumanMessage(content="Hello")
    response = chat.invoke([message])
    # llm_streams counts on_llm_new_token calls; > 0 proves streaming happened.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, BaseMessage)


@pytest.mark.scheduled
def test_chat_openai_streaming_generation_info() -> None:
    """Test that generation info is preserved when streaming."""

    class _FakeCallback(FakeCallbackHandler):
        # Class-level dict: shared across instances, which is fine here since
        # exactly one instance is created per test run.
        saved_things: dict = {}

        def on_llm_end(
            self,
            *args: Any,
            **kwargs: Any,
        ) -> Any:
            # Save the generation (first positional arg is the LLMResult).
            self.saved_things["generation"] = args[0]

    callback = _FakeCallback()
    callback_manager = CallbackManager([callback])
    chat = _get_llm(
        max_tokens=2,
        temperature=0,
        callback_manager=callback_manager,
    )
    list(chat.stream("hi"))
    generation = callback.saved_things["generation"]
    # `Hello!` is two tokens, assert that that is what is returned
    # NOTE(review): exact-text assertion is fragile — it depends on the model
    # deterministically answering "Hello!" at temperature=0.
    assert generation.generations[0][0].text == "Hello!"
@pytest.mark.scheduled
async def test_async_chat_openai() -> None:
    """Test async generation."""
    chat = _get_llm(max_tokens=10, n=2)
    message = HumanMessage(content="Hello")
    response = await chat.agenerate([[message], [message]])
    assert isinstance(response, LLMResult)
    # Two prompts in, two generation lists out; n=2 gives two candidates each.
    assert len(response.generations) == 2
    for generations in response.generations:
        assert len(generations) == 2
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content


@pytest.mark.scheduled
async def test_async_chat_openai_streaming() -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    chat = _get_llm(
        max_tokens=10,
        streaming=True,
        temperature=0,
        callback_manager=callback_manager,
        verbose=True,
    )
    message = HumanMessage(content="Hello")
    response = await chat.agenerate([[message], [message]])
    # Streaming must have emitted at least one token callback.
    assert callback_handler.llm_streams > 0
    assert isinstance(response, LLMResult)
    assert len(response.generations) == 2
    for generations in response.generations:
        # With streaming, only one candidate per prompt is produced here.
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
@pytest.mark.scheduled
def test_openai_streaming(llm: AzureChatOpenAI) -> None:
    """Each streamed chunk carries string content."""
    for chunk in llm.stream("I'm Pickle Rick"):
        assert isinstance(chunk.content, str)


@pytest.mark.scheduled
async def test_openai_astream(llm: AzureChatOpenAI) -> None:
    """Each async-streamed chunk carries string content."""
    async for chunk in llm.astream("I'm Pickle Rick"):
        assert isinstance(chunk.content, str)


@pytest.mark.scheduled
async def test_openai_abatch(llm: AzureChatOpenAI) -> None:
    """Async batch over two prompts returns messages with string content."""
    replies = await llm.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"])
    for reply in replies:
        assert isinstance(reply.content, str)


@pytest.mark.scheduled
async def test_openai_abatch_tags(llm: AzureChatOpenAI) -> None:
    """Async batch accepts a config with tags."""
    replies = await llm.abatch(
        ["I'm Pickle Rick", "I'm not Pickle Rick"], config={"tags": ["foo"]}
    )
    for reply in replies:
        assert isinstance(reply.content, str)


@pytest.mark.scheduled
def test_openai_batch(llm: AzureChatOpenAI) -> None:
    """Sync batch over two prompts returns messages with string content."""
    replies = llm.batch(["I'm Pickle Rick", "I'm not Pickle Rick"])
    for reply in replies:
        assert isinstance(reply.content, str)


@pytest.mark.scheduled
async def test_openai_ainvoke(llm: AzureChatOpenAI) -> None:
    """Async invoke accepts a tagged config and returns string content."""
    reply = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
    assert isinstance(reply.content, str)


@pytest.mark.scheduled
def test_openai_invoke(llm: AzureChatOpenAI) -> None:
    """Sync invoke accepts a tagged config and returns string content."""
    reply = llm.invoke("I'm Pickle Rick", config={"tags": ["foo"]})
    assert isinstance(reply.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_moonshot.py | """Test Moonshot Chat Model."""
from typing import Type, cast
import pytest
from langchain_core.language_models import BaseChatModel
from langchain_tests.integration_tests import ChatModelIntegrationTests
from pydantic import SecretStr
from langchain_community.chat_models.moonshot import MoonshotChat
class TestMoonshotChat(ChatModelIntegrationTests):
    """Run the standard LangChain chat-model integration suite against Moonshot."""

    @property
    def chat_model_class(self) -> Type[BaseChatModel]:
        # Class under test for the shared suite.
        return MoonshotChat

    @property
    def chat_model_params(self) -> dict:
        # Constructor kwargs the suite passes to every instantiation.
        return {"model": "moonshot-v1-8k"}

    @pytest.mark.xfail(reason="Not yet implemented.")
    def test_usage_metadata(self, model: BaseChatModel) -> None:
        # Usage metadata is not reported by MoonshotChat yet; keep the
        # inherited test but mark it expected-to-fail.
        super().test_usage_metadata(model)
def test_chat_moonshot_instantiate_with_alias() -> None:
    """The ``api_key`` alias populates the ``moonshot_api_key`` secret field."""
    secret = "your-api-key"
    chat = MoonshotChat(api_key=secret)  # type: ignore[call-arg]
    stored = cast(SecretStr, chat.moonshot_api_key)
    assert stored.get_secret_value() == secret
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_outlines.py | # flake8: noqa
"""Test ChatOutlines wrapper."""
from typing import Generator
import re
import platform
import pytest
from langchain_community.chat_models.outlines import ChatOutlines
from langchain_core.messages import AIMessage, HumanMessage, BaseMessage
from langchain_core.messages import BaseMessageChunk
from pydantic import BaseModel
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
MODEL = "microsoft/Phi-3-mini-4k-instruct"
LLAMACPP_MODEL = "bartowski/qwen2.5-7b-ins-v3-GGUF/qwen2.5-7b-ins-v3-Q4_K_M.gguf"

# Platform-dependent backend list: MLX runs only on macOS, while vLLM does
# not support macOS; transformers and llama.cpp run everywhere.
BACKENDS = ["transformers", "llamacpp"]
BACKENDS.append("mlxlm" if platform.system() == "Darwin" else "vllm")
@pytest.fixture(params=BACKENDS)
def chat_model(request: pytest.FixtureRequest) -> ChatOutlines:
    """Yield one ChatOutlines instance per configured backend."""
    backend = request.param
    # llama.cpp loads a GGUF file; every other backend uses the HF model id.
    model = LLAMACPP_MODEL if backend == "llamacpp" else MODEL
    return ChatOutlines(model=model, backend=backend)


def test_chat_outlines_inference(chat_model: ChatOutlines) -> None:
    """A plain prompt produces a non-trivial AIMessage."""
    output = chat_model.invoke([HumanMessage(content="Say foo:")])
    assert isinstance(output, AIMessage)
    assert len(output.content) > 1
def test_chat_outlines_streaming(chat_model: ChatOutlines) -> None:
    """Test streaming tokens from ChatOutlines."""
    messages = [HumanMessage(content="How do you say 'hello' in Spanish?")]
    generator = chat_model.stream(messages)
    stream_results_string = ""
    assert isinstance(generator, Generator)
    for chunk in generator:
        assert isinstance(chunk, BaseMessageChunk)
        if isinstance(chunk.content, str):
            stream_results_string += chunk.content
        else:
            # Chunks with non-string content are unexpected for this model.
            raise ValueError(
                f"Invalid content type, only str is supported, "
                f"got {type(chunk.content)}"
            )
    # The concatenated stream must contain actual text, not just whitespace.
    assert len(stream_results_string.strip()) > 1


def test_chat_outlines_streaming_callback(chat_model: ChatOutlines) -> None:
    """Test that streaming correctly invokes on_llm_new_token callback."""
    MIN_CHUNKS = 5
    callback_handler = FakeCallbackHandler()
    chat_model.callbacks = [callback_handler]
    chat_model.verbose = True
    messages = [HumanMessage(content="Can you count to 10?")]
    chat_model.invoke(messages)
    # Counting to 10 should stream at least MIN_CHUNKS token callbacks.
    assert callback_handler.llm_streams >= MIN_CHUNKS
def test_chat_outlines_regex(chat_model: ChatOutlines) -> None:
    """Test regex for generating a valid IP address"""
    # Dotted-quad pattern; each octet restricted to 0-255.
    ip_regex = r"((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)"
    chat_model.regex = ip_regex
    assert chat_model.regex == ip_regex
    messages = [HumanMessage(content="What is the IP address of Google's DNS server?")]
    output = chat_model.invoke(messages)
    assert isinstance(output, AIMessage)
    assert re.match(
        ip_regex, str(output.content)
    ), f"Generated output '{output.content}' is not a valid IP address"


def test_chat_outlines_type_constraints(chat_model: ChatOutlines) -> None:
    """Test type constraints for generating an integer"""
    chat_model.type_constraints = int
    messages = [
        HumanMessage(
            content="What is the answer to life, the universe, and everything?"
        )
    ]
    output = chat_model.invoke(messages)
    # Constrained generation must yield text parseable as an int.
    assert isinstance(int(str(output.content)), int)


def test_chat_outlines_json(chat_model: ChatOutlines) -> None:
    """Test json for generating a valid JSON object"""

    class Person(BaseModel):
        name: str

    chat_model.json_schema = Person
    messages = [HumanMessage(content="Who are the main contributors to LangChain?")]
    output = chat_model.invoke(messages)
    # The output must round-trip through the pydantic schema.
    person = Person.model_validate_json(str(output.content))
    assert isinstance(person, Person)
def test_chat_outlines_grammar(chat_model: ChatOutlines) -> None:
    """Test grammar for generating a valid arithmetic expression"""
    if chat_model.backend == "mlxlm":
        pytest.skip("MLX grammars not yet supported.")

    # Lark-style EBNF grammar for arithmetic expressions; this string is
    # consumed by the backend at runtime, so its content must stay intact.
    chat_model.grammar = """
        ?start: expression
        ?expression: term (("+" | "-") term)*
        ?term: factor (("*" | "/") factor)*
        ?factor: NUMBER | "-" factor | "(" expression ")"
        %import common.NUMBER
        %import common.WS
        %ignore WS
    """
    messages = [HumanMessage(content="Give me a complex arithmetic expression:")]
    output = chat_model.invoke(messages)

    # Validate the output is a non-empty string
    assert (
        isinstance(output.content, str) and output.content.strip()
    ), "Output should be a non-empty string"

    # Use a simple regex to check if the output contains basic arithmetic operations and numbers
    assert re.search(
        r"[\d\+\-\*/\(\)]+", output.content
    ), f"Generated output '{output.content}' does not appear to be a valid arithmetic expression"
def test_chat_outlines_with_structured_output(chat_model: ChatOutlines) -> None:
    """Test that ChatOutlines can generate structured outputs"""

    class AnswerWithJustification(BaseModel):
        """An answer to the user question along with justification for the answer."""

        answer: str
        justification: str

    # Without include_raw the parsed pydantic object is returned directly.
    structured_chat_model = chat_model.with_structured_output(AnswerWithJustification)
    result = structured_chat_model.invoke(
        "What weighs more, a pound of bricks or a pound of feathers?"
    )
    assert isinstance(result, AnswerWithJustification)
    assert isinstance(result.answer, str)
    assert isinstance(result.justification, str)
    assert len(result.answer) > 0
    assert len(result.justification) > 0

    # With include_raw=True a dict with raw message, parsed object, and any
    # parsing error is returned instead.
    structured_chat_model_with_raw = chat_model.with_structured_output(
        AnswerWithJustification, include_raw=True
    )
    result_with_raw = structured_chat_model_with_raw.invoke(
        "What weighs more, a pound of bricks or a pound of feathers?"
    )
    assert isinstance(result_with_raw, dict)
    assert "raw" in result_with_raw
    assert "parsed" in result_with_raw
    assert "parsing_error" in result_with_raw
    assert isinstance(result_with_raw["raw"], BaseMessage)
    assert isinstance(result_with_raw["parsed"], AnswerWithJustification)
    assert result_with_raw["parsing_error"] is None
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_tongyi.py | """Test Alibaba Tongyi Chat Model."""
from typing import Any, List, cast
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.messages.ai import AIMessageChunk
from langchain_core.messages.tool import ToolCall, ToolMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from pydantic import BaseModel, SecretStr
from pytest import CaptureFixture
from langchain_community.chat_models.tongyi import ChatTongyi
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
# OpenAI-style function/tool schema used by test_functions_call_thoughts.
_FUNCTIONS: Any = [
    {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    },
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                },
                "required": ["location"],
            },
        },
    }
]
def test_initialization() -> None:
    """Test chat model initialization."""
    # Both the aliased (model_name/api_key) and canonical
    # (model/dashscope_api_key) constructor spellings must work.
    for model in [
        ChatTongyi(model_name="qwen-turbo", api_key="xyz"),  # type: ignore[arg-type, call-arg]
        ChatTongyi(model="qwen-turbo", dashscope_api_key="xyz"),  # type: ignore[call-arg]
    ]:
        assert model.model_name == "qwen-turbo"
        assert cast(SecretStr, model.dashscope_api_key).get_secret_value() == "xyz"


def test_api_key_is_string() -> None:
    """The API key is stored wrapped in a SecretStr, not a plain string."""
    llm = ChatTongyi(dashscope_api_key="secret-api-key")  # type: ignore[call-arg]
    assert isinstance(llm.dashscope_api_key, SecretStr)


def test_api_key_masked_when_passed_via_constructor(
    capsys: CaptureFixture,
) -> None:
    """Printing the key must show the SecretStr mask, never the secret."""
    llm = ChatTongyi(dashscope_api_key="secret-api-key")  # type: ignore[call-arg]
    print(llm.dashscope_api_key, end="")  # noqa: T201
    captured = capsys.readouterr()
    assert captured.out == "**********"
def test_default_call() -> None:
    """A default-configured ChatTongyi answers a simple message."""
    model = ChatTongyi()  # type: ignore[call-arg]
    reply = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)


def test_model() -> None:
    """The ``model`` kwarg selects a non-default model."""
    model = ChatTongyi(model="qwen-plus")  # type: ignore[call-arg]
    reply = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_vision_model() -> None:
    """Test model kwarg works."""
    chat = ChatTongyi(model="qwen-vl-max")  # type: ignore[call-arg]
    # Multimodal content: an image part plus a text instruction.
    response = chat.invoke(
        [
            HumanMessage(
                content=[
                    {
                        "image": "https://python.langchain.com/v0.1/assets/images/run_details-806f6581cd382d4887a5bc3e8ac62569.png"
                    },
                    {"text": "Summarize the image"},
                ]
            )
        ]
    )
    assert isinstance(response, BaseMessage)
    # Vision models return list-structured content.
    assert isinstance(response.content, list)


def test_functions_call_thoughts() -> None:
    """Binding function schemas yields tool_calls in additional_kwargs."""
    chat = ChatTongyi(model="qwen-plus")  # type: ignore[call-arg]
    prompt_tmpl = "Use the given functions to answer following question: {input}"
    prompt_msgs = [
        HumanMessagePromptTemplate.from_template(prompt_tmpl),
    ]
    prompt = ChatPromptTemplate(messages=prompt_msgs)  # type: ignore[arg-type, call-arg]
    chain = prompt | chat.bind(functions=_FUNCTIONS)
    message = HumanMessage(content="What's the weather like in Shanghai today?")
    response = chain.batch([{"input": message}])
    assert isinstance(response[0], AIMessage)
    assert "tool_calls" in response[0].additional_kwargs
def test_multiple_history() -> None:
    """A multi-turn history (human/AI/human) produces a valid reply."""
    model = ChatTongyi()  # type: ignore[call-arg]
    history = [
        HumanMessage(content="Hello."),
        AIMessage(content="Hello!"),
        HumanMessage(content="How are you doing?"),
    ]
    reply = model.invoke(history)
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_stream() -> None:
    """Test that stream works."""
    chat = ChatTongyi(streaming=True)  # type: ignore[call-arg]
    callback_handler = FakeCallbackHandler()
    callback_manager = CallbackManager([callback_handler])
    response = chat.invoke(
        [
            HumanMessage(content="Hello."),
            AIMessage(content="Hello!"),
            HumanMessage(content="Who are you?"),
        ],
        stream=True,
        config={"callbacks": callback_manager},
    )
    # At least one on_llm_new_token callback proves streaming was used.
    assert callback_handler.llm_streams > 0
    assert isinstance(response.content, str)


def test_multiple_messages() -> None:
    """Tests multiple messages works."""
    chat = ChatTongyi()  # type: ignore[call-arg]
    message = HumanMessage(content="Hi, how are you.")
    response = chat.generate([[message], [message]])
    assert isinstance(response, LLMResult)
    # One generation list per input prompt, one candidate each.
    assert len(response.generations) == 2
    for generations in response.generations:
        assert len(generations) == 1
        for generation in generations:
            assert isinstance(generation, ChatGeneration)
            assert isinstance(generation.text, str)
            assert generation.text == generation.message.content
class GenerateUsername(BaseModel):
    "Get a username based on someone's name and hair color."

    # The docstring above doubles as the tool description sent to the model
    # when this schema is bound via bind_tools.
    name: str
    hair_color: str
def test_tool_use() -> None:
    """Full tool-calling round trip: invoke, feed tool result back, then repeat
    the same exchange in streaming mode."""
    llm = ChatTongyi(model="qwen-turbo", temperature=0)  # type: ignore
    llm_with_tool = llm.bind_tools(tools=[GenerateUsername])
    msgs: List = [
        HumanMessage(content="Sally has green hair, what would her username be?")
    ]
    ai_msg = llm_with_tool.invoke(msgs)
    assert isinstance(ai_msg, AIMessage)
    assert isinstance(ai_msg.tool_calls, list)
    assert len(ai_msg.tool_calls) == 1
    tool_call = ai_msg.tool_calls[0]
    assert "args" in tool_call
    # Simulate executing the tool and hand the result back to the model.
    tool_msg = ToolMessage(
        content="sally_green_hair",
        tool_call_id=ai_msg.tool_calls[0]["id"],  # type: ignore
        name=ai_msg.tool_calls[0]["name"],
    )
    msgs.extend([ai_msg, tool_msg])
    llm_with_tool.invoke(msgs)
    # Test streaming
    ai_messages = llm_with_tool.stream(msgs)
    first = True
    # Accumulate chunks via the + operator so tool-call deltas are merged.
    for message in ai_messages:
        if first:
            gathered = message
            first = False
        else:
            gathered = gathered + message  # type: ignore
    assert isinstance(gathered, AIMessageChunk)
    streaming_tool_msg = ToolMessage(
        content="sally_green_hair",
        name=tool_call["name"],
        # The API rejects empty tool_call_ids, so fall back to a space.
        tool_call_id=tool_call["id"] if tool_call["id"] else " ",
    )
    msgs.extend([gathered, streaming_tool_msg])
    llm_with_tool.invoke(msgs)
def test_manual_tool_call_msg() -> None:
    """Test passing in manually construct tool call message."""
    llm = ChatTongyi(model="qwen-turbo", temperature=0)  # type: ignore
    llm_with_tool = llm.bind_tools(tools=[GenerateUsername])
    # Hand-built conversation: question, assistant tool call, tool result.
    msgs: List = [
        HumanMessage(content="Sally has green hair, what would her username be?"),
        AIMessage(
            # Single-space content: the API requires non-empty content even
            # when the message only carries tool calls.
            content=" ",
            tool_calls=[
                ToolCall(
                    name="GenerateUsername",
                    args={"name": "Sally", "hair_color": "green"},
                    id="foo",
                )
            ],
        ),
        ToolMessage(content="sally_green_hair", tool_call_id="foo"),
    ]
    output: AIMessage = cast(AIMessage, llm_with_tool.invoke(msgs))
    assert output.content
    # Should not have called the tool again.
    assert not output.tool_calls and not output.invalid_tool_calls
class AnswerWithJustification(BaseModel):
    """An answer to the user question along with justification for the answer."""

    # Target schema for the with_structured_output tests below.
    answer: str
    justification: str
def test_chat_tongyi_with_structured_output() -> None:
    """with_structured_output returns a parsed pydantic object directly."""
    model = ChatTongyi()  # type: ignore
    structured = model.with_structured_output(AnswerWithJustification)
    answer = structured.invoke(
        "What weighs more a pound of bricks or a pound of feathers"
    )
    assert isinstance(answer, AnswerWithJustification)


def test_chat_tongyi_with_structured_output_include_raw() -> None:
    """include_raw=True wraps the result in a dict with raw and parsed keys."""
    model = ChatTongyi()  # type: ignore
    structured = model.with_structured_output(
        AnswerWithJustification, include_raw=True
    )
    answer = structured.invoke(
        "What weighs more a pound of bricks or a pound of feathers"
    )
    assert isinstance(answer, dict)
    assert isinstance(answer.get("raw"), AIMessage)
    assert isinstance(answer.get("parsed"), AnswerWithJustification)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_konko.py | """Evaluate ChatKonko Interface."""
from typing import Any, cast
import pytest
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
from langchain_core.outputs import ChatGeneration, ChatResult, LLMResult
from pydantic import SecretStr
from pytest import CaptureFixture, MonkeyPatch
from langchain_community.chat_models.konko import ChatKonko
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
def test_konko_key_masked_when_passed_from_env(
    monkeypatch: MonkeyPatch, capsys: CaptureFixture
) -> None:
    """Test initialization with an API key provided via an env variable"""
    monkeypatch.setenv("OPENAI_API_KEY", "test-openai-key")
    monkeypatch.setenv("KONKO_API_KEY", "test-konko-key")
    chat = ChatKonko()
    # Printing a SecretStr must emit the mask, never the secret itself.
    print(chat.openai_api_key, end="")  # noqa: T201
    captured = capsys.readouterr()
    assert captured.out == "**********"
    print(chat.konko_api_key, end="")  # noqa: T201
    captured = capsys.readouterr()
    assert captured.out == "**********"


def test_konko_key_masked_when_passed_via_constructor(
    capsys: CaptureFixture,
) -> None:
    """Test initialization with an API key provided via the initializer"""
    chat = ChatKonko(openai_api_key="test-openai-key", konko_api_key="test-konko-key")
    print(chat.konko_api_key, end="")  # noqa: T201
    captured = capsys.readouterr()
    assert captured.out == "**********"
    print(chat.konko_secret_key, end="")  # type: ignore[attr-defined] # noqa: T201
    captured = capsys.readouterr()
    assert captured.out == "**********"


def test_uses_actual_secret_value_from_secret_str() -> None:
    """Test that actual secret is retrieved using `.get_secret_value()`."""
    chat = ChatKonko(openai_api_key="test-openai-key", konko_api_key="test-konko-key")
    # NOTE(review): `konko_api_key` is asserted to hold the *openai* key and
    # `konko_secret_key` the konko key — this reflects ChatKonko's internal
    # field aliasing; confirm against the ChatKonko field definitions.
    assert cast(SecretStr, chat.konko_api_key).get_secret_value() == "test-openai-key"
    assert cast(SecretStr, chat.konko_secret_key).get_secret_value() == "test-konko-key"  # type: ignore[attr-defined]
def test_konko_chat_test() -> None:
    """A basic single-message round trip succeeds."""
    model = ChatKonko(max_tokens=10)
    reply = model.invoke([HumanMessage(content="Hi")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)


def test_konko_chat_test_openai() -> None:
    """A basic round trip succeeds with an explicitly selected model."""
    model = ChatKonko(max_tokens=10, model="meta-llama/llama-2-70b-chat")
    reply = model.invoke([HumanMessage(content="Hi")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)


def test_konko_model_test() -> None:
    """The ``model`` constructor argument is stored as given."""
    for name in ("alpha", "beta"):
        assert ChatKonko(model=name).model == name


def test_konko_available_model_test() -> None:
    """get_available_models returns a set of model identifiers."""
    model = ChatKonko(max_tokens=10, n=2)
    available = model.get_available_models()
    assert isinstance(available, set)


def test_konko_system_msg_test() -> None:
    """A system message followed by a user message yields a reply."""
    model = ChatKonko(max_tokens=10)
    conversation = [
        SystemMessage(content="Initiate user chat."),
        HumanMessage(content="Hi there"),
    ]
    reply = model.invoke(conversation)
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_konko_generation_test() -> None:
    """generate() with n=2 returns two candidates for each of two prompts."""
    model = ChatKonko(max_tokens=10, n=2)
    prompt = [HumanMessage(content="Hi")]
    result = model.generate([prompt, prompt])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for per_prompt in result.generations:
        assert len(per_prompt) == 2
        for candidate in per_prompt:
            assert isinstance(candidate, ChatGeneration)
            assert isinstance(candidate.text, str)
            assert candidate.text == candidate.message.content


def test_konko_multiple_outputs_test() -> None:
    """_generate() with n=5 yields five chat generations for one prompt."""
    model = ChatKonko(max_tokens=10, n=5)
    result = model._generate([HumanMessage(content="Hi")])
    assert isinstance(result, ChatResult)
    assert len(result.generations) == 5
    for candidate in result.generations:
        assert isinstance(candidate.message, BaseMessage)
        assert isinstance(candidate.message.content, str)
def test_konko_streaming_callback_test() -> None:
    """Evaluate streaming's token callback functionality."""
    callback_instance = FakeCallbackHandler()
    callback_mgr = CallbackManager([callback_instance])
    chat_instance = ChatKonko(
        max_tokens=10,
        streaming=True,
        temperature=0,
        callback_manager=callback_mgr,
        verbose=True,
    )
    msg = HumanMessage(content="Hi")
    chat_response = chat_instance.invoke([msg])
    # llm_streams counts on_llm_new_token calls; > 0 proves streaming occurred.
    assert callback_instance.llm_streams > 0
    assert isinstance(chat_response, BaseMessage)


def test_konko_streaming_info_test() -> None:
    """Ensure generation details are retained during streaming."""

    class TestCallback(FakeCallbackHandler):
        # Class-level dict shared across instances; only one instance exists
        # per test run, so this is acceptable here.
        data_store: dict = {}

        def on_llm_end(self, *args: Any, **kwargs: Any) -> Any:
            # First positional argument is the final LLMResult.
            self.data_store["generation"] = args[0]

    callback_instance = TestCallback()
    callback_mgr = CallbackManager([callback_instance])
    chat_instance = ChatKonko(
        max_tokens=2,
        temperature=0,
        callback_manager=callback_mgr,
    )
    list(chat_instance.stream("hey"))
    gen_data = callback_instance.data_store["generation"]
    # NOTE(review): exact-text assertion is fragile — depends on the model
    # deterministically replying " Hey" at temperature=0.
    assert gen_data.generations[0][0].text == " Hey"
def test_konko_llm_model_name_test() -> None:
    """llm_output reports the model name in non-streaming mode."""
    model = ChatKonko(max_tokens=10)
    result = model.generate([[HumanMessage(content="Hi")]])
    assert result.llm_output is not None
    assert result.llm_output["model_name"] == model.model


def test_konko_streaming_model_name_test() -> None:
    """llm_output reports the model name in streaming mode too."""
    model = ChatKonko(max_tokens=10, streaming=True)
    result = model.generate([[HumanMessage(content="Hi")]])
    assert result.llm_output is not None
    assert result.llm_output["model_name"] == model.model


def test_konko_streaming_param_validation_test() -> None:
    """Streaming combined with n > 1 is rejected at construction time."""
    with pytest.raises(ValueError):
        ChatKonko(
            n=5,
            streaming=True,
            temperature=0,
            max_tokens=10,
        )
def test_konko_additional_args_test() -> None:
    """Evaluate extra arguments for ChatKonko."""
    # Unknown constructor kwargs are collected into model_kwargs.
    chat_instance = ChatKonko(extra=3, max_tokens=10)  # type: ignore[call-arg]
    assert chat_instance.max_tokens == 10
    assert chat_instance.model_kwargs == {"extra": 3}

    # Explicit model_kwargs and collected extras are merged.
    chat_instance = ChatKonko(extra=3, model_kwargs={"addition": 2})  # type: ignore[call-arg]
    assert chat_instance.model_kwargs == {"extra": 3, "addition": 2}

    # The same key supplied both ways is a conflict.
    with pytest.raises(ValueError):
        ChatKonko(extra=3, model_kwargs={"extra": 2})  # type: ignore[call-arg]

    # First-class parameters may not be smuggled in via model_kwargs.
    with pytest.raises(ValueError):
        ChatKonko(model_kwargs={"temperature": 0.2})
    with pytest.raises(ValueError):
        ChatKonko(model_kwargs={"model": "gpt-3.5-turbo-instruct"})


def test_konko_token_streaming_test() -> None:
    """Check token streaming for ChatKonko."""
    chat_instance = ChatKonko(max_tokens=10)
    for token in chat_instance.stream("Just a test"):
        assert isinstance(token.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_baiduqianfan.py | from langchain_core.messages import AIMessage, HumanMessage, ToolMessage
from langchain_core.messages.tool import ToolCall
from langchain_core.tools import tool
from langchain_community.chat_models import QianfanChatEndpoint
@tool
def get_current_weather(location: str, unit: str = "摄氏度") -> str:
    # The docstring below is the tool description sent to the model at
    # runtime ("get the weather for the given location"); it is deliberately
    # kept in Chinese because translating it would change model behavior.
    """获取指定地点的天气"""
    return f"{location}是晴朗,25{unit}左右。"
def test_chat_qianfan_tool_result_to_model() -> None:
    """Test QianfanChatEndpoint invoke with tool_calling result."""
    # Pre-baked conversation: user question, assistant tool call, tool
    # result. The model should then produce a final natural-language answer.
    messages = [
        HumanMessage("上海天气怎么样?"),
        AIMessage(
            # Single-space content: the message only carries the tool call.
            content=" ",
            tool_calls=[
                ToolCall(
                    name="get_current_weather",
                    args={"location": "上海", "unit": "摄氏度"},
                    id="foo",
                    type="tool_call",
                ),
            ],
        ),
        ToolMessage(
            content="上海是晴天,25度左右。",
            tool_call_id="foo",
            name="get_current_weather",
        ),
    ]
    chat = QianfanChatEndpoint(model="ERNIE-3.5-8K")  # type: ignore[call-arg]
    llm_with_tool = chat.bind_tools([get_current_weather])
    response = llm_with_tool.invoke(messages)
    assert isinstance(response, AIMessage)
    print(response.content)  # noqa: T201
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_sambanova.py | from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.chat_models.sambanova import (
ChatSambaNovaCloud,
ChatSambaStudio,
)
def test_chat_sambanova_cloud() -> None:
    """Smoke-test a single round trip through SambaNova Cloud chat."""
    model = ChatSambaNovaCloud()
    reply = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)


def test_chat_sambastudio() -> None:
    """Smoke-test a single round trip through SambaStudio chat."""
    model = ChatSambaStudio()
    reply = model.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, AIMessage)
    assert isinstance(reply.content, str)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_litellm_standard.py | """Standard LangChain interface tests"""
from typing import Type
import pytest
from langchain_core.language_models import BaseChatModel
from langchain_tests.integration_tests import ChatModelIntegrationTests
from langchain_community.chat_models.litellm import ChatLiteLLM
class TestLiteLLMStandard(ChatModelIntegrationTests):
    """Run the standard LangChain chat-model integration suite against ChatLiteLLM."""

    @property
    def chat_model_class(self) -> Type[BaseChatModel]:
        # Class under test for the shared suite.
        return ChatLiteLLM

    @property
    def chat_model_params(self) -> dict:
        # Routed through a locally served Ollama Mistral model.
        return {"model": "ollama/mistral"}

    @pytest.mark.xfail(reason="Not yet implemented.")
    def test_usage_metadata(self, model: BaseChatModel) -> None:
        # Usage metadata is not reported by ChatLiteLLM yet; keep the
        # inherited test but mark it expected-to-fail.
        super().test_usage_metadata(model)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_minimax.py | import os
from langchain_core.messages import AIMessage, HumanMessage, ToolMessage
from langchain_core.tools import tool
from pydantic import BaseModel
from langchain_community.chat_models import MiniMaxChat
def test_chat_minimax_not_group_id() -> None:
    """MiniMaxChat works when MINIMAX_GROUP_ID is absent from the environment.

    Uses ``os.environ.pop(key, None)`` instead of the membership-check-then-del
    idiom: one call, no KeyError risk.
    """
    os.environ.pop("MINIMAX_GROUP_ID", None)
    chat = MiniMaxChat()  # type: ignore[call-arg]
    response = chat.invoke("你好呀")
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
def test_chat_minimax_with_stream() -> None:
    """Streaming yields AIMessage chunks whose content is a string."""
    model = MiniMaxChat()  # type: ignore[call-arg]
    for piece in model.stream("你好呀"):
        assert isinstance(piece, AIMessage)
        assert isinstance(piece.content, str)
@tool
def add(a: int, b: int) -> int:
    # The docstring doubles as the tool description the model sees.
    """Adds a and b."""
    return a + b


@tool
def multiply(a: int, b: int) -> int:
    # The docstring doubles as the tool description the model sees.
    """Multiplies a and b."""
    return a * b
def test_chat_minimax_with_tool() -> None:
    """Test MinimaxChat with bind tools."""
    chat = MiniMaxChat()  # type: ignore[call-arg]
    tools = [add, multiply]
    chat_with_tools = chat.bind_tools(tools)
    query = "What is 3 * 12?"
    messages = [HumanMessage(query)]
    ai_msg = chat_with_tools.invoke(messages)
    assert isinstance(ai_msg, AIMessage)
    assert isinstance(ai_msg.tool_calls, list)
    assert len(ai_msg.tool_calls) == 1
    tool_call = ai_msg.tool_calls[0]
    assert "args" in tool_call
    messages.append(ai_msg)  # type: ignore[arg-type]
    # Execute each requested tool locally and feed the results back.
    for tool_call in ai_msg.tool_calls:
        selected_tool = {"add": add, "multiply": multiply}[tool_call["name"].lower()]
        tool_output = selected_tool.invoke(tool_call["args"])  # type: ignore[attr-defined]
        messages.append(ToolMessage(tool_output, tool_call_id=tool_call["id"]))  # type: ignore[arg-type]
    # The model should now answer using the tool results.
    response = chat_with_tools.invoke(messages)
    assert isinstance(response, AIMessage)
class AnswerWithJustification(BaseModel):
    # NOTE: the docstring and field names below form the structured-output
    # schema sent to the model, so their wording is runtime behavior.
    """An answer to the user question along with justification for the answer."""

    # answer: the model's direct answer text
    answer: str
    # justification: the model's reasoning for that answer
    justification: str
def test_chat_minimax_with_structured_output() -> None:
    """MiniMaxChat should parse its reply into AnswerWithJustification."""
    model = MiniMaxChat()  # type: ignore
    structured = model.with_structured_output(AnswerWithJustification)
    result = structured.invoke(
        "What weighs more a pound of bricks or a pound of feathers"
    )
    assert isinstance(result, AnswerWithJustification)
def test_chat_tongyi_with_structured_output_include_raw() -> None:
    """MiniMaxChat structured output with ``include_raw=True``.

    NOTE(review): the name says "tongyi" but the body exercises MiniMaxChat —
    looks like a copy-paste artifact; renaming would change the collected
    test id, so it is only flagged here.
    """
    model = MiniMaxChat()  # type: ignore
    structured = model.with_structured_output(
        AnswerWithJustification, include_raw=True
    )
    result = structured.invoke(
        "What weighs more a pound of bricks or a pound of feathers"
    )
    assert isinstance(result, dict)
    assert isinstance(result.get("raw"), AIMessage)
    assert isinstance(result.get("parsed"), AnswerWithJustification)
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/test_qianfan_endpoint.py | """Test Baidu Qianfan Chat Endpoint."""
from typing import Any, cast
from langchain_core.callbacks import CallbackManager
from langchain_core.messages import (
AIMessage,
BaseMessage,
BaseMessageChunk,
FunctionMessage,
HumanMessage,
)
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from pydantic import SecretStr
from pytest import CaptureFixture, MonkeyPatch
from langchain_community.chat_models.baidu_qianfan_endpoint import (
QianfanChatEndpoint,
)
from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
# OpenAI-style function schemas for the ERNIE function-calling tests below:
# an output formatter ("format_person_info") and a temperature lookup
# ("get_current_temperature", which also declares a Qianfan-specific
# "responses" schema describing the function's return payload).
_FUNCTIONS: Any = [
    {
        "name": "format_person_info",
        "description": (
            "Output formatter. Should always be used to format your response to the"
            " user."
        ),
        "parameters": {
            "title": "Person",
            "description": "Identifying information about a person.",
            "type": "object",
            "properties": {
                "name": {
                    "title": "Name",
                    "description": "The person's name",
                    "type": "string",
                },
                "age": {
                    "title": "Age",
                    "description": "The person's age",
                    "type": "integer",
                },
                "fav_food": {
                    "title": "Fav Food",
                    "description": "The person's favorite food",
                    "type": "string",
                },
            },
            "required": ["name", "age"],
        },
    },
    {
        "name": "get_current_temperature",
        "description": ("Used to get the location's temperature."),
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "city name",
                },
                "unit": {
                    "type": "string",
                    "enum": ["centigrade", "Fahrenheit"],
                },
            },
            "required": ["location", "unit"],
        },
        # Qianfan extension: schema of the value the function returns.
        "responses": {
            "type": "object",
            "properties": {
                "temperature": {
                    "type": "integer",
                    "description": "city temperature",
                },
                "unit": {
                    "type": "string",
                    "enum": ["centigrade", "Fahrenheit"],
                },
            },
        },
    },
]
def test_initialization() -> None:
    """Both ``timeout`` and ``request_timeout`` should set request_timeout."""
    for endpoint in (
        QianfanChatEndpoint(model="BLOOMZ-7B", timeout=40),  # type: ignore[call-arg]
        QianfanChatEndpoint(model="BLOOMZ-7B", request_timeout=40),  # type: ignore[call-arg]
    ):
        assert endpoint.model == "BLOOMZ-7B"
        assert endpoint.request_timeout == 40
def test_default_call() -> None:
    """Test default model.invoke(`ERNIE-Bot`) call."""
    endpoint = QianfanChatEndpoint()  # type: ignore[call-arg]
    reply = endpoint.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_model() -> None:
    """Selecting a model via the constructor kwarg should work."""
    endpoint = QianfanChatEndpoint(model="BLOOMZ-7B")  # type: ignore[call-arg]
    reply = endpoint.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_model_param() -> None:
    """Selecting a model via a per-call kwarg should work."""
    endpoint = QianfanChatEndpoint()  # type: ignore[call-arg]
    reply = endpoint.invoke([HumanMessage(content="Hello")], model="BLOOMZ-7B")
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_endpoint() -> None:
    """Test user custom model deployments like some open source models."""
    endpoint = QianfanChatEndpoint(endpoint="qianfan_bloomz_7b_compressed")  # type: ignore[call-arg]
    reply = endpoint.invoke([HumanMessage(content="Hello")])
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_endpoint_param() -> None:
    """Test user custom model deployments like some open source models.

    Bug fix: ``endpoint`` was previously passed to the ``HumanMessage``
    constructor (where it is silently ignored — hence the old
    ``type: ignore[call-arg]``), so the custom deployment was never actually
    exercised. It is now passed to ``invoke`` as a per-call parameter,
    mirroring ``test_model_param`` above.
    """
    chat = QianfanChatEndpoint()  # type: ignore[call-arg]
    response = chat.invoke(
        [HumanMessage(content="Hello")],
        endpoint="qianfan_bloomz_7b_compressed",
    )
    assert isinstance(response, BaseMessage)
    assert isinstance(response.content, str)
def test_multiple_history() -> None:
    """A multi-turn history should produce a valid reply."""
    chat = QianfanChatEndpoint()  # type: ignore[call-arg]
    history = [
        HumanMessage(content="Hello."),
        AIMessage(content="Hello!"),
        HumanMessage(content="How are you doing?"),
    ]
    reply = chat.invoke(history)
    assert isinstance(reply, BaseMessage)
    assert isinstance(reply.content, str)
def test_chat_generate() -> None:
    """Tests chat generate works."""
    chat = QianfanChatEndpoint()  # type: ignore[call-arg]
    history = [
        HumanMessage(content="Hello."),
        AIMessage(content="Hello!"),
        HumanMessage(content="How are you doing?"),
    ]
    result = chat.generate([history])
    assert isinstance(result, LLMResult)
    for batch in result.generations:
        for gen in batch:
            assert isinstance(gen, ChatGeneration)
            assert isinstance(gen.text, str)
def test_stream() -> None:
    """Streaming should fire callbacks and yield at least one chunk."""
    chat = QianfanChatEndpoint(streaming=True)  # type: ignore[call-arg]
    handler = FakeCallbackHandler()
    manager = CallbackManager([handler])
    history = [
        HumanMessage(content="Hello."),
        AIMessage(content="Hello!"),
        HumanMessage(content="Who are you?"),
    ]
    # invoke with stream=True should route through the streaming path
    # and hit the callback handler at least once.
    reply = chat.invoke(history, stream=True, config={"callbacks": manager})
    assert handler.llm_streams > 0
    assert isinstance(reply.content, str)
    # Direct stream() should also yield at least one chunk.
    chunks = chat.stream(history)
    assert len(list(chunks)) >= 1
async def test_async_invoke() -> None:
    """Async invoke should return a non-empty BaseMessage."""
    chat = QianfanChatEndpoint()  # type: ignore[call-arg]
    reply = await chat.ainvoke([HumanMessage(content="Hello")])
    assert isinstance(reply, BaseMessage)
    assert reply.content != ""
async def test_async_generate() -> None:
    """Tests chat agenerate works."""
    chat = QianfanChatEndpoint()  # type: ignore[call-arg]
    history = [
        HumanMessage(content="Hello."),
        AIMessage(content="Hello!"),
        HumanMessage(content="How are you doing?"),
    ]
    result = await chat.agenerate([history])
    assert isinstance(result, LLMResult)
    for batch in result.generations:
        for gen in batch:
            assert isinstance(gen, ChatGeneration)
            assert isinstance(gen.text, str)
async def test_async_stream() -> None:
    """Async streaming should yield BaseMessageChunk tokens."""
    chat = QianfanChatEndpoint(streaming=True)  # type: ignore[call-arg]
    history = [
        HumanMessage(content="Hello."),
        AIMessage(content="Hello!"),
        HumanMessage(content="Who are you?"),
    ]
    async for chunk in chat.astream(history):
        assert isinstance(chunk, BaseMessageChunk)
def test_multiple_messages() -> None:
    """Tests multiple messages works."""
    chat = QianfanChatEndpoint()  # type: ignore[call-arg]
    prompt = HumanMessage(content="Hi, how are you.")
    # Two independent single-message conversations in one generate call.
    result = chat.generate([[prompt], [prompt]])
    assert isinstance(result, LLMResult)
    assert len(result.generations) == 2
    for batch in result.generations:
        assert len(batch) == 1
        for gen in batch:
            assert isinstance(gen, ChatGeneration)
            assert isinstance(gen.text, str)
            assert gen.text == gen.message.content
def test_functions_call_thoughts() -> None:
    """Function calling via batch should surface a function_call kwarg."""
    chat = QianfanChatEndpoint(model="ERNIE-Bot")  # type: ignore[call-arg]
    template = "Use the given functions to answer following question: {input}"
    prompt = ChatPromptTemplate(  # type: ignore[arg-type, call-arg]
        messages=[HumanMessagePromptTemplate.from_template(template)]
    )
    chain = prompt | chat.bind(functions=_FUNCTIONS)
    question = HumanMessage(content="What's the temperature in Shanghai today?")
    replies = chain.batch([{"input": question}])
    assert isinstance(replies[0], AIMessage)
    assert "function_call" in replies[0].additional_kwargs
def test_functions_call() -> None:
    # End-to-end function-call round trip: the user question, the model's
    # recorded function_call (including ERNIE-specific "thoughts"), and the
    # function's JSON result are replayed through the chain, which should
    # then produce a final AIMessage.
    chat = QianfanChatEndpoint(model="ERNIE-Bot")  # type: ignore[call-arg]
    prompt = ChatPromptTemplate(  # type: ignore[call-arg]
        messages=[
            HumanMessage(content="What's the temperature in Shanghai today?"),
            AIMessage(
                content="",
                additional_kwargs={
                    "function_call": {
                        "name": "get_current_temperature",
                        "thoughts": "i will use get_current_temperature "
                        "to resolve the questions",
                        "arguments": '{"location":"Shanghai","unit":"centigrade"}',
                    }
                },
            ),
            # NOTE(review): the name here is "get_current_weather" while the
            # declared function (and the call above) is
            # "get_current_temperature" — confirm whether the mismatch is
            # intentional.
            FunctionMessage(
                name="get_current_weather",
                content='{"temperature": "25", \
                "unit": "摄氏度", "description": "晴朗"}',
            ),
        ]
    )
    chain = prompt | chat.bind(functions=_FUNCTIONS)
    resp = chain.invoke({})
    assert isinstance(resp, AIMessage)
def test_rate_limit() -> None:
    # Client init kwargs passed via init_kwargs should reach the underlying
    # qianfan SDK rate limiter.
    chat = QianfanChatEndpoint(model="ERNIE-Bot", init_kwargs={"query_per_second": 2})  # type: ignore[call-arg]
    # Reaches into qianfan SDK private internals; 1.8 is presumably the
    # requested 2 qps scaled by an SDK safety factor (0.9) — confirm against
    # the SDK source if this assertion breaks on an SDK upgrade.
    assert (
        chat.client._client._rate_limiter._internal_qps_rate_limiter._sync_limiter._query_per_second
        == 1.8
    )
    # The limiter must not break normal batched usage.
    responses = chat.batch(
        [
            [HumanMessage(content="Hello")],
            [HumanMessage(content="who are you")],
            [HumanMessage(content="what is baidu")],
        ]
    )
    for res in responses:
        assert isinstance(res, BaseMessage)
        assert isinstance(res.content, str)
def test_qianfan_key_masked_when_passed_from_env(
    monkeypatch: MonkeyPatch, capsys: CaptureFixture
) -> None:
    """Keys read from env variables must print as masked SecretStr values."""
    monkeypatch.setenv("QIANFAN_AK", "test-api-key")
    monkeypatch.setenv("QIANFAN_SK", "test-secret-key")
    chat = QianfanChatEndpoint()  # type: ignore[call-arg]
    for secret in (chat.qianfan_ak, chat.qianfan_sk):
        print(secret, end="")  # noqa: T201
        assert capsys.readouterr().out == "**********"
def test_qianfan_key_masked_when_passed_via_constructor(
    capsys: CaptureFixture,
) -> None:
    """Keys given to the initializer must print as masked SecretStr values."""
    chat = QianfanChatEndpoint(  # type: ignore[call-arg]
        qianfan_ak="test-api-key",  # type: ignore[arg-type]
        qianfan_sk="test-secret-key",  # type: ignore[arg-type]
    )
    for secret in (chat.qianfan_ak, chat.qianfan_sk):
        print(secret, end="")  # noqa: T201
        assert capsys.readouterr().out == "**********"
def test_uses_actual_secret_value_from_secret_str() -> None:
    """``.get_secret_value()`` must recover the real key material."""
    endpoint = QianfanChatEndpoint(  # type: ignore[call-arg]
        qianfan_ak="test-api-key",  # type: ignore[arg-type]
        qianfan_sk="test-secret-key",  # type: ignore[arg-type]
    )
    ak = cast(SecretStr, endpoint.qianfan_ak)
    sk = cast(SecretStr, endpoint.qianfan_sk)
    assert ak.get_secret_value() == "test-api-key"
    assert sk.get_secret_value() == "test-secret-key"
def test_init_api_key_param() -> None:
    """Standard (api_key/secret_key) and legacy (qianfan_ak/qianfan_sk)
    constructor parameters should populate the same secret fields."""
    variants = [
        QianfanChatEndpoint(  # type: ignore[call-arg]
            api_key="test-api-key",  # type: ignore[arg-type]
            secret_key="test-secret-key",  # type: ignore[arg-type]
        ),
        QianfanChatEndpoint(  # type: ignore[call-arg]
            qianfan_ak="test-api-key",  # type: ignore[arg-type]
            qianfan_sk="test-secret-key",  # type: ignore[arg-type]
        ),
    ]
    for chat in variants:
        assert cast(SecretStr, chat.qianfan_ak).get_secret_value() == "test-api-key"
        assert cast(SecretStr, chat.qianfan_sk).get_secret_value() == "test-secret-key"
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/cassettes/TestChatKinetica.test_create_llm.yaml | interactions:
- request:
body: "\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/system/status
response:
body:
string: !!binary |
BE9LADZzaG93X3N5c3RlbV9zdGF0dXNfcmVzcG9uc2X8ugEUCmdyYXBovAl7ImNvdW50Ijo4LCJz
dGF0dXMiOlt7InZlcnNpb24iOjM5LCJzZXJ2ZXJfaWQiOjAsImhvc3RfaWQiOiIxNzIuMzEuMzMu
MzAiLCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjoxLCJob3N0
X2lkIjoiMTcyLjMxLjMzLjMxIiwic3RhdHVzIjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzksInNl
cnZlcl9pZCI6MiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zMiIsInN0YXR1cyI6InJ1bm5pbmcifSx7
InZlcnNpb24iOjM5LCJzZXJ2ZXJfaWQiOjMsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzMiLCJzdGF0
dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjo0LCJob3N0X2lkIjoiMTcy
LjMxLjMzLjM0Iiwic3RhdHVzIjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzksInNlcnZlcl9pZCI6
NSwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zNSIsInN0YXR1cyI6InJ1bm5pbmcifSx7InZlcnNpb24i
OjM5LCJzZXJ2ZXJfaWQiOjYsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzYiLCJzdGF0dXMiOiJydW5u
aW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjo3LCJob3N0X2lkIjoiMTcyLjMxLjMzLjM3
Iiwic3RhdHVzIjoicnVubmluZyJ9XX0eaGFfY2x1c3Rlcl9pbmZvzAN7ImhhX3JhbmtzX2luZm8i
Olt7InByaXZhdGVfdXJsIjoiaHR0cDovLzE3Mi4zMS4zMy4zMDo5MTkxIiwicHVibGljX3VybCI6
Imh0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5MSIsImhhX3VybF9vdmVycmlkZSI6Imh0dHA6Ly8xNzIu
MzEuMzMuMzA6OTE5MSIsImFsdGVybmF0ZV91cmxzIjpbImh0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5
MSJdfV0sInJlYWR5Ijp0cnVlLCJuYW1lIjoicHJvZHVjdGlvbi1uZXcifQpob3N0c/hLeyJjb3Vu
dCI6OCwic3RhdHVzIjpbeyJ2ZXJzaW9uIjoxMzMsImhvc3RfbnVtYmVyIjowLCJpZCI6IjE3Mi4z
MS4zMy4zMCIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzAtdjEwMCIsInN0YXR1cyI6InJ1bm5pbmci
LCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRzX3N0YXR1cyI6InJ1bm5pbmciLCJtbF9z
dGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJfc3RhdHVzIjoicnVubmluZyIsInJldmVh
bF9zdGF0dXMiOiJydW5uaW5nIiwidG90YWxfbWVtb3J5Ijo4MTAyMDEyNjAwMzIsInN0YXJ0X3Rp
bWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3Mi4zMS4zMy4zMCJdLCJncHVfaWRzIjpb
IkdQVS1hMDdjMGU0OC05NDhhLTQwNWMtNDNiNy03Mzc0ZDJkZGMwOGMiLCJHUFUtNTNlMWI3YjQt
NjhiOC1hMjhjLTEwZjMtZGI2YTA1OTdmYmI0IiwiR1BVLTQxZmY5MWZiLWVjYjktMGE1Yi1kNDdj
LWQ1YmI3ZWYxMDM4YSIsIkdQVS1hOTQwYjEyNi1iMWE0LTlmMDctNDRlMS02MzZjMzc5ODllY2Yi
XSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9zdF9yb2xlIjoibGVhZGVyIiwiaG9zdF90
ZXJtIjowLCJob3N0X2VsZWN0aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9u
IjoxMTMsImhvc3RfbnVtYmVyIjoxLCJpZCI6IjE3Mi4zMS4zMy4zMSIsImhvc3RuYW1lIjoiMzAw
LTMwMy11MzEtdjEwMCIsInN0YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxl
ZCIsInN0YXRzX3N0YXR1cyI6InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5
X3BsYW5uZXJfc3RhdHVzIjoic3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90
YWxfbWVtb3J5Ijo4MTAyMDEyMzU0NTYsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtf
aXBzIjpbIjE3Mi4zMS4zMy4zMSJdLCJncHVfaWRzIjpbIkdQVS0xMzJmMWRkYi1hNWY0LTIwZDMt
MTEyMi02ZDM1OTlhZmNmMWQiLCJHUFUtM2JjN2JmY2QtOTVhOS1hNjI2LThlOGYtMzdiOTcxOWFh
OWRkIiwiR1BVLWZlMTBhZDNkLTg4M2QtYTU5MC1kNDA1LWUwYTU2OTNiMGFmMCIsIkdQVS02Yjgy
OTY5OS0wYjRjLTEyZjAtMGMyOC04Y2Y0NmMyNGMxODUiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZh
bHNlIiwiaG9zdF9yb2xlIjoiZm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25f
c3RhdHVzIjoibGVhZGVyX2VsZWN0ZWQifSx7InZlcnNpb24iOjExMywiaG9zdF9udW1iZXIiOjIs
ImlkIjoiMTcyLjMxLjMzLjMyIiwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzMi12MTAwIiwic3RhdHVz
IjoicnVubmluZyIsImh0dHBkX3N0YXR1cyI6ImRpc2FibGVkIiwic3RhdHNfc3RhdHVzIjoicnVu
bmluZyIsIm1sX3N0YXR1cyI6ImRpc2FibGVkIiwicXVlcnlfcGxhbm5lcl9zdGF0dXMiOiJzdG9w
cGVkIiwicmV2ZWFsX3N0YXR1cyI6InN0b3BwZWQiLCJ0b3RhbF9tZW1vcnkiOjgxMDIwMTIzMTM2
MCwic3RhcnRfdGltZSI6MTcwODEwMzQ1MiwibmV0d29ya19pcHMiOlsiMTcyLjMxLjMzLjMyIl0s
ImdwdV9pZHMiOlsiR1BVLTA5MGM0NDMwLWRiYmUtMWUxYS03ZjdmLWExODI3ODNhZDIzMSIsIkdQ
VS1iZTE0YjVjZS1iNDExLTQ4Y2EtYTlmZi01YTA2YzdhNmYzOTgiLCJHUFUtNGIxYTU2ODgtMGU4
Yy1jYzk0LTgzM2ItYzJmMzllOTk1M2I4IiwiR1BVLTEyZWJhNDYzLTgzMmUtMTA4Yi1lY2IyLTVj
OWFmOGRhNjE2NCJdLCJhY2NlcHRzX2ZhaWxvdmVyIjoiZmFsc2UiLCJob3N0X3JvbGUiOiJmb2xs
b3dlciIsImhvc3RfdGVybSI6MCwiaG9zdF9lbGVjdGlvbl9zdGF0dXMiOiJsZWFkZXJfZWxlY3Rl
ZCJ9LHsidmVyc2lvbiI6MTEzLCJob3N0X251bWJlciI6MywiaWQiOiIxNzIuMzEuMzMuMzMiLCJo
b3N0bmFtZSI6IjMwMC0zMDMtdTMzLXYxMDAiLCJzdGF0dXMiOiJydW5uaW5nIiwiaHR0cGRfc3Rh
dHVzIjoiZGlzYWJsZWQiLCJzdGF0c19zdGF0dXMiOiJydW5uaW5nIiwibWxfc3RhdHVzIjoiZGlz
YWJsZWQiLCJxdWVyeV9wbGFubmVyX3N0YXR1cyI6InN0b3BwZWQiLCJyZXZlYWxfc3RhdHVzIjoi
c3RvcHBlZCIsInRvdGFsX21lbW9yeSI6ODEwMjAxMjM5NTUyLCJzdGFydF90aW1lIjoxNzA4MTAz
NDUyLCJuZXR3b3JrX2lwcyI6WyIxNzIuMzEuMzMuMzMiXSwiZ3B1X2lkcyI6WyJHUFUtMjYzMzFh
MDctMTc1Ni1mMDY2LTFlNWEtMzc1M2Y1ZTViYzc4IiwiR1BVLWE0MWFhMTg3LTQ1NmQtNjBiMy04
ZmM5LWI4YjMzZWFlMjFiYyIsIkdQVS01NWRlZjYxOS0wMTE2LWViZjctMzMwMy03ZDkzMmRmYzcw
ZmYiLCJHUFUtNGM5YWYzODgtYjlmYi03MWQ5LWZiNDUtODMwYTM4MTIwMGQzIl0sImFjY2VwdHNf
ZmFpbG92ZXIiOiJmYWxzZSIsImhvc3Rfcm9sZSI6ImZvbGxvd2VyIiwiaG9zdF90ZXJtIjowLCJo
b3N0X2VsZWN0aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9uIjoxMTMsImhv
c3RfbnVtYmVyIjo0LCJpZCI6IjE3Mi4zMS4zMy4zNCIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzQt
djEwMCIsInN0YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRz
X3N0YXR1cyI6InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJf
c3RhdHVzIjoic3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90YWxfbWVtb3J5
Ijo4MTAyMDEyMTA4ODAsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3
Mi4zMS4zMy4zNCJdLCJncHVfaWRzIjpbIkdQVS02OGUwOWNmYy1mOWE0LTJhMTQtNTdhNC05NDgz
YjkxYzJkOWEiLCJHUFUtM2RmMjE2ZTgtZmU3NC0wNDdhLTk1YWMtNzJlMmNiZWNiNTIyIiwiR1BV
LTE0ZDQ0Yjk4LWIwNDItY2I4MS0xZGQzLTIwZDRmNjljODljYSIsIkdQVS0zNDA2NzMwYi1iZWFk
LWM1MGEtNDZlYi1lMGEyYzJiZjZlNzYiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9z
dF9yb2xlIjoiZm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25fc3RhdHVzIjoi
bGVhZGVyX2VsZWN0ZWQifSx7InZlcnNpb24iOjExMywiaG9zdF9udW1iZXIiOjUsImlkIjoiMTcy
LjMxLjMzLjM1IiwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzNS12MTAwIiwic3RhdHVzIjoicnVubmlu
ZyIsImh0dHBkX3N0YXR1cyI6ImRpc2FibGVkIiwic3RhdHNfc3RhdHVzIjoicnVubmluZyIsIm1s
X3N0YXR1cyI6ImRpc2FibGVkIiwicXVlcnlfcGxhbm5lcl9zdGF0dXMiOiJzdG9wcGVkIiwicmV2
ZWFsX3N0YXR1cyI6InN0b3BwZWQiLCJ0b3RhbF9tZW1vcnkiOjgxMDIwMTI0Nzc0NCwic3RhcnRf
dGltZSI6MTcwODEwMzQ1MiwibmV0d29ya19pcHMiOlsiMTcyLjMxLjMzLjM1Il0sImdwdV9pZHMi
OlsiR1BVLWY3MzAyYWJjLTllYWEtMTRjOS1mNDI2LTE2M2RmM2RhOGMyNiIsIkdQVS03N2RkN2Q0
OC1mOTgwLWZkMDYtNzIyYy0xYzViOTMyMTgyMDMiLCJHUFUtNTUyYjIwYTUtNTdlNi00OTg2LWJl
MmItMmIzNzhmZDRiY2FhIiwiR1BVLTYzMDUzMTYyLTMwN2YtNTVjNS1hOTc0LTU4ZGZlODQzNDJi
MiJdLCJhY2NlcHRzX2ZhaWxvdmVyIjoiZmFsc2UiLCJob3N0X3JvbGUiOiJmb2xsb3dlciIsImhv
c3RfdGVybSI6MCwiaG9zdF9lbGVjdGlvbl9zdGF0dXMiOiJsZWFkZXJfZWxlY3RlZCJ9LHsidmVy
c2lvbiI6MTEzLCJob3N0X251bWJlciI6NiwiaWQiOiIxNzIuMzEuMzMuMzYiLCJob3N0bmFtZSI6
IjMwMC0zMDMtdTM2LXYxMDAiLCJzdGF0dXMiOiJydW5uaW5nIiwiaHR0cGRfc3RhdHVzIjoiZGlz
YWJsZWQiLCJzdGF0c19zdGF0dXMiOiJydW5uaW5nIiwibWxfc3RhdHVzIjoiZGlzYWJsZWQiLCJx
dWVyeV9wbGFubmVyX3N0YXR1cyI6InN0b3BwZWQiLCJyZXZlYWxfc3RhdHVzIjoic3RvcHBlZCIs
InRvdGFsX21lbW9yeSI6ODEwMjAxMjIzMTY4LCJzdGFydF90aW1lIjoxNzA4MTAzNDUyLCJuZXR3
b3JrX2lwcyI6WyIxNzIuMzEuMzMuMzYiXSwiZ3B1X2lkcyI6WyJHUFUtOGU5ZDRhZGItYzYxMS04
MmYwLTNmZTQtZjFmMmMzZTZhNDRmIiwiR1BVLTRjNmFiYWM5LTc0ZWMtZjc4Yy1mYWE5LTQ3NDZk
MDA1N2FiOCIsIkdQVS05NjI1YTc4Yy1lMmFlLTdmMDktOWNjZi1lZTA0OTk1MjYzMTAiLCJHUFUt
YWI3NTRhM2MtNjA4Ni1iYjUxLWU5NGEtM2NmNGExNDkwNWJhIl0sImFjY2VwdHNfZmFpbG92ZXIi
OiJmYWxzZSIsImhvc3Rfcm9sZSI6ImZvbGxvd2VyIiwiaG9zdF90ZXJtIjowLCJob3N0X2VsZWN0
aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9uIjoxMTMsImhvc3RfbnVtYmVy
Ijo3LCJpZCI6IjE3Mi4zMS4zMy4zNyIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzctdjEwMCIsInN0
YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRzX3N0YXR1cyI6
InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJfc3RhdHVzIjoi
c3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90YWxfbWVtb3J5Ijo4MTAyMDEy
MzU0NTYsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3Mi4zMS4zMy4z
NyJdLCJncHVfaWRzIjpbIkdQVS1lYTE4ZDM4OC1lMmIzLTQyODMtYTZiNS1hMzIzYTQ4NzI1YTki
LCJHUFUtZGNkODRlZmItOTRjYS1iNDk3LThjMTUtN2EzMjY5NDBjMWViIiwiR1BVLTkxNjAxYTVl
LTk3M2YtZDFlNC02ZTFkLWY2NTUyOTRkMzQ2MCIsIkdQVS0wN2ZhNGRiNi05ZTU1LWI2MWYtOTA5
YS04NWM0ZDFiZWIwODgiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9zdF9yb2xlIjoi
Zm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25fc3RhdHVzIjoibGVhZGVyX2Vs
ZWN0ZWQifV19Fmh0dHBfc2VydmVyoAN7ImNvbm5lY3Rpb25zIjp7ImN1cnJlbnQiOjEsIm1heF9j
b25jdXJyZW50IjoxMTYsInF1ZXVlZCI6MCwibWF4X3F1ZXVlZF9hbGxvd2VkIjo2NTUzNiwidG90
YWwiOjMyNTg5MywicmVmdXNlZCI6MCwidGhyZWFkcyI6Mn0sInRocmVhZHMiOnsidXNlZCI6Miwi
Y2FwYWNpdHkiOjUxMiwiYWxsb2NhdGVkIjo4LCJhdmFpbGFibGUiOjUxMCwic3RhY2tfc2l6ZSI6
MH19FG1pZ3JhdGlvbnMueyJjb3VudCI6MCwic3RhdHVzIjpbXX0KcmFua3PgUnsiY291bnQiOjks
InN0YXR1cyI6W3sidmVyc2lvbiI6MTE4LCJyYW5rIjowLCJyYW5rX2lkIjoiMCA6IDE3Mi4zMS4z
My4zMCA6IDMyNzc0MDYiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0YXR1cyI6InJ1bm5pbmci
LCJyZW1vdmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6IjcuMi4wLjEuMjAyNDAyMTQy
MTA5MDYiLCJwaWQiOjMyNzc0MDYsInN0YXJ0X3RpbWUiOjE3MDg0NDM2NTYsInN0YXJ0X3RpbWVf
c3RyIjoiVHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291bnQiOjEsImFjY2VwdGlu
Z19qb2JzIjp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6ZmFsc2UsImhvc3RuYW1l
IjoiMzAwLTMwMy11MzAtdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzAiLCJncHVfaWRzIjpb
IkdQVS1hMDdjMGU0OC05NDhhLTQwNWMtNDNiNy03Mzc0ZDJkZGMwOGMiXSwiZ3B1X2luZGV4Ijow
LCJncHVfaW5kaWNlcyI6WzBdfSx7InZlcnNpb24iOjEwMiwicmFuayI6MSwicmFua19pZCI6IjEg
OiAxNzIuMzEuMzMuMzAgOiAzMjc4NTYwIiwicmFua19tb2RlIjoicnVuIiwicmFua19zdGF0dXMi
OiJydW5uaW5nIiwicmVtb3ZhbF9zdGF0dXMiOiJub25lIiwiYXBwX3ZlcnNpb24iOiI3LjIuMC4x
LjIwMjQwMjE0MjEwOTA2IiwicGlkIjozMjc4NTYwLCJzdGFydF90aW1lIjoxNzA4NDQzNjU2LCJz
dGFydF90aW1lX3N0ciI6IlR1ZSBGZWIgMjAgMTU6NDA6NTYgMjAyNCIsInN0YXJ0X2NvdW50Ijox
LCJhY2NlcHRpbmdfam9icyI6dHJ1ZSwibmV3X3JhbmsiOmZhbHNlLCJyZWFkX29ubHkiOmZhbHNl
LCJob3N0bmFtZSI6IjMwMC0zMDMtdTMwLXYxMDAiLCJob3N0X2lkIjoiMTcyLjMxLjMzLjMwIiwi
Z3B1X2lkcyI6WyJHUFUtYTA3YzBlNDgtOTQ4YS00MDVjLTQzYjctNzM3NGQyZGRjMDhjIiwiR1BV
LTUzZTFiN2I0LTY4YjgtYTI4Yy0xMGYzLWRiNmEwNTk3ZmJiNCIsIkdQVS00MWZmOTFmYi1lY2I5
LTBhNWItZDQ3Yy1kNWJiN2VmMTAzOGEiLCJHUFUtYTk0MGIxMjYtYjFhNC05ZjA3LTQ0ZTEtNjM2
YzM3OTg5ZWNmIl0sImdwdV9pbmRleCI6MCwiZ3B1X2luZGljZXMiOlswLDEsMiwzXX0seyJ2ZXJz
aW9uIjoxMDIsInJhbmsiOjIsInJhbmtfaWQiOiIyIDogMTcyLjMxLjMzLjMxIDogMTU0NDUxOSIs
InJhbmtfbW9kZSI6InJ1biIsInJhbmtfc3RhdHVzIjoicnVubmluZyIsInJlbW92YWxfc3RhdHVz
Ijoibm9uZSIsImFwcF92ZXJzaW9uIjoiNy4yLjAuMS4yMDI0MDIxNDIxMDkwNiIsInBpZCI6MTU0
NDUxOSwic3RhcnRfdGltZSI6MTcwODQ0MzY1Niwic3RhcnRfdGltZV9zdHIiOiJUdWUgRmViIDIw
IDE1OjQwOjU2IDIwMjQiLCJzdGFydF9jb3VudCI6MSwiYWNjZXB0aW5nX2pvYnMiOnRydWUsIm5l
d19yYW5rIjpmYWxzZSwicmVhZF9vbmx5IjpmYWxzZSwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzMS12
MTAwIiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zMSIsImdwdV9pZHMiOlsiR1BVLTEzMmYxZGRiLWE1
ZjQtMjBkMy0xMTIyLTZkMzU5OWFmY2YxZCIsIkdQVS0zYmM3YmZjZC05NWE5LWE2MjYtOGU4Zi0z
N2I5NzE5YWE5ZGQiLCJHUFUtZmUxMGFkM2QtODgzZC1hNTkwLWQ0MDUtZTBhNTY5M2IwYWYwIiwi
R1BVLTZiODI5Njk5LTBiNGMtMTJmMC0wYzI4LThjZjQ2YzI0YzE4NSJdLCJncHVfaW5kZXgiOjAs
ImdwdV9pbmRpY2VzIjpbMCwxLDIsM119LHsidmVyc2lvbiI6MTAwLCJyYW5rIjozLCJyYW5rX2lk
IjoiMyA6IDE3Mi4zMS4zMy4zMiA6IDE0Mjk2ODUiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0
YXR1cyI6InJ1bm5pbmciLCJyZW1vdmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6Ijcu
Mi4wLjEuMjAyNDAyMTQyMTA5MDYiLCJwaWQiOjE0Mjk2ODUsInN0YXJ0X3RpbWUiOjE3MDg0NDM2
NTYsInN0YXJ0X3RpbWVfc3RyIjoiVHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291
bnQiOjEsImFjY2VwdGluZ19qb2JzIjp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6
ZmFsc2UsImhvc3RuYW1lIjoiMzAwLTMwMy11MzItdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMu
MzIiLCJncHVfaWRzIjpbIkdQVS0wOTBjNDQzMC1kYmJlLTFlMWEtN2Y3Zi1hMTgyNzgzYWQyMzEi
LCJHUFUtYmUxNGI1Y2UtYjQxMS00OGNhLWE5ZmYtNWEwNmM3YTZmMzk4IiwiR1BVLTRiMWE1Njg4
LTBlOGMtY2M5NC04MzNiLWMyZjM5ZTk5NTNiOCIsIkdQVS0xMmViYTQ2My04MzJlLTEwOGItZWNi
Mi01YzlhZjhkYTYxNjQiXSwiZ3B1X2luZGV4IjowLCJncHVfaW5kaWNlcyI6WzAsMSwyLDNdfSx7
InZlcnNpb24iOjExNiwicmFuayI6NCwicmFua19pZCI6IjQgOiAxNzIuMzEuMzMuMzMgOiAxNjA4
NzYxIiwicmFua19tb2RlIjoicnVuIiwicmFua19zdGF0dXMiOiJydW5uaW5nIiwicmVtb3ZhbF9z
dGF0dXMiOiJub25lIiwiYXBwX3ZlcnNpb24iOiI3LjIuMC4xLjIwMjQwMjE0MjEwOTA2IiwicGlk
IjoxNjA4NzYxLCJzdGFydF90aW1lIjoxNzA4NDQzNjU1LCJzdGFydF90aW1lX3N0ciI6IlR1ZSBG
ZWIgMjAgMTU6NDA6NTUgMjAyNCIsInN0YXJ0X2NvdW50IjoxLCJhY2NlcHRpbmdfam9icyI6dHJ1
ZSwibmV3X3JhbmsiOmZhbHNlLCJyZWFkX29ubHkiOmZhbHNlLCJob3N0bmFtZSI6IjMwMC0zMDMt
dTMzLXYxMDAiLCJob3N0X2lkIjoiMTcyLjMxLjMzLjMzIiwiZ3B1X2lkcyI6WyJHUFUtMjYzMzFh
MDctMTc1Ni1mMDY2LTFlNWEtMzc1M2Y1ZTViYzc4IiwiR1BVLWE0MWFhMTg3LTQ1NmQtNjBiMy04
ZmM5LWI4YjMzZWFlMjFiYyIsIkdQVS01NWRlZjYxOS0wMTE2LWViZjctMzMwMy03ZDkzMmRmYzcw
ZmYiLCJHUFUtNGM5YWYzODgtYjlmYi03MWQ5LWZiNDUtODMwYTM4MTIwMGQzIl0sImdwdV9pbmRl
eCI6MCwiZ3B1X2luZGljZXMiOlswLDEsMiwzXX0seyJ2ZXJzaW9uIjoxMDIsInJhbmsiOjUsInJh
bmtfaWQiOiI1IDogMTcyLjMxLjMzLjM0IDogMTY2MDEwMiIsInJhbmtfbW9kZSI6InJ1biIsInJh
bmtfc3RhdHVzIjoicnVubmluZyIsInJlbW92YWxfc3RhdHVzIjoibm9uZSIsImFwcF92ZXJzaW9u
IjoiNy4yLjAuMS4yMDI0MDIxNDIxMDkwNiIsInBpZCI6MTY2MDEwMiwic3RhcnRfdGltZSI6MTcw
ODQ0MzY1Niwic3RhcnRfdGltZV9zdHIiOiJUdWUgRmViIDIwIDE1OjQwOjU2IDIwMjQiLCJzdGFy
dF9jb3VudCI6MSwiYWNjZXB0aW5nX2pvYnMiOnRydWUsIm5ld19yYW5rIjpmYWxzZSwicmVhZF9v
bmx5IjpmYWxzZSwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzNC12MTAwIiwiaG9zdF9pZCI6IjE3Mi4z
MS4zMy4zNCIsImdwdV9pZHMiOlsiR1BVLTY4ZTA5Y2ZjLWY5YTQtMmExNC01N2E0LTk0ODNiOTFj
MmQ5YSIsIkdQVS0zZGYyMTZlOC1mZTc0LTA0N2EtOTVhYy03MmUyY2JlY2I1MjIiLCJHUFUtMTRk
NDRiOTgtYjA0Mi1jYjgxLTFkZDMtMjBkNGY2OWM4OWNhIiwiR1BVLTM0MDY3MzBiLWJlYWQtYzUw
YS00NmViLWUwYTJjMmJmNmU3NiJdLCJncHVfaW5kZXgiOjAsImdwdV9pbmRpY2VzIjpbMCwxLDIs
M119LHsidmVyc2lvbiI6MTAyLCJyYW5rIjo2LCJyYW5rX2lkIjoiNiA6IDE3Mi4zMS4zMy4zNSA6
IDEzNzk3MTUiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0YXR1cyI6InJ1bm5pbmciLCJyZW1v
dmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6IjcuMi4wLjEuMjAyNDAyMTQyMTA5MDYi
LCJwaWQiOjEzNzk3MTUsInN0YXJ0X3RpbWUiOjE3MDg0NDM2NTYsInN0YXJ0X3RpbWVfc3RyIjoi
VHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291bnQiOjEsImFjY2VwdGluZ19qb2Jz
Ijp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6ZmFsc2UsImhvc3RuYW1lIjoiMzAw
LTMwMy11MzUtdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzUiLCJncHVfaWRzIjpbIkdQVS1m
NzMwMmFiYy05ZWFhLTE0YzktZjQyNi0xNjNkZjNkYThjMjYiLCJHUFUtNzdkZDdkNDgtZjk4MC1m
ZDA2LTcyMmMtMWM1YjkzMjE4MjAzIiwiR1BVLTU1MmIyMGE1LTU3ZTYtNDk4Ni1iZTJiLTJiMzc4
ZmQ0YmNhYSIsIkdQVS02MzA1MzE2Mi0zMDdmLTU1YzUtYTk3NC01OGRmZTg0MzQyYjIiXSwiZ3B1
X2luZGV4IjowLCJncHVfaW5kaWNlcyI6WzAsMSwyLDNdfSx7InZlcnNpb24iOjEwMiwicmFuayI6
NywicmFua19pZCI6IjcgOiAxNzIuMzEuMzMuMzYgOiAxMzc5NTM5IiwicmFua19tb2RlIjoicnVu
IiwicmFua19zdGF0dXMiOiJydW5uaW5nIiwicmVtb3ZhbF9zdGF0dXMiOiJub25lIiwiYXBwX3Zl
cnNpb24iOiI3LjIuMC4xLjIwMjQwMjE0MjEwOTA2IiwicGlkIjoxMzc5NTM5LCJzdGFydF90aW1l
IjoxNzA4NDQzNjU2LCJzdGFydF90aW1lX3N0ciI6IlR1ZSBGZWIgMjAgMTU6NDA6NTYgMjAyNCIs
InN0YXJ0X2NvdW50IjoxLCJhY2NlcHRpbmdfam9icyI6dHJ1ZSwibmV3X3JhbmsiOmZhbHNlLCJy
ZWFkX29ubHkiOmZhbHNlLCJob3N0bmFtZSI6IjMwMC0zMDMtdTM2LXYxMDAiLCJob3N0X2lkIjoi
MTcyLjMxLjMzLjM2IiwiZ3B1X2lkcyI6WyJHUFUtOGU5ZDRhZGItYzYxMS04MmYwLTNmZTQtZjFm
MmMzZTZhNDRmIiwiR1BVLTRjNmFiYWM5LTc0ZWMtZjc4Yy1mYWE5LTQ3NDZkMDA1N2FiOCIsIkdQ
VS05NjI1YTc4Yy1lMmFlLTdmMDktOWNjZi1lZTA0OTk1MjYzMTAiLCJHUFUtYWI3NTRhM2MtNjA4
Ni1iYjUxLWU5NGEtM2NmNGExNDkwNWJhIl0sImdwdV9pbmRleCI6MCwiZ3B1X2luZGljZXMiOlsw
LDEsMiwzXX0seyJ2ZXJzaW9uIjoxMDIsInJhbmsiOjgsInJhbmtfaWQiOiI4IDogMTcyLjMxLjMz
LjM3IDogMTM5Nzg5MyIsInJhbmtfbW9kZSI6InJ1biIsInJhbmtfc3RhdHVzIjoicnVubmluZyIs
InJlbW92YWxfc3RhdHVzIjoibm9uZSIsImFwcF92ZXJzaW9uIjoiNy4yLjAuMS4yMDI0MDIxNDIx
MDkwNiIsInBpZCI6MTM5Nzg5Mywic3RhcnRfdGltZSI6MTcwODQ0MzY1Niwic3RhcnRfdGltZV9z
dHIiOiJUdWUgRmViIDIwIDE1OjQwOjU2IDIwMjQiLCJzdGFydF9jb3VudCI6MSwiYWNjZXB0aW5n
X2pvYnMiOnRydWUsIm5ld19yYW5rIjpmYWxzZSwicmVhZF9vbmx5IjpmYWxzZSwiaG9zdG5hbWUi
OiIzMDAtMzAzLXUzNy12MTAwIiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zNyIsImdwdV9pZHMiOlsi
R1BVLWVhMThkMzg4LWUyYjMtNDI4My1hNmI1LWEzMjNhNDg3MjVhOSIsIkdQVS1kY2Q4NGVmYi05
NGNhLWI0OTctOGMxNS03YTMyNjk0MGMxZWIiLCJHUFUtOTE2MDFhNWUtOTczZi1kMWU0LTZlMWQt
ZjY1NTI5NGQzNDYwIiwiR1BVLTA3ZmE0ZGI2LTllNTUtYjYxZi05MDlhLTg1YzRkMWJlYjA4OCJd
LCJncHVfaW5kZXgiOjAsImdwdV9pbmRpY2VzIjpbMCwxLDIsM119XX0Oc3ltYm9scxZ7ImNvdW50
IjoxfQxzeXN0ZW2qA3siaWQiOiJLaW5ldGljYSAzMDAtMzAzLXUzMC12MTAwIiwic3RhcnRfdGlt
ZSI6MTcwODEwMzQ1Miwic3RhdHVzIjoicnVubmluZyIsImNsdXN0ZXJfbGVhZGVyIjoiMTcyLjMx
LjMzLjMwIiwidmVyc2lvbiI6MjUsImNsdXN0ZXJfb3BlcmF0aW9uX3J1bm5pbmciOiJmYWxzZSIs
ImNsdXN0ZXJfb3BlcmF0aW9uX3N0YXR1cyI6IiIsIm9mZmxpbmVfc3RhdHVzIjoiZmFsc2UifQh0
ZXh03AV7ImNvdW50Ijo4LCJzdGF0dXMiOlt7InZlcnNpb24iOjMzLCJyYW5rIjoxLCJzdGF0dXMi
OiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywicmFuayI6Miwic3RhdHVzIjoicnVubmluZyJ9LHsi
dmVyc2lvbiI6MzMsInJhbmsiOjMsInN0YXR1cyI6InJ1bm5pbmcifSx7InZlcnNpb24iOjMzLCJy
YW5rIjo0LCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywicmFuayI6NSwic3RhdHVz
IjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzMsInJhbmsiOjYsInN0YXR1cyI6InJ1bm5pbmcifSx7
InZlcnNpb24iOjMzLCJyYW5rIjo3LCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywi
cmFuayI6OCwic3RhdHVzIjoicnVubmluZyJ9XX0QdHJpZ2dlcnNeeyJ0b3RhbF9jb3VudCI6MCwi
cmFuZ2VfY291bnQiOjAsIm5haV9jb3VudCI6MH0AAAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:36 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00073'
status:
code: 200
message: OK
- request:
body: "\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/system/properties
response:
body:
string: !!binary |
BE9LAD5zaG93X3N5c3RlbV9wcm9wZXJ0aWVzX3Jlc3BvbnNlvPUCpAk8Y29uZi5haS5hcGkuY29u
bmVjdGlvbl90aW1lb3V0BDkwHmNvbmYuYWkuYXBpLmtleQAoY29uZi5haS5hcGkucHJvdmlkZXIW
a2luZXRpY2FsbG0eY29uZi5haS5hcGkudXJsSGh0dHA6Ly8xNzIuMzEuMzEuMTM6ODA1MC9zcWwv
c3VnZ2VzdDBjb25mLmFsZXJ0X2Rpc2tfYWJzb2x1dGUANGNvbmYuYWxlcnRfZGlza19wZXJjZW50
YWdlGDEsIDUsIDEwLCAyMBxjb25mLmFsZXJ0X2V4ZQAsY29uZi5hbGVydF9ob3N0X3N0YXR1cwhU
UlVFOmNvbmYuYWxlcnRfaG9zdF9zdGF0dXNfZmlsdGVyIGZhdGFsX2luaXRfZXJyb3I4Y29uZi5h
bGVydF9tYXhfc3RvcmVkX2FsZXJ0cwYxMDA0Y29uZi5hbGVydF9tZW1vcnlfYWJzb2x1dGUAOGNv
bmYuYWxlcnRfbWVtb3J5X3BlcmNlbnRhZ2UYMSwgNSwgMTAsIDIwNGNvbmYuYWxlcnRfcmFua19j
dWRhX2Vycm9yCFRSVUVEY29uZi5hbGVydF9yYW5rX2ZhbGxiYWNrX2FsbG9jYXRvcghUUlVFLGNv
bmYuYWxlcnRfcmFua19zdGF0dXMIVFJVRTpjb25mLmFsZXJ0X3Jhbmtfc3RhdHVzX2ZpbHRlclhm
YXRhbF9pbml0X2Vycm9yLCBub3RfcmVzcG9uZGluZywgdGVybWluYXRlZB5jb25mLmF1ZGl0X2Jv
ZHkKRkFMU0UeY29uZi5hdWRpdF9kYXRhCkZBTFNFJGNvbmYuYXVkaXRfaGVhZGVycwpGQUxTRT5j
b25mLmF1dG9fY3JlYXRlX2V4dGVybmFsX3VzZXJzCkZBTFNFTGNvbmYuYnVpbGRfbWF0ZXJpYWxp
emVkX3ZpZXdzX29uX3N0YXJ0Em9uX2RlbWFuZDhjb25mLmJ1aWxkX3BrX2luZGV4X29uX3N0YXJ0
Em9uX2RlbWFuZDhjb25mLmNodW5rX2NvbHVtbl9tYXhfbWVtb3J5EjUxMjAwMDAwMCpjb25mLmNo
dW5rX21heF9tZW1vcnkUODE5MjAwMDAwMB5jb25mLmNodW5rX3NpemUOODAwMDAwMCJjb25mLmNs
dXN0ZXJfbmFtZRxwcm9kdWN0aW9uLW5ld0Bjb25mLmNvbmN1cnJlbnRfa2VybmVsX2V4ZWN1dGlv
bghUUlVFOmNvbmYuZGVmYXVsdF9wcmltYXJ5X2tleV90eXBlDG1lbW9yeSBjb25mLmRlZmF1bHRf
dHRsBDIwLGNvbmYuZGlzYWJsZV9jbGVhcl9hbGwIVFJVRT5jb25mLmVncmVzc19wYXJxdWV0X2Nv
bXByZXNzaW9uDHNuYXBweSRjb25mLmVuYWJsZV9hbGVydHMIVFJVRSJjb25mLmVuYWJsZV9hdWRp
dApGQUxTRTJjb25mLmVuYWJsZV9hdXRob3JpemF0aW9uCFRSVUVGY29uZi5lbmFibGVfZXh0ZXJu
YWxfYXV0aGVudGljYXRpb24KRkFMU0UwY29uZi5lbmFibGVfZ3JhcGhfc2VydmVyCFRSVUUcY29u
Zi5lbmFibGVfaGEKRkFMU0UuY29uZi5lbmFibGVfaHR0cGRfcHJveHkKRkFMU0UcY29uZi5lbmFi
bGVfbWwKRkFMU0U2Y29uZi5lbmFibGVfb3BlbmdsX3JlbmRlcmVyCFRSVUVAY29uZi5lbmFibGVf
b3ZlcmxhcHBlZF9lcXVpX2pvaW4IVFJVRTRjb25mLmVuYWJsZV9wb3N0Z3Jlc19wcm94eQhUUlVF
PmNvbmYuZW5hYmxlX3ByZWRpY2F0ZV9lcXVpX2pvaW4IVFJVRSJjb25mLmVuYWJsZV9wcm9jcwhU
UlVFJGNvbmYuZW5hYmxlX3JldmVhbAhUUlVFMGNvbmYuZW5hYmxlX3N0YXRzX3NlcnZlcghUUlVF
LmNvbmYuZW5hYmxlX3RleHRfc2VhcmNoCFRSVUU8Y29uZi5lbmFibGVfdmVjdG9ydGlsZV9zZXJ2
aWNlCFRSVUUsY29uZi5lbmFibGVfdnJhbV9jYWNoZQhUUlVFPmNvbmYuZW5hYmxlX3dvcmtlcl9o
dHRwX3NlcnZlcnMIVFJVRTJjb25mLmV2ZW50X3NlcnZlcl9hZGRyZXNzGDE3Mi4zMS4zMy4zMDRj
b25mLmV2ZW50X3NlcnZlcl9pbnRlcm5hbApGQUxTRTpjb25mLmV4dGVybmFsX2ZpbGVzX2RpcmVj
dG9yeSAvbmZzL2RhdGEvcHVibGljFGNvbmYuZ21faXAYMTcyLjMxLjMzLjMwGmNvbmYuZ21fcG9y
dDEINTU1MiBjb25mLmdtX3B1Yl9wb3J0CDU1NTMoY29uZi5ncmFwaC5oZWFkX3BvcnQIODEwMC5j
b25mLmdyYXBoLnNlcnZlcjAuaG9zdApob3N0MC5jb25mLmdyYXBoLnNlcnZlcjAucG9ydAg4MTAx
OGNvbmYuZ3JhcGguc2VydmVyMC5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjEuaG9zdApo
b3N0MS5jb25mLmdyYXBoLnNlcnZlcjEucG9ydAg4MTAyOGNvbmYuZ3JhcGguc2VydmVyMS5yYW1f
bGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjIuaG9zdApob3N0Mi5jb25mLmdyYXBoLnNlcnZlcjIu
cG9ydAg4MTAzOGNvbmYuZ3JhcGguc2VydmVyMi5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZl
cjMuaG9zdApob3N0My5jb25mLmdyYXBoLnNlcnZlcjMucG9ydAg4MTA0OGNvbmYuZ3JhcGguc2Vy
dmVyMy5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjQuaG9zdApob3N0NC5jb25mLmdyYXBo
LnNlcnZlcjQucG9ydAg4MTA1OGNvbmYuZ3JhcGguc2VydmVyNC5yYW1fbGltaXQCMC5jb25mLmdy
YXBoLnNlcnZlcjUuaG9zdApob3N0NS5jb25mLmdyYXBoLnNlcnZlcjUucG9ydAg4MTA2OGNvbmYu
Z3JhcGguc2VydmVyNS5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjYuaG9zdApob3N0Ni5j
b25mLmdyYXBoLnNlcnZlcjYucG9ydAg4MTA3OGNvbmYuZ3JhcGguc2VydmVyNi5yYW1fbGltaXQC
MC5jb25mLmdyYXBoLnNlcnZlcjcuaG9zdApob3N0Ny5jb25mLmdyYXBoLnNlcnZlcjcucG9ydAg4
MTA4OGNvbmYuZ3JhcGguc2VydmVyNy5yYW1fbGltaXQCMBxjb25mLmhhX3F1ZXVlcwAuY29uZi5o
YV9yaW5nX2hlYWRfbm9kZXMAKGNvbmYuaGVhZF9pcF9hZGRyZXNzGDE3Mi4zMS4zMy4zMBxjb25m
LmhlYWRfcG9ydAg5MTkxImNvbmYuaG1faHR0cF9wb3J0CDkzMDA2Y29uZi5ob3N0MF9hY2NlcHRz
X2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDBfYWRkcmVzcxgxNzIuMzEuMzMuMzAeY29uZi5ob3N0
MF9ncHVzDjAsMSwyLDNEY29uZi5ob3N0MF9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8v
MTcyLjMxLjMzLjMwOjkzMDAsY29uZi5ob3N0MF9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMz
LjMwMmNvbmYuaG9zdDBfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMwLGNvbmYuaG9zdDBfcHVi
bGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMChjb25mLmhvc3QwX3JhbV9saW1pdBg2NTY3Njc3
OTk5OTk2Y29uZi5ob3N0MV9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDFfYWRkcmVz
cxgxNzIuMzEuMzMuMzEeY29uZi5ob3N0MV9ncHVzDjAsMSwyLDNEY29uZi5ob3N0MV9ob3N0X21h
bmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjMxOjkzMDAsY29uZi5ob3N0MV9wcml2
YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjMxMmNvbmYuaG9zdDFfcHVibGljX2FkZHJlc3MYMTcy
LjMxLjMzLjMxLGNvbmYuaG9zdDFfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMShjb25m
Lmhvc3QxX3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0Ml9hY2NlcHRzX2ZhaWxvdmVy
CkZBTFNFJGNvbmYuaG9zdDJfYWRkcmVzcxgxNzIuMzEuMzMuMzIeY29uZi5ob3N0Ml9ncHVzDjAs
MSwyLDNEY29uZi5ob3N0Ml9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMz
LjMyOjkzMDAsY29uZi5ob3N0Ml9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjMyMmNvbmYu
aG9zdDJfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMyLGNvbmYuaG9zdDJfcHVibGljX3VybHMm
aHR0cDovLzE3Mi4zMS4zMy4zMihjb25mLmhvc3QyX3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29u
Zi5ob3N0M19hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDNfYWRkcmVzcxgxNzIuMzEu
MzMuMzMeY29uZi5ob3N0M19ncHVzDjAsMSwyLDNEY29uZi5ob3N0M19ob3N0X21hbmFnZXJfcHVi
bGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjMzOjkzMDAsY29uZi5ob3N0M19wcml2YXRlX3VybCZo
dHRwOi8vMTcyLjMxLjMzLjMzMmNvbmYuaG9zdDNfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMz
LGNvbmYuaG9zdDNfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMyhjb25mLmhvc3QzX3Jh
bV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0NF9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNv
bmYuaG9zdDRfYWRkcmVzcxgxNzIuMzEuMzMuMzQeY29uZi5ob3N0NF9ncHVzDjAsMSwyLDNEY29u
Zi5ob3N0NF9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjM0OjkzMDAs
Y29uZi5ob3N0NF9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM0MmNvbmYuaG9zdDRfcHVi
bGljX2FkZHJlc3MYMTcyLjMxLjMzLjM0LGNvbmYuaG9zdDRfcHVibGljX3VybHMmaHR0cDovLzE3
Mi4zMS4zMy4zNChjb25mLmhvc3Q0X3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0NV9h
Y2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDVfYWRkcmVzcxgxNzIuMzEuMzMuMzUeY29u
Zi5ob3N0NV9ncHVzDjAsMSwyLDNEY29uZi5ob3N0NV9ob3N0X21hbmFnZXJfcHVibGljX3VybDBo
dHRwOi8vMTcyLjMxLjMzLjM1OjkzMDAsY29uZi5ob3N0NV9wcml2YXRlX3VybCZodHRwOi8vMTcy
LjMxLjMzLjM1MmNvbmYuaG9zdDVfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjM1LGNvbmYuaG9z
dDVfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zNShjb25mLmhvc3Q1X3JhbV9saW1pdBg2
NTY3Njc3OTk5OTk2Y29uZi5ob3N0Nl9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDZf
YWRkcmVzcxgxNzIuMzEuMzMuMzYeY29uZi5ob3N0Nl9ncHVzDjAsMSwyLDNEY29uZi5ob3N0Nl9o
b3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjM2OjkzMDAsY29uZi5ob3N0
Nl9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM2MmNvbmYuaG9zdDZfcHVibGljX2FkZHJl
c3MYMTcyLjMxLjMzLjM2LGNvbmYuaG9zdDZfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4z
Nihjb25mLmhvc3Q2X3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0N19hY2NlcHRzX2Zh
aWxvdmVyCkZBTFNFJGNvbmYuaG9zdDdfYWRkcmVzcxgxNzIuMzEuMzMuMzceY29uZi5ob3N0N19n
cHVzDjAsMSwyLDNEY29uZi5ob3N0N19ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcy
LjMxLjMzLjM3OjkzMDAsY29uZi5ob3N0N19wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM3
MmNvbmYuaG9zdDdfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjM3LGNvbmYuaG9zdDdfcHVibGlj
X3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zNyhjb25mLmhvc3Q3X3JhbV9saW1pdBg2NTY3Njc3OTk5
OTkqY29uZi5odHRwZF9wcm94eV9wb3J0CDgwODI0Y29uZi5odHRwZF9wcm94eV91c2VfaHR0cHMK
RkFMU0U4Y29uZi5pbml0X3dpdGhfbm93X2F0X3dvcmtlcgpGQUxTRSpjb25mLmthZmthLmJhdGNo
X3NpemUIMTAwMC5jb25mLmthZmthLnBvbGxfdGltZW91dAIwKGNvbmYua2Fma2Eud2FpdF90aW1l
BDMwLmNvbmYua2VybmVsX29tcF90aHJlYWRzAjQ0Y29uZi5sb2FkX3ZlY3RvcnNfb25fc3RhcnQS
b25fZGVtYW5kHmNvbmYubG9ja19hdWRpdApGQUxTRTZjb25mLm1heF9hdXRvX3ZpZXdfdXBkYXRv
cnMCMzZjb25mLm1heF9jb25jdXJyZW50X2tlcm5lbHMCMDJjb25mLm1heF9nZXRfcmVjb3Jkc19z
aXplCjIwMDAwKmNvbmYubWF4X2hlYXRtYXBfc2l6ZQgzMDcyKmNvbmYubWF4X2h0dHBfdGhyZWFk
cwY1MTI6Y29uZi5tZXRhZGF0YV9zdG9yZV9zeW5jX21vZGUMbm9ybWFsKmNvbmYubWluX2h0dHBf
dGhyZWFkcwI4MGNvbmYubWluX3Bhc3N3b3JkX2xlbmd0aAIwIGNvbmYubWxfYXBpX3BvcnQIOTE4
N1xjb25mLm5wMS5idWlsZF9tYXRlcmlhbGl6ZWRfdmlld3Nfb25fbWlncmF0aW9uDGFsd2F5c0hj
b25mLm5wMS5idWlsZF9wa19pbmRleF9vbl9taWdyYXRpb24MYWx3YXlzRGNvbmYubnAxLmNyaXRp
Y2FsX3Jlc3RhcnRfYXR0ZW1wdHMCMTpjb25mLm5wMS5lbmFibGVfaGVhZF9mYWlsb3ZlcgpGQUxT
RT5jb25mLm5wMS5lbmFibGVfd29ya2VyX2ZhaWxvdmVyCkZBTFNFSmNvbmYubnAxLmZhaWxvdmVy
X2Rpc3RyaWJ1dGlvbl9wb2xpY3kIZmlsbERjb25mLm5wMS5sb2FkX3ZlY3RvcnNfb25fbWlncmF0
aW9uDGFsd2F5c0xjb25mLm5wMS5ub25fY3JpdGljYWxfcmVzdGFydF9hdHRlbXB0cwIzPGNvbmYu
bnAxLnJhbmtfcmVzdGFydF9hdHRlbXB0cwIxMmNvbmYubnAxLnJlc3RhcnRfaW50ZXJ2YWwENjA2
Y29uZi5ucDEuc3RvcmFnZV9hcGlfc2NyaXB0AChjb25mLm51bWJlcl9vZl9ob3N0cwI4KGNvbmYu
bnVtYmVyX29mX3JhbmtzAjk8Y29uZi5vcGVuZ2xfYW50aWFsaWFzaW5nX2xldmVsAjAsY29uZi5w
ZXJzaXN0X2RpcmVjdG9yeTAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8sY29uZi5wZXJzaXN0X3N5
bmNfdGltZQI1NmNvbmYucG9pbnRfcmVuZGVyX3RocmVzaG9sZAwxMDAwMDBWY29uZi5wb3N0Z3Jl
c19wcm94eS5pZGxlX2Nvbm5lY3Rpb25fdGltZW91dAYzMDBUY29uZi5wb3N0Z3Jlc19wcm94eS5t
YXhfcXVldWVkX2Nvbm5lY3Rpb25zAjE+Y29uZi5wb3N0Z3Jlc19wcm94eS5tYXhfdGhyZWFkcwQ2
ND5jb25mLnBvc3RncmVzX3Byb3h5Lm1pbl90aHJlYWRzAjIwY29uZi5wb3N0Z3Jlc19wcm94eS5w
b3J0CDU0MzIuY29uZi5wb3N0Z3Jlc19wcm94eS5zc2wIVFJVRRxjb25mLnJhbmswX2dwdQIwKmNv
bmYucmFuazBfaXBfYWRkcmVzcxgxNzIuMzEuMzMuMzAqY29uZi5yYW5rMV9pcF9hZGRyZXNzGDE3
Mi4zMS4zMy4zMCpjb25mLnJhbmsyX2lwX2FkZHJlc3MYMTcyLjMxLjMzLjMxKmNvbmYucmFuazNf
aXBfYWRkcmVzcxgxNzIuMzEuMzMuMzIqY29uZi5yYW5rNF9pcF9hZGRyZXNzGDE3Mi4zMS4zMy4z
Mypjb25mLnJhbms1X2lwX2FkZHJlc3MYMTcyLjMxLjMzLjM0KmNvbmYucmFuazZfaXBfYWRkcmVz
cxgxNzIuMzEuMzMuMzUqY29uZi5yYW5rN19pcF9hZGRyZXNzGDE3Mi4zMS4zMy4zNipjb25mLnJh
bms4X2lwX2FkZHJlc3MYMTcyLjMxLjMzLjM3KGNvbmYucmVxdWVzdF90aW1lb3V0CDI0MDA2Y29u
Zi5yZXF1aXJlX2F1dGhlbnRpY2F0aW9uCFRSVUVeY29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0
Lm1heF9jcHVfY29uY3VycmVuY3kELTFaY29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0Lm1heF90
aWVyX3ByaW9yaXR5BDEwSmNvbmYucmVzb3VyY2VfZ3JvdXAuZGVmYXVsdC5yYW1fbGltaXQELTFa
Y29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0LnNjaGVkdWxlX3ByaW9yaXR5BDUwTGNvbmYucmVz
b3VyY2VfZ3JvdXAuZGVmYXVsdC52cmFtX2xpbWl0BC0xHGNvbmYucmluZ19uYW1lDmRlZmF1bHRW
Y29uZi5zZWN1cml0eS5leHRlcm5hbC5yYW5nZXIuY2FjaGVfbWludXRlcwQ2MFRjb25mLnNlY3Vy
aXR5LmV4dGVybmFsLnJhbmdlci5zZXJ2aWNlX25hbWUQa2luZXRpY2FCY29uZi5zZWN1cml0eS5l
eHRlcm5hbC5yYW5nZXIudXJsAGBjb25mLnNlY3VyaXR5LmV4dGVybmFsLnJhbmdlcl9hdXRob3Jp
emVyLmFkZHJlc3MyaXBjOi8vL3RtcC9ncHVkYi1yYW5nZXItMHRjb25mLnNlY3VyaXR5LmV4dGVy
bmFsLnJhbmdlcl9hdXRob3JpemVyLnJlbW90ZV9kZWJ1Z19wb3J0AjBgY29uZi5zZWN1cml0eS5l
eHRlcm5hbC5yYW5nZXJfYXV0aG9yaXplci50aW1lb3V0BjEyMCpjb25mLnNldF9tb25pdG9yX3Bv
cnQIOTAwMjZjb25mLnNldF9tb25pdG9yX3Byb3h5X3BvcnQIOTAwMzZjb25mLnNldF9tb25pdG9y
X3F1ZXVlX3NpemUIMTAwMChjb25mLnNoYWRvd19hZ2dfc2l6ZRI1MDAwMDAwMDAwY29uZi5zaGFk
b3dfY3ViZV9lbmFibGVkCFRSVUUuY29uZi5zaGFkb3dfZmlsdGVyX3NpemUSNTAwMDAwMDAwJmNv
bmYuc21fb21wX3RocmVhZHMCMiRjb25mLnNtc19kaXJlY3RvcnkwL21udC9kYXRhL2dwdWRiL3Bl
cnNpc3QvLmNvbmYuc21zX21heF9vcGVuX2ZpbGVzBjEyOEBjb25mLnNxbC5jb3N0X2Jhc2VkX29w
dGltaXphdGlvbgpGQUxTRTRjb25mLnNxbC5kaXN0cmlidXRlZF9qb2lucwhUUlVFPmNvbmYuc3Fs
LmRpc3RyaWJ1dGVkX29wZXJhdGlvbnMIVFJVRS5jb25mLnNxbC5lbmFibGVfcGxhbm5lcghUUlVF
NmNvbmYuc3FsLmZvcmNlX2JpbmFyeV9qb2lucwpGQUxTRTpjb25mLnNxbC5mb3JjZV9iaW5hcnlf
c2V0X29wcwpGQUxTRTZjb25mLnNxbC5tYXhfcGFyYWxsZWxfc3RlcHMCNEBjb25mLnNxbC5tYXhf
dmlld19uZXN0aW5nX2xldmVscwQxNjJjb25mLnNxbC5wYWdpbmdfdGFibGVfdHRsBDIwNmNvbmYu
c3FsLnBhcmFsbGVsX2V4ZWN1dGlvbghUUlVFMGNvbmYuc3FsLnBsYW5fY2FjaGVfc2l6ZQg0MDAw
MGNvbmYuc3FsLnBsYW5uZXIuYWRkcmVzcz5pcGM6Ly8vdG1wL2dwdWRiLXF1ZXJ5LWVuZ2luZS0w
NmNvbmYuc3FsLnBsYW5uZXIubWF4X21lbW9yeQg0MDk2NGNvbmYuc3FsLnBsYW5uZXIubWF4X3N0
YWNrAjZEY29uZi5zcWwucGxhbm5lci5yZW1vdGVfZGVidWdfcG9ydAIwMGNvbmYuc3FsLnBsYW5u
ZXIudGltZW91dAYxMjA0Y29uZi5zcWwucmVzdWx0X2NhY2hlX3NpemUINDAwMDRjb25mLnNxbC5y
ZXN1bHRzLmNhY2hlX3R0bAQ2MDBjb25mLnNxbC5yZXN1bHRzLmNhY2hpbmcIVFJVRUBjb25mLnNx
bC5ydWxlX2Jhc2VkX29wdGltaXphdGlvbghUUlVFPGNvbmYuc3VidGFza19jb25jdXJyZW5jeV9s
aW1pdAI0PmNvbmYuc3ltYm9sb2d5X3JlbmRlcl90aHJlc2hvbGQKMTAwMDBQY29uZi5zeXN0ZW1f
bWV0YWRhdGEuc3RhdHNfYWdncl9yb3djb3VudAoxMDAwMEhjb25mLnN5c3RlbV9tZXRhZGF0YS5z
dGF0c19hZ2dyX3RpbWUCMVJjb25mLnN5c3RlbV9tZXRhZGF0YS5zdGF0c19yZXRlbnRpb25fZGF5
cwQyMSZjb25mLnRhc2tjYWxjX2dwdS4xElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS4yElsw
LDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS4zElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS40
ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS41ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dw
dS42ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS43ElswLDEsMiwzXSZjb25mLnRhc2tjYWxj
X2dwdS44ElswLDEsMiwzXSBjb25mLnRjc19wZXJfdG9tBDQwJmNvbmYudGVtcF9kaXJlY3RvcnkI
L3RtcDJjb25mLnRleHRfaW5kZXhfZGlyZWN0b3J5MC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0LzJj
b25mLnRleHRfaW5kaWNlc19wZXJfdG9tAjJMY29uZi50aWVyLmRpc2swLmRlZmF1bHQuaGlnaF93
YXRlcm1hcmsEOTA6Y29uZi50aWVyLmRpc2swLmRlZmF1bHQubGltaXQYNjAwMDAwMDAwMDAwSmNv
bmYudGllci5kaXNrMC5kZWZhdWx0Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLmRpc2swLmRl
ZmF1bHQucGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlYGNvbmYudGllci5k
aXNrMC5kZWZhdWx0LnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRUhjb25mLnRpZXIuZGlz
azAucmFuazAuaGlnaF93YXRlcm1hcmsEOTA2Y29uZi50aWVyLmRpc2swLnJhbmswLmxpbWl0GDYw
MDAwMDAwMDAwMEZjb25mLnRpZXIuZGlzazAucmFuazAubG93X3dhdGVybWFyawQ4MDRjb25mLnRp
ZXIuZGlzazAucmFuazAucGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlXGNv
bmYudGllci5kaXNrMC5yYW5rMC5zdG9yZV9wZXJzaXN0ZW50X29iamVjdHMKRkFMU0VIY29uZi50
aWVyLmRpc2swLnJhbmsxLmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5kaXNrMC5yYW5rMS5s
aW1pdBg2MDAwMDAwMDAwMDBGY29uZi50aWVyLmRpc2swLnJhbmsxLmxvd193YXRlcm1hcmsEODA0
Y29uZi50aWVyLmRpc2swLnJhbmsxLnBhdGhEL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvL2Rpc2tj
YWNoZVxjb25mLnRpZXIuZGlzazAucmFuazEuc3RvcmVfcGVyc2lzdGVudF9vYmplY3RzCkZBTFNF
SGNvbmYudGllci5kaXNrMC5yYW5rMi5oaWdoX3dhdGVybWFyawQ5MDZjb25mLnRpZXIuZGlzazAu
cmFuazIubGltaXQYNjAwMDAwMDAwMDAwRmNvbmYudGllci5kaXNrMC5yYW5rMi5sb3dfd2F0ZXJt
YXJrBDgwNGNvbmYudGllci5kaXNrMC5yYW5rMi5wYXRoRC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0
Ly9kaXNrY2FjaGVcY29uZi50aWVyLmRpc2swLnJhbmsyLnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0
cwpGQUxTRUhjb25mLnRpZXIuZGlzazAucmFuazMuaGlnaF93YXRlcm1hcmsEOTA2Y29uZi50aWVy
LmRpc2swLnJhbmszLmxpbWl0GDYwMDAwMDAwMDAwMEZjb25mLnRpZXIuZGlzazAucmFuazMubG93
X3dhdGVybWFyawQ4MDRjb25mLnRpZXIuZGlzazAucmFuazMucGF0aEQvbW50L2RhdGEvZ3B1ZGIv
cGVyc2lzdC8vZGlza2NhY2hlXGNvbmYudGllci5kaXNrMC5yYW5rMy5zdG9yZV9wZXJzaXN0ZW50
X29iamVjdHMKRkFMU0VIY29uZi50aWVyLmRpc2swLnJhbms0LmhpZ2hfd2F0ZXJtYXJrBDkwNmNv
bmYudGllci5kaXNrMC5yYW5rNC5saW1pdBg2MDAwMDAwMDAwMDBGY29uZi50aWVyLmRpc2swLnJh
bms0Lmxvd193YXRlcm1hcmsEODA0Y29uZi50aWVyLmRpc2swLnJhbms0LnBhdGhEL21udC9kYXRh
L2dwdWRiL3BlcnNpc3QvL2Rpc2tjYWNoZVxjb25mLnRpZXIuZGlzazAucmFuazQuc3RvcmVfcGVy
c2lzdGVudF9vYmplY3RzCkZBTFNFSGNvbmYudGllci5kaXNrMC5yYW5rNS5oaWdoX3dhdGVybWFy
awQ5MDZjb25mLnRpZXIuZGlzazAucmFuazUubGltaXQYNjAwMDAwMDAwMDAwRmNvbmYudGllci5k
aXNrMC5yYW5rNS5sb3dfd2F0ZXJtYXJrBDgwNGNvbmYudGllci5kaXNrMC5yYW5rNS5wYXRoRC9t
bnQvZGF0YS9ncHVkYi9wZXJzaXN0Ly9kaXNrY2FjaGVcY29uZi50aWVyLmRpc2swLnJhbms1LnN0
b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRUhjb25mLnRpZXIuZGlzazAucmFuazYuaGlnaF93
YXRlcm1hcmsEOTA2Y29uZi50aWVyLmRpc2swLnJhbms2LmxpbWl0GDYwMDAwMDAwMDAwMEZjb25m
LnRpZXIuZGlzazAucmFuazYubG93X3dhdGVybWFyawQ4MDRjb25mLnRpZXIuZGlzazAucmFuazYu
cGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlXGNvbmYudGllci5kaXNrMC5y
YW5rNi5zdG9yZV9wZXJzaXN0ZW50X29iamVjdHMKRkFMU0VIY29uZi50aWVyLmRpc2swLnJhbms3
LmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5kaXNrMC5yYW5rNy5saW1pdBg2MDAwMDAwMDAw
MDBGY29uZi50aWVyLmRpc2swLnJhbms3Lmxvd193YXRlcm1hcmsEODA0Y29uZi50aWVyLmRpc2sw
LnJhbms3LnBhdGhEL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvL2Rpc2tjYWNoZVxjb25mLnRpZXIu
ZGlzazAucmFuazcuc3RvcmVfcGVyc2lzdGVudF9vYmplY3RzCkZBTFNFSGNvbmYudGllci5kaXNr
MC5yYW5rOC5oaWdoX3dhdGVybWFyawQ5MDZjb25mLnRpZXIuZGlzazAucmFuazgubGltaXQYNjAw
MDAwMDAwMDAwRmNvbmYudGllci5kaXNrMC5yYW5rOC5sb3dfd2F0ZXJtYXJrBDgwNGNvbmYudGll
ci5kaXNrMC5yYW5rOC5wYXRoRC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Ly9kaXNrY2FjaGVcY29u
Zi50aWVyLmRpc2swLnJhbms4LnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRVBjb25mLnRp
ZXIuZ2xvYmFsLmNvbmN1cnJlbnRfd2FpdF90aW1lb3V0BjYwMGpjb25mLnRpZXIuZ2xvYmFsLmRl
ZmVyX2NhY2hlX29iamVjdF9ldmljdGlvbnNfdG9fZGlzawhUUlVFUGNvbmYudGllci5wZXJzaXN0
LmRlZmF1bHQuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnBlcnNpc3QuZGVmYXVsdC5saW1p
dBo2NTAwMDAwMDAwMDAwTmNvbmYudGllci5wZXJzaXN0LmRlZmF1bHQubG93X3dhdGVybWFyawQ4
MDxjb25mLnRpZXIucGVyc2lzdC5kZWZhdWx0LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3Qv
OmNvbmYudGllci5wZXJzaXN0LmdyYXBoMC5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpj
b25mLnRpZXIucGVyc2lzdC5ncmFwaDEucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC86Y29u
Zi50aWVyLnBlcnNpc3QuZ3JhcGgyLnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvOmNvbmYu
dGllci5wZXJzaXN0LmdyYXBoMy5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpjb25mLnRp
ZXIucGVyc2lzdC5ncmFwaDQucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC86Y29uZi50aWVy
LnBlcnNpc3QuZ3JhcGg1LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvOmNvbmYudGllci5w
ZXJzaXN0LmdyYXBoNi5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpjb25mLnRpZXIucGVy
c2lzdC5ncmFwaDcucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBlcnNp
c3QucmFuazAuaGlnaF93YXRlcm1hcmsEOTA6Y29uZi50aWVyLnBlcnNpc3QucmFuazAubGltaXQa
NjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVyc2lzdC5yYW5rMC5sb3dfd2F0ZXJtYXJrBDgwOGNv
bmYudGllci5wZXJzaXN0LnJhbmswLnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNvbmYu
dGllci5wZXJzaXN0LnJhbmsxLmhpZ2hfd2F0ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0LnJh
bmsxLmxpbWl0GjY1MDAwMDAwMDAwMDBKY29uZi50aWVyLnBlcnNpc3QucmFuazEubG93X3dhdGVy
bWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5yYW5rMS5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJz
aXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5rMi5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRpZXIu
cGVyc2lzdC5yYW5rMi5saW1pdBo2NTAwMDAwMDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJhbmsy
Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLnBlcnNpc3QucmFuazIucGF0aDAvbW50L2RhdGEv
Z3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBlcnNpc3QucmFuazMuaGlnaF93YXRlcm1hcmsEOTA6
Y29uZi50aWVyLnBlcnNpc3QucmFuazMubGltaXQaNjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVy
c2lzdC5yYW5rMy5sb3dfd2F0ZXJtYXJrBDgwOGNvbmYudGllci5wZXJzaXN0LnJhbmszLnBhdGgw
L21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNvbmYudGllci5wZXJzaXN0LnJhbms0LmhpZ2hfd2F0
ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0LnJhbms0LmxpbWl0GjY1MDAwMDAwMDAwMDBKY29u
Zi50aWVyLnBlcnNpc3QucmFuazQubG93X3dhdGVybWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5y
YW5rNC5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5r
NS5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRpZXIucGVyc2lzdC5yYW5rNS5saW1pdBo2NTAwMDAw
MDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJhbms1Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVy
LnBlcnNpc3QucmFuazUucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBl
cnNpc3QucmFuazYuaGlnaF93YXRlcm1hcmsEOTA6Y29uZi50aWVyLnBlcnNpc3QucmFuazYubGlt
aXQaNjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVyc2lzdC5yYW5rNi5sb3dfd2F0ZXJtYXJrBDgw
OGNvbmYudGllci5wZXJzaXN0LnJhbms2LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNv
bmYudGllci5wZXJzaXN0LnJhbms3LmhpZ2hfd2F0ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0
LnJhbms3LmxpbWl0GjY1MDAwMDAwMDAwMDBKY29uZi50aWVyLnBlcnNpc3QucmFuazcubG93X3dh
dGVybWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5yYW5rNy5wYXRoMC9tbnQvZGF0YS9ncHVkYi9w
ZXJzaXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5rOC5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRp
ZXIucGVyc2lzdC5yYW5rOC5saW1pdBo2NTAwMDAwMDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJh
bms4Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLnBlcnNpc3QucmFuazgucGF0aDAvbW50L2Rh
dGEvZ3B1ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDEucGF0aDAvbW50L2RhdGEv
Z3B1ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDIucGF0aDAvbW50L2RhdGEvZ3B1
ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDMucGF0aDAvbW50L2RhdGEvZ3B1ZGIv
cGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDQucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVy
c2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDUucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lz
dC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDYucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC84
Y29uZi50aWVyLnBlcnNpc3QudGV4dDcucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC84Y29u
Zi50aWVyLnBlcnNpc3QudGV4dDgucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9IY29uZi50
aWVyLnJhbS5kZWZhdWx0LmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5yYW0uZGVmYXVsdC5s
aW1pdAQtMUZjb25mLnRpZXIucmFtLmRlZmF1bHQubG93X3dhdGVybWFyawQ4MERjb25mLnRpZXIu
cmFtLnJhbmswLmhpZ2hfd2F0ZXJtYXJrBDkwMmNvbmYudGllci5yYW0ucmFuazAubGltaXQWNzcy
NjY4MDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMC5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazEuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMS5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMS5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazIuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMi5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMi5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazMuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMy5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMy5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazQuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNC5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNC5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazUuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNS5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNS5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazYuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNi5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNi5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazcuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNy5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNy5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazguaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rOC5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rOC5sb3dfd2F0ZXJtYXJrBDgwXGNvbmYudGllci52
cmFtLmRlZmF1bHQuYWxsX2dwdXMuaGlnaF93YXRlcm1hcmsEOTBKY29uZi50aWVyLnZyYW0uZGVm
YXVsdC5hbGxfZ3B1cy5saW1pdAQtMVpjb25mLnRpZXIudnJhbS5kZWZhdWx0LmFsbF9ncHVzLmxv
d193YXRlcm1hcmsEODBUY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUwLmhpZ2hfd2F0ZXJtYXJr
BDkwQmNvbmYudGllci52cmFtLmRlZmF1bHQuZ3B1MC5saW1pdAQtMVJjb25mLnRpZXIudnJhbS5k
ZWZhdWx0LmdwdTAubG93X3dhdGVybWFyawQ4MFRjb25mLnRpZXIudnJhbS5kZWZhdWx0LmdwdTEu
aGlnaF93YXRlcm1hcmsEOTBCY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUxLmxpbWl0BC0xUmNv
bmYudGllci52cmFtLmRlZmF1bHQuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwVGNvbmYudGllci52cmFt
LmRlZmF1bHQuZ3B1Mi5oaWdoX3dhdGVybWFyawQ5MEJjb25mLnRpZXIudnJhbS5kZWZhdWx0Lmdw
dTIubGltaXQELTFSY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUyLmxvd193YXRlcm1hcmsEODBU
Y29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUzLmhpZ2hfd2F0ZXJtYXJrBDkwQmNvbmYudGllci52
cmFtLmRlZmF1bHQuZ3B1My5saW1pdAQtMVJjb25mLnRpZXIudnJhbS5kZWZhdWx0LmdwdTMubG93
X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMC5HUFUwLmhpZ2hfd2F0ZXJtYXJrBDkw
PmNvbmYudGllci52cmFtLnJhbmswLkdQVTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazAu
R1BVMC5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbmswLmFsbF9ncHVzLmhpZ2hf
d2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbmswLmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYu
dGllci52cmFtLnJhbmswLmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0u
cmFuazAuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMC5ncHUwLmxp
bWl0BC0xTmNvbmYudGllci52cmFtLnJhbmswLmdwdTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRp
ZXIudnJhbS5yYW5rMC5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsw
LmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazAuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgw
UGNvbmYudGllci52cmFtLnJhbmswLmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZy
YW0ucmFuazAuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMC5ncHUyLmxvd193YXRl
cm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazAuZ3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25m
LnRpZXIudnJhbS5yYW5rMC5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmswLmdwdTMu
bG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMS5HUFUwLmhpZ2hfd2F0ZXJtYXJr
BDkwPmNvbmYudGllci52cmFtLnJhbmsxLkdQVTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFu
azEuR1BVMC5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsxLkdQVTEuaGlnaF93
YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazEuR1BVMS5saW1pdAQtMU5jb25mLnRpZXIu
dnJhbS5yYW5rMS5HUFUxLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEuR1BV
Mi5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMS5HUFUyLmxpbWl0BC0xTmNv
bmYudGllci52cmFtLnJhbmsxLkdQVTIubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5y
YW5rMS5HUFUzLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsxLkdQVTMubGlt
aXQELTFOY29uZi50aWVyLnZyYW0ucmFuazEuR1BVMy5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGll
ci52cmFtLnJhbmsxLmFsbF9ncHVzLmhpZ2hfd2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJh
bmsxLmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYudGllci52cmFtLnJhbmsxLmFsbF9ncHVzLmxvd193
YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5j
b25mLnRpZXIudnJhbS5yYW5rMS5ncHUwLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmsxLmdw
dTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMS5ncHUxLmhpZ2hfd2F0ZXJt
YXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsxLmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0u
cmFuazEuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsxLmdwdTIuaGln
aF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazEuZ3B1Mi5saW1pdAQtMU5jb25mLnRp
ZXIudnJhbS5yYW5rMS5ncHUyLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEu
Z3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMS5ncHUzLmxpbWl0BC0x
TmNvbmYudGllci52cmFtLnJhbmsxLmdwdTMubG93X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJh
bS5yYW5rMi5hbGxfZ3B1cy5oaWdoX3dhdGVybWFyawQ5MEZjb25mLnRpZXIudnJhbS5yYW5rMi5h
bGxfZ3B1cy5saW1pdAQtMVZjb25mLnRpZXIudnJhbS5yYW5rMi5hbGxfZ3B1cy5sb3dfd2F0ZXJt
YXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsyLmdwdTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50
aWVyLnZyYW0ucmFuazIuZ3B1MC5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMi5ncHUwLmxv
d193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazIuZ3B1MS5oaWdoX3dhdGVybWFyawQ5
MD5jb25mLnRpZXIudnJhbS5yYW5rMi5ncHUxLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmsy
LmdwdTEubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMi5ncHUyLmhpZ2hfd2F0
ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsyLmdwdTIubGltaXQELTFOY29uZi50aWVyLnZy
YW0ucmFuazIuZ3B1Mi5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsyLmdwdTMu
aGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazIuZ3B1My5saW1pdAQtMU5jb25m
LnRpZXIudnJhbS5yYW5rMi5ncHUzLmxvd193YXRlcm1hcmsEODBYY29uZi50aWVyLnZyYW0ucmFu
azMuYWxsX2dwdXMuaGlnaF93YXRlcm1hcmsEOTBGY29uZi50aWVyLnZyYW0ucmFuazMuYWxsX2dw
dXMubGltaXQELTFWY29uZi50aWVyLnZyYW0ucmFuazMuYWxsX2dwdXMubG93X3dhdGVybWFyawQ4
MFBjb25mLnRpZXIudnJhbS5yYW5rMy5ncHUwLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52
cmFtLnJhbmszLmdwdTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazMuZ3B1MC5sb3dfd2F0
ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmszLmdwdTEuaGlnaF93YXRlcm1hcmsEOTA+Y29u
Zi50aWVyLnZyYW0ucmFuazMuZ3B1MS5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMy5ncHUx
Lmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazMuZ3B1Mi5oaWdoX3dhdGVybWFy
awQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMy5ncHUyLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJh
bmszLmdwdTIubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMy5ncHUzLmhpZ2hf
d2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmszLmdwdTMubGltaXQELTFOY29uZi50aWVy
LnZyYW0ucmFuazMuZ3B1My5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbms0LmFs
bF9ncHVzLmhpZ2hfd2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbms0LmFsbF9ncHVzLmxp
bWl0BC0xVmNvbmYudGllci52cmFtLnJhbms0LmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29u
Zi50aWVyLnZyYW0ucmFuazQuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5y
YW5rNC5ncHUwLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms0LmdwdTAubG93X3dhdGVybWFy
awQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNC5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGll
ci52cmFtLnJhbms0LmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazQuZ3B1MS5sb3df
d2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms0LmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+
Y29uZi50aWVyLnZyYW0ucmFuazQuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNC5n
cHUyLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazQuZ3B1My5oaWdoX3dhdGVy
bWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rNC5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFt
LnJhbms0LmdwdTMubG93X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1
cy5oaWdoX3dhdGVybWFyawQ5MEZjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1cy5saW1pdAQt
MVZjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1cy5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGll
ci52cmFtLnJhbms1LmdwdTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazUu
Z3B1MC5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNS5ncHUwLmxvd193YXRlcm1hcmsEODBQ
Y29uZi50aWVyLnZyYW0ucmFuazUuZ3B1MS5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJh
bS5yYW5rNS5ncHUxLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms1LmdwdTEubG93X3dhdGVy
bWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNS5ncHUyLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYu
dGllci52cmFtLnJhbms1LmdwdTIubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazUuZ3B1Mi5s
b3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms1LmdwdTMuaGlnaF93YXRlcm1hcmsE
OTA+Y29uZi50aWVyLnZyYW0ucmFuazUuZ3B1My5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5r
NS5ncHUzLmxvd193YXRlcm1hcmsEODBYY29uZi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMuaGln
aF93YXRlcm1hcmsEOTBGY29uZi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMubGltaXQELTFWY29u
Zi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJh
bS5yYW5rNi5ncHUwLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms2LmdwdTAu
bGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1MC5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYu
dGllci52cmFtLnJhbms2LmdwdTEuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFu
azYuZ3B1MS5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNi5ncHUxLmxvd193YXRlcm1hcmsE
ODBQY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1Mi5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIu
dnJhbS5yYW5rNi5ncHUyLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms2LmdwdTIubG93X3dh
dGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNi5ncHUzLmhpZ2hfd2F0ZXJtYXJrBDkwPmNv
bmYudGllci52cmFtLnJhbms2LmdwdTMubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1
My5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbms3LmFsbF9ncHVzLmhpZ2hfd2F0
ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbms3LmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYudGll
ci52cmFtLnJhbms3LmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFu
azcuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rNy5ncHUwLmxpbWl0
BC0xTmNvbmYudGllci52cmFtLnJhbms3LmdwdTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIu
dnJhbS5yYW5rNy5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms3Lmdw
dTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazcuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwUGNv
bmYudGllci52cmFtLnJhbms3LmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0u
cmFuazcuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNy5ncHUyLmxvd193YXRlcm1h
cmsEODBQY29uZi50aWVyLnZyYW0ucmFuazcuZ3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRp
ZXIudnJhbS5yYW5rNy5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms3LmdwdTMubG93
X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJhbS5yYW5rOC5hbGxfZ3B1cy5oaWdoX3dhdGVybWFy
awQ5MEZjb25mLnRpZXIudnJhbS5yYW5rOC5hbGxfZ3B1cy5saW1pdAQtMVZjb25mLnRpZXIudnJh
bS5yYW5rOC5hbGxfZ3B1cy5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms4Lmdw
dTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazguZ3B1MC5saW1pdAQtMU5j
b25mLnRpZXIudnJhbS5yYW5rOC5ncHUwLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0u
cmFuazguZ3B1MS5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rOC5ncHUxLmxp
bWl0BC0xTmNvbmYudGllci52cmFtLnJhbms4LmdwdTEubG93X3dhdGVybWFyawQ4MFBjb25mLnRp
ZXIudnJhbS5yYW5rOC5ncHUyLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms4
LmdwdTIubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazguZ3B1Mi5sb3dfd2F0ZXJtYXJrBDgw
UGNvbmYudGllci52cmFtLnJhbms4LmdwdTMuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZy
YW0ucmFuazguZ3B1My5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rOC5ncHUzLmxvd193YXRl
cm1hcmsEODA0Y29uZi50aWVyX3N0cmF0ZWd5LmRlZmF1bHRCVlJBTSAyLCBSQU0gNSwgRElTSzAg
NSwgUEVSU0lTVCA1YGNvbmYudGllcl9zdHJhdGVneS5wcmVkaWNhdGVfZXZhbHVhdGlvbl9pbnRl
cnZhbAQ2MCRjb25mLnRvbXNfcGVyX3JhbmsCMSBjb25mLnRwc19wZXJfdG9tBDQwImNvbmYudHJp
Z2dlcl9wb3J0CDkwMDE+Y29uZi51bmlmaWVkX3NlY3VyaXR5X25hbWVzcGFjZQpGQUxTRTpjb25m
LnVzZV9leHRlcm5hbF90ZXh0X3NlcnZlcghUUlVFHGNvbmYudXNlX2h0dHBzCkZBTFNFLGNvbmYu
dmlkZW9fZGVmYXVsdF90dGwELTEoY29uZi52aWRlb19tYXhfY291bnQELTEyY29uZi52aWRlb190
ZW1wX2RpcmVjdG9yeSwvdG1wL2dwdWRiLXRlbXAtdmlkZW9zImNvbmYud2FsLmNoZWNrc3VtCFRS
VUUwY29uZi53YWwuZmx1c2hfZnJlcXVlbmN5BDYwMmNvbmYud2FsLm1heF9zZWdtZW50X3NpemUS
NTAwMDAwMDAwLGNvbmYud2FsLnNlZ21lbnRfY291bnQELTEoY29uZi53YWwuc3luY19wb2xpY3kK
Zmx1c2g2Y29uZi53b3JrZXJfaHR0cF9zZXJ2ZXJfaXBz6AExNzIuMzEuMzMuMzA7MTcyLjMxLjMz
LjMwOzE3Mi4zMS4zMy4zMTsxNzIuMzEuMzMuMzI7MTcyLjMxLjMzLjMzOzE3Mi4zMS4zMy4zNDsx
NzIuMzEuMzMuMzU7MTcyLjMxLjMzLjM2OzE3Mi4zMS4zMy4zNzpjb25mLndvcmtlcl9odHRwX3Nl
cnZlcl9wb3J0c1g5MTkxOzkxOTI7OTE5Mzs5MTk0OzkxOTU7OTE5Njs5MTk3OzkxOTg7OTE5OThj
b25mLndvcmtlcl9odHRwX3NlcnZlcl91cmxzwANodHRwOi8vMTcyLjMxLjMzLjMwOjkxOTE7aHR0
cDovLzE3Mi4zMS4zMy4zMDo5MTkyO2h0dHA6Ly8xNzIuMzEuMzMuMzE6OTE5MztodHRwOi8vMTcy
LjMxLjMzLjMyOjkxOTQ7aHR0cDovLzE3Mi4zMS4zMy4zMzo5MTk1O2h0dHA6Ly8xNzIuMzEuMzMu
MzQ6OTE5NjtodHRwOi8vMTcyLjMxLjMzLjM1OjkxOTc7aHR0cDovLzE3Mi4zMS4zMy4zNjo5MTk4
O2h0dHA6Ly8xNzIuMzEuMzMuMzc6OTE5OUhjb25mLndvcmtlcl9odHRwX3NlcnZlcl91cmxzX3By
aXZhdGXAA2h0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5MTtodHRwOi8vMTcyLjMxLjMzLjMwOjkxOTI7
aHR0cDovLzE3Mi4zMS4zMy4zMTo5MTkzO2h0dHA6Ly8xNzIuMzEuMzMuMzI6OTE5NDtodHRwOi8v
MTcyLjMxLjMzLjMzOjkxOTU7aHR0cDovLzE3Mi4zMS4zMy4zNDo5MTk2O2h0dHA6Ly8xNzIuMzEu
MzMuMzU6OTE5NztodHRwOi8vMTcyLjMxLjMzLjM2OjkxOTg7aHR0cDovLzE3Mi4zMS4zMy4zNzo5
MTk5KHN5c3RlbS5mb250X2ZhbWlsaWVzpgFEZWphVnUgTWF0aCBUZVggR3lyZSxEZWphVnUgU2Fu
cyBNb25vLERlamFWdSBTYW5zLERlamFWdSBTZXJpZixTYW5zLFNlcmlmLE1vbm9zcGFjZTB2ZXJz
aW9uLmdwdWRiX2J1aWxkX2RhdGUoRmViIDE0IDIwMjQgMjM6NDk6MDFAdmVyc2lvbi5ncHVkYl9j
b21wdXRlX2NhcGFiaWxpdHkWNjA7NzA7ODA7ODY4dmVyc2lvbi5ncHVkYl9jb21wdXRlX2VuZ2lu
ZQhDVURBPnZlcnNpb24uZ3B1ZGJfY29yZV9saWJzX3ZlcnNpb24UMjAyNDAyMTMwMDR2ZXJzaW9u
LmdwdWRiX2NvcmVfdmVyc2lvbiw3LjIuMC4xLjIwMjQwMjE0MjEwOTA2NHZlcnNpb24uZ3B1ZGJf
ZmlsZV92ZXJzaW9uFDIwMjEwMzExMjAqdmVyc2lvbi5ncHVkYl92ZXJzaW9uUDkyMjYwYTMyOWNh
NDVjYjBlMzc3NzZjZjkxNDQ5NzE3OWY2MjExNDM0dmVyc2lvbi5ncHVkYl92ZXJzaW9uX2RhdGUy
MjAyNC0wMi0xNCAyMTowOTowNiAtMDUwMCx2ZXJzaW9uLnB5dGhvbl92ZXJzaW9uDjMuMTAuMTMA
AAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:36 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00063'
status:
code: 200
message: OK
version: 1
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/cassettes/TestChatKinetica.test_generate.yaml | interactions:
- request:
body: "\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/system/status
response:
body:
string: !!binary |
BE9LADZzaG93X3N5c3RlbV9zdGF0dXNfcmVzcG9uc2X8ugEUCmdyYXBovAl7ImNvdW50Ijo4LCJz
dGF0dXMiOlt7InZlcnNpb24iOjM5LCJzZXJ2ZXJfaWQiOjAsImhvc3RfaWQiOiIxNzIuMzEuMzMu
MzAiLCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjoxLCJob3N0
X2lkIjoiMTcyLjMxLjMzLjMxIiwic3RhdHVzIjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzksInNl
cnZlcl9pZCI6MiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zMiIsInN0YXR1cyI6InJ1bm5pbmcifSx7
InZlcnNpb24iOjM5LCJzZXJ2ZXJfaWQiOjMsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzMiLCJzdGF0
dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjo0LCJob3N0X2lkIjoiMTcy
LjMxLjMzLjM0Iiwic3RhdHVzIjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzksInNlcnZlcl9pZCI6
NSwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zNSIsInN0YXR1cyI6InJ1bm5pbmcifSx7InZlcnNpb24i
OjM5LCJzZXJ2ZXJfaWQiOjYsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzYiLCJzdGF0dXMiOiJydW5u
aW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjo3LCJob3N0X2lkIjoiMTcyLjMxLjMzLjM3
Iiwic3RhdHVzIjoicnVubmluZyJ9XX0eaGFfY2x1c3Rlcl9pbmZvzAN7ImhhX3JhbmtzX2luZm8i
Olt7InByaXZhdGVfdXJsIjoiaHR0cDovLzE3Mi4zMS4zMy4zMDo5MTkxIiwicHVibGljX3VybCI6
Imh0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5MSIsImhhX3VybF9vdmVycmlkZSI6Imh0dHA6Ly8xNzIu
MzEuMzMuMzA6OTE5MSIsImFsdGVybmF0ZV91cmxzIjpbImh0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5
MSJdfV0sInJlYWR5Ijp0cnVlLCJuYW1lIjoicHJvZHVjdGlvbi1uZXcifQpob3N0c/hLeyJjb3Vu
dCI6OCwic3RhdHVzIjpbeyJ2ZXJzaW9uIjoxMzMsImhvc3RfbnVtYmVyIjowLCJpZCI6IjE3Mi4z
MS4zMy4zMCIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzAtdjEwMCIsInN0YXR1cyI6InJ1bm5pbmci
LCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRzX3N0YXR1cyI6InJ1bm5pbmciLCJtbF9z
dGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJfc3RhdHVzIjoicnVubmluZyIsInJldmVh
bF9zdGF0dXMiOiJydW5uaW5nIiwidG90YWxfbWVtb3J5Ijo4MTAyMDEyNjAwMzIsInN0YXJ0X3Rp
bWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3Mi4zMS4zMy4zMCJdLCJncHVfaWRzIjpb
IkdQVS1hMDdjMGU0OC05NDhhLTQwNWMtNDNiNy03Mzc0ZDJkZGMwOGMiLCJHUFUtNTNlMWI3YjQt
NjhiOC1hMjhjLTEwZjMtZGI2YTA1OTdmYmI0IiwiR1BVLTQxZmY5MWZiLWVjYjktMGE1Yi1kNDdj
LWQ1YmI3ZWYxMDM4YSIsIkdQVS1hOTQwYjEyNi1iMWE0LTlmMDctNDRlMS02MzZjMzc5ODllY2Yi
XSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9zdF9yb2xlIjoibGVhZGVyIiwiaG9zdF90
ZXJtIjowLCJob3N0X2VsZWN0aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9u
IjoxMTMsImhvc3RfbnVtYmVyIjoxLCJpZCI6IjE3Mi4zMS4zMy4zMSIsImhvc3RuYW1lIjoiMzAw
LTMwMy11MzEtdjEwMCIsInN0YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxl
ZCIsInN0YXRzX3N0YXR1cyI6InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5
X3BsYW5uZXJfc3RhdHVzIjoic3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90
YWxfbWVtb3J5Ijo4MTAyMDEyMzU0NTYsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtf
aXBzIjpbIjE3Mi4zMS4zMy4zMSJdLCJncHVfaWRzIjpbIkdQVS0xMzJmMWRkYi1hNWY0LTIwZDMt
MTEyMi02ZDM1OTlhZmNmMWQiLCJHUFUtM2JjN2JmY2QtOTVhOS1hNjI2LThlOGYtMzdiOTcxOWFh
OWRkIiwiR1BVLWZlMTBhZDNkLTg4M2QtYTU5MC1kNDA1LWUwYTU2OTNiMGFmMCIsIkdQVS02Yjgy
OTY5OS0wYjRjLTEyZjAtMGMyOC04Y2Y0NmMyNGMxODUiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZh
bHNlIiwiaG9zdF9yb2xlIjoiZm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25f
c3RhdHVzIjoibGVhZGVyX2VsZWN0ZWQifSx7InZlcnNpb24iOjExMywiaG9zdF9udW1iZXIiOjIs
ImlkIjoiMTcyLjMxLjMzLjMyIiwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzMi12MTAwIiwic3RhdHVz
IjoicnVubmluZyIsImh0dHBkX3N0YXR1cyI6ImRpc2FibGVkIiwic3RhdHNfc3RhdHVzIjoicnVu
bmluZyIsIm1sX3N0YXR1cyI6ImRpc2FibGVkIiwicXVlcnlfcGxhbm5lcl9zdGF0dXMiOiJzdG9w
cGVkIiwicmV2ZWFsX3N0YXR1cyI6InN0b3BwZWQiLCJ0b3RhbF9tZW1vcnkiOjgxMDIwMTIzMTM2
MCwic3RhcnRfdGltZSI6MTcwODEwMzQ1MiwibmV0d29ya19pcHMiOlsiMTcyLjMxLjMzLjMyIl0s
ImdwdV9pZHMiOlsiR1BVLTA5MGM0NDMwLWRiYmUtMWUxYS03ZjdmLWExODI3ODNhZDIzMSIsIkdQ
VS1iZTE0YjVjZS1iNDExLTQ4Y2EtYTlmZi01YTA2YzdhNmYzOTgiLCJHUFUtNGIxYTU2ODgtMGU4
Yy1jYzk0LTgzM2ItYzJmMzllOTk1M2I4IiwiR1BVLTEyZWJhNDYzLTgzMmUtMTA4Yi1lY2IyLTVj
OWFmOGRhNjE2NCJdLCJhY2NlcHRzX2ZhaWxvdmVyIjoiZmFsc2UiLCJob3N0X3JvbGUiOiJmb2xs
b3dlciIsImhvc3RfdGVybSI6MCwiaG9zdF9lbGVjdGlvbl9zdGF0dXMiOiJsZWFkZXJfZWxlY3Rl
ZCJ9LHsidmVyc2lvbiI6MTEzLCJob3N0X251bWJlciI6MywiaWQiOiIxNzIuMzEuMzMuMzMiLCJo
b3N0bmFtZSI6IjMwMC0zMDMtdTMzLXYxMDAiLCJzdGF0dXMiOiJydW5uaW5nIiwiaHR0cGRfc3Rh
dHVzIjoiZGlzYWJsZWQiLCJzdGF0c19zdGF0dXMiOiJydW5uaW5nIiwibWxfc3RhdHVzIjoiZGlz
YWJsZWQiLCJxdWVyeV9wbGFubmVyX3N0YXR1cyI6InN0b3BwZWQiLCJyZXZlYWxfc3RhdHVzIjoi
c3RvcHBlZCIsInRvdGFsX21lbW9yeSI6ODEwMjAxMjM5NTUyLCJzdGFydF90aW1lIjoxNzA4MTAz
NDUyLCJuZXR3b3JrX2lwcyI6WyIxNzIuMzEuMzMuMzMiXSwiZ3B1X2lkcyI6WyJHUFUtMjYzMzFh
MDctMTc1Ni1mMDY2LTFlNWEtMzc1M2Y1ZTViYzc4IiwiR1BVLWE0MWFhMTg3LTQ1NmQtNjBiMy04
ZmM5LWI4YjMzZWFlMjFiYyIsIkdQVS01NWRlZjYxOS0wMTE2LWViZjctMzMwMy03ZDkzMmRmYzcw
ZmYiLCJHUFUtNGM5YWYzODgtYjlmYi03MWQ5LWZiNDUtODMwYTM4MTIwMGQzIl0sImFjY2VwdHNf
ZmFpbG92ZXIiOiJmYWxzZSIsImhvc3Rfcm9sZSI6ImZvbGxvd2VyIiwiaG9zdF90ZXJtIjowLCJo
b3N0X2VsZWN0aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9uIjoxMTMsImhv
c3RfbnVtYmVyIjo0LCJpZCI6IjE3Mi4zMS4zMy4zNCIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzQt
djEwMCIsInN0YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRz
X3N0YXR1cyI6InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJf
c3RhdHVzIjoic3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90YWxfbWVtb3J5
Ijo4MTAyMDEyMTA4ODAsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3
Mi4zMS4zMy4zNCJdLCJncHVfaWRzIjpbIkdQVS02OGUwOWNmYy1mOWE0LTJhMTQtNTdhNC05NDgz
YjkxYzJkOWEiLCJHUFUtM2RmMjE2ZTgtZmU3NC0wNDdhLTk1YWMtNzJlMmNiZWNiNTIyIiwiR1BV
LTE0ZDQ0Yjk4LWIwNDItY2I4MS0xZGQzLTIwZDRmNjljODljYSIsIkdQVS0zNDA2NzMwYi1iZWFk
LWM1MGEtNDZlYi1lMGEyYzJiZjZlNzYiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9z
dF9yb2xlIjoiZm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25fc3RhdHVzIjoi
bGVhZGVyX2VsZWN0ZWQifSx7InZlcnNpb24iOjExMywiaG9zdF9udW1iZXIiOjUsImlkIjoiMTcy
LjMxLjMzLjM1IiwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzNS12MTAwIiwic3RhdHVzIjoicnVubmlu
ZyIsImh0dHBkX3N0YXR1cyI6ImRpc2FibGVkIiwic3RhdHNfc3RhdHVzIjoicnVubmluZyIsIm1s
X3N0YXR1cyI6ImRpc2FibGVkIiwicXVlcnlfcGxhbm5lcl9zdGF0dXMiOiJzdG9wcGVkIiwicmV2
ZWFsX3N0YXR1cyI6InN0b3BwZWQiLCJ0b3RhbF9tZW1vcnkiOjgxMDIwMTI0Nzc0NCwic3RhcnRf
dGltZSI6MTcwODEwMzQ1MiwibmV0d29ya19pcHMiOlsiMTcyLjMxLjMzLjM1Il0sImdwdV9pZHMi
OlsiR1BVLWY3MzAyYWJjLTllYWEtMTRjOS1mNDI2LTE2M2RmM2RhOGMyNiIsIkdQVS03N2RkN2Q0
OC1mOTgwLWZkMDYtNzIyYy0xYzViOTMyMTgyMDMiLCJHUFUtNTUyYjIwYTUtNTdlNi00OTg2LWJl
MmItMmIzNzhmZDRiY2FhIiwiR1BVLTYzMDUzMTYyLTMwN2YtNTVjNS1hOTc0LTU4ZGZlODQzNDJi
MiJdLCJhY2NlcHRzX2ZhaWxvdmVyIjoiZmFsc2UiLCJob3N0X3JvbGUiOiJmb2xsb3dlciIsImhv
c3RfdGVybSI6MCwiaG9zdF9lbGVjdGlvbl9zdGF0dXMiOiJsZWFkZXJfZWxlY3RlZCJ9LHsidmVy
c2lvbiI6MTEzLCJob3N0X251bWJlciI6NiwiaWQiOiIxNzIuMzEuMzMuMzYiLCJob3N0bmFtZSI6
IjMwMC0zMDMtdTM2LXYxMDAiLCJzdGF0dXMiOiJydW5uaW5nIiwiaHR0cGRfc3RhdHVzIjoiZGlz
YWJsZWQiLCJzdGF0c19zdGF0dXMiOiJydW5uaW5nIiwibWxfc3RhdHVzIjoiZGlzYWJsZWQiLCJx
dWVyeV9wbGFubmVyX3N0YXR1cyI6InN0b3BwZWQiLCJyZXZlYWxfc3RhdHVzIjoic3RvcHBlZCIs
InRvdGFsX21lbW9yeSI6ODEwMjAxMjIzMTY4LCJzdGFydF90aW1lIjoxNzA4MTAzNDUyLCJuZXR3
b3JrX2lwcyI6WyIxNzIuMzEuMzMuMzYiXSwiZ3B1X2lkcyI6WyJHUFUtOGU5ZDRhZGItYzYxMS04
MmYwLTNmZTQtZjFmMmMzZTZhNDRmIiwiR1BVLTRjNmFiYWM5LTc0ZWMtZjc4Yy1mYWE5LTQ3NDZk
MDA1N2FiOCIsIkdQVS05NjI1YTc4Yy1lMmFlLTdmMDktOWNjZi1lZTA0OTk1MjYzMTAiLCJHUFUt
YWI3NTRhM2MtNjA4Ni1iYjUxLWU5NGEtM2NmNGExNDkwNWJhIl0sImFjY2VwdHNfZmFpbG92ZXIi
OiJmYWxzZSIsImhvc3Rfcm9sZSI6ImZvbGxvd2VyIiwiaG9zdF90ZXJtIjowLCJob3N0X2VsZWN0
aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9uIjoxMTMsImhvc3RfbnVtYmVy
Ijo3LCJpZCI6IjE3Mi4zMS4zMy4zNyIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzctdjEwMCIsInN0
YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRzX3N0YXR1cyI6
InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJfc3RhdHVzIjoi
c3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90YWxfbWVtb3J5Ijo4MTAyMDEy
MzU0NTYsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3Mi4zMS4zMy4z
NyJdLCJncHVfaWRzIjpbIkdQVS1lYTE4ZDM4OC1lMmIzLTQyODMtYTZiNS1hMzIzYTQ4NzI1YTki
LCJHUFUtZGNkODRlZmItOTRjYS1iNDk3LThjMTUtN2EzMjY5NDBjMWViIiwiR1BVLTkxNjAxYTVl
LTk3M2YtZDFlNC02ZTFkLWY2NTUyOTRkMzQ2MCIsIkdQVS0wN2ZhNGRiNi05ZTU1LWI2MWYtOTA5
YS04NWM0ZDFiZWIwODgiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9zdF9yb2xlIjoi
Zm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25fc3RhdHVzIjoibGVhZGVyX2Vs
ZWN0ZWQifV19Fmh0dHBfc2VydmVyoAN7ImNvbm5lY3Rpb25zIjp7ImN1cnJlbnQiOjEsIm1heF9j
b25jdXJyZW50IjoxMTYsInF1ZXVlZCI6MCwibWF4X3F1ZXVlZF9hbGxvd2VkIjo2NTUzNiwidG90
YWwiOjMyNTg5OCwicmVmdXNlZCI6MCwidGhyZWFkcyI6Mn0sInRocmVhZHMiOnsidXNlZCI6Miwi
Y2FwYWNpdHkiOjUxMiwiYWxsb2NhdGVkIjo4LCJhdmFpbGFibGUiOjUxMCwic3RhY2tfc2l6ZSI6
MH19FG1pZ3JhdGlvbnMueyJjb3VudCI6MCwic3RhdHVzIjpbXX0KcmFua3PgUnsiY291bnQiOjks
InN0YXR1cyI6W3sidmVyc2lvbiI6MTE4LCJyYW5rIjowLCJyYW5rX2lkIjoiMCA6IDE3Mi4zMS4z
My4zMCA6IDMyNzc0MDYiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0YXR1cyI6InJ1bm5pbmci
LCJyZW1vdmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6IjcuMi4wLjEuMjAyNDAyMTQy
MTA5MDYiLCJwaWQiOjMyNzc0MDYsInN0YXJ0X3RpbWUiOjE3MDg0NDM2NTYsInN0YXJ0X3RpbWVf
c3RyIjoiVHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291bnQiOjEsImFjY2VwdGlu
Z19qb2JzIjp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6ZmFsc2UsImhvc3RuYW1l
IjoiMzAwLTMwMy11MzAtdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzAiLCJncHVfaWRzIjpb
IkdQVS1hMDdjMGU0OC05NDhhLTQwNWMtNDNiNy03Mzc0ZDJkZGMwOGMiXSwiZ3B1X2luZGV4Ijow
LCJncHVfaW5kaWNlcyI6WzBdfSx7InZlcnNpb24iOjEwMiwicmFuayI6MSwicmFua19pZCI6IjEg
OiAxNzIuMzEuMzMuMzAgOiAzMjc4NTYwIiwicmFua19tb2RlIjoicnVuIiwicmFua19zdGF0dXMi
OiJydW5uaW5nIiwicmVtb3ZhbF9zdGF0dXMiOiJub25lIiwiYXBwX3ZlcnNpb24iOiI3LjIuMC4x
LjIwMjQwMjE0MjEwOTA2IiwicGlkIjozMjc4NTYwLCJzdGFydF90aW1lIjoxNzA4NDQzNjU2LCJz
dGFydF90aW1lX3N0ciI6IlR1ZSBGZWIgMjAgMTU6NDA6NTYgMjAyNCIsInN0YXJ0X2NvdW50Ijox
LCJhY2NlcHRpbmdfam9icyI6dHJ1ZSwibmV3X3JhbmsiOmZhbHNlLCJyZWFkX29ubHkiOmZhbHNl
LCJob3N0bmFtZSI6IjMwMC0zMDMtdTMwLXYxMDAiLCJob3N0X2lkIjoiMTcyLjMxLjMzLjMwIiwi
Z3B1X2lkcyI6WyJHUFUtYTA3YzBlNDgtOTQ4YS00MDVjLTQzYjctNzM3NGQyZGRjMDhjIiwiR1BV
LTUzZTFiN2I0LTY4YjgtYTI4Yy0xMGYzLWRiNmEwNTk3ZmJiNCIsIkdQVS00MWZmOTFmYi1lY2I5
LTBhNWItZDQ3Yy1kNWJiN2VmMTAzOGEiLCJHUFUtYTk0MGIxMjYtYjFhNC05ZjA3LTQ0ZTEtNjM2
YzM3OTg5ZWNmIl0sImdwdV9pbmRleCI6MCwiZ3B1X2luZGljZXMiOlswLDEsMiwzXX0seyJ2ZXJz
aW9uIjoxMDIsInJhbmsiOjIsInJhbmtfaWQiOiIyIDogMTcyLjMxLjMzLjMxIDogMTU0NDUxOSIs
InJhbmtfbW9kZSI6InJ1biIsInJhbmtfc3RhdHVzIjoicnVubmluZyIsInJlbW92YWxfc3RhdHVz
Ijoibm9uZSIsImFwcF92ZXJzaW9uIjoiNy4yLjAuMS4yMDI0MDIxNDIxMDkwNiIsInBpZCI6MTU0
NDUxOSwic3RhcnRfdGltZSI6MTcwODQ0MzY1Niwic3RhcnRfdGltZV9zdHIiOiJUdWUgRmViIDIw
IDE1OjQwOjU2IDIwMjQiLCJzdGFydF9jb3VudCI6MSwiYWNjZXB0aW5nX2pvYnMiOnRydWUsIm5l
d19yYW5rIjpmYWxzZSwicmVhZF9vbmx5IjpmYWxzZSwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzMS12
MTAwIiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zMSIsImdwdV9pZHMiOlsiR1BVLTEzMmYxZGRiLWE1
ZjQtMjBkMy0xMTIyLTZkMzU5OWFmY2YxZCIsIkdQVS0zYmM3YmZjZC05NWE5LWE2MjYtOGU4Zi0z
N2I5NzE5YWE5ZGQiLCJHUFUtZmUxMGFkM2QtODgzZC1hNTkwLWQ0MDUtZTBhNTY5M2IwYWYwIiwi
R1BVLTZiODI5Njk5LTBiNGMtMTJmMC0wYzI4LThjZjQ2YzI0YzE4NSJdLCJncHVfaW5kZXgiOjAs
ImdwdV9pbmRpY2VzIjpbMCwxLDIsM119LHsidmVyc2lvbiI6MTAwLCJyYW5rIjozLCJyYW5rX2lk
IjoiMyA6IDE3Mi4zMS4zMy4zMiA6IDE0Mjk2ODUiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0
YXR1cyI6InJ1bm5pbmciLCJyZW1vdmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6Ijcu
Mi4wLjEuMjAyNDAyMTQyMTA5MDYiLCJwaWQiOjE0Mjk2ODUsInN0YXJ0X3RpbWUiOjE3MDg0NDM2
NTYsInN0YXJ0X3RpbWVfc3RyIjoiVHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291
bnQiOjEsImFjY2VwdGluZ19qb2JzIjp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6
ZmFsc2UsImhvc3RuYW1lIjoiMzAwLTMwMy11MzItdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMu
MzIiLCJncHVfaWRzIjpbIkdQVS0wOTBjNDQzMC1kYmJlLTFlMWEtN2Y3Zi1hMTgyNzgzYWQyMzEi
LCJHUFUtYmUxNGI1Y2UtYjQxMS00OGNhLWE5ZmYtNWEwNmM3YTZmMzk4IiwiR1BVLTRiMWE1Njg4
LTBlOGMtY2M5NC04MzNiLWMyZjM5ZTk5NTNiOCIsIkdQVS0xMmViYTQ2My04MzJlLTEwOGItZWNi
Mi01YzlhZjhkYTYxNjQiXSwiZ3B1X2luZGV4IjowLCJncHVfaW5kaWNlcyI6WzAsMSwyLDNdfSx7
InZlcnNpb24iOjExNiwicmFuayI6NCwicmFua19pZCI6IjQgOiAxNzIuMzEuMzMuMzMgOiAxNjA4
NzYxIiwicmFua19tb2RlIjoicnVuIiwicmFua19zdGF0dXMiOiJydW5uaW5nIiwicmVtb3ZhbF9z
dGF0dXMiOiJub25lIiwiYXBwX3ZlcnNpb24iOiI3LjIuMC4xLjIwMjQwMjE0MjEwOTA2IiwicGlk
IjoxNjA4NzYxLCJzdGFydF90aW1lIjoxNzA4NDQzNjU1LCJzdGFydF90aW1lX3N0ciI6IlR1ZSBG
ZWIgMjAgMTU6NDA6NTUgMjAyNCIsInN0YXJ0X2NvdW50IjoxLCJhY2NlcHRpbmdfam9icyI6dHJ1
ZSwibmV3X3JhbmsiOmZhbHNlLCJyZWFkX29ubHkiOmZhbHNlLCJob3N0bmFtZSI6IjMwMC0zMDMt
dTMzLXYxMDAiLCJob3N0X2lkIjoiMTcyLjMxLjMzLjMzIiwiZ3B1X2lkcyI6WyJHUFUtMjYzMzFh
MDctMTc1Ni1mMDY2LTFlNWEtMzc1M2Y1ZTViYzc4IiwiR1BVLWE0MWFhMTg3LTQ1NmQtNjBiMy04
ZmM5LWI4YjMzZWFlMjFiYyIsIkdQVS01NWRlZjYxOS0wMTE2LWViZjctMzMwMy03ZDkzMmRmYzcw
ZmYiLCJHUFUtNGM5YWYzODgtYjlmYi03MWQ5LWZiNDUtODMwYTM4MTIwMGQzIl0sImdwdV9pbmRl
eCI6MCwiZ3B1X2luZGljZXMiOlswLDEsMiwzXX0seyJ2ZXJzaW9uIjoxMDIsInJhbmsiOjUsInJh
bmtfaWQiOiI1IDogMTcyLjMxLjMzLjM0IDogMTY2MDEwMiIsInJhbmtfbW9kZSI6InJ1biIsInJh
bmtfc3RhdHVzIjoicnVubmluZyIsInJlbW92YWxfc3RhdHVzIjoibm9uZSIsImFwcF92ZXJzaW9u
IjoiNy4yLjAuMS4yMDI0MDIxNDIxMDkwNiIsInBpZCI6MTY2MDEwMiwic3RhcnRfdGltZSI6MTcw
ODQ0MzY1Niwic3RhcnRfdGltZV9zdHIiOiJUdWUgRmViIDIwIDE1OjQwOjU2IDIwMjQiLCJzdGFy
dF9jb3VudCI6MSwiYWNjZXB0aW5nX2pvYnMiOnRydWUsIm5ld19yYW5rIjpmYWxzZSwicmVhZF9v
bmx5IjpmYWxzZSwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzNC12MTAwIiwiaG9zdF9pZCI6IjE3Mi4z
MS4zMy4zNCIsImdwdV9pZHMiOlsiR1BVLTY4ZTA5Y2ZjLWY5YTQtMmExNC01N2E0LTk0ODNiOTFj
MmQ5YSIsIkdQVS0zZGYyMTZlOC1mZTc0LTA0N2EtOTVhYy03MmUyY2JlY2I1MjIiLCJHUFUtMTRk
NDRiOTgtYjA0Mi1jYjgxLTFkZDMtMjBkNGY2OWM4OWNhIiwiR1BVLTM0MDY3MzBiLWJlYWQtYzUw
YS00NmViLWUwYTJjMmJmNmU3NiJdLCJncHVfaW5kZXgiOjAsImdwdV9pbmRpY2VzIjpbMCwxLDIs
M119LHsidmVyc2lvbiI6MTAyLCJyYW5rIjo2LCJyYW5rX2lkIjoiNiA6IDE3Mi4zMS4zMy4zNSA6
IDEzNzk3MTUiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0YXR1cyI6InJ1bm5pbmciLCJyZW1v
dmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6IjcuMi4wLjEuMjAyNDAyMTQyMTA5MDYi
LCJwaWQiOjEzNzk3MTUsInN0YXJ0X3RpbWUiOjE3MDg0NDM2NTYsInN0YXJ0X3RpbWVfc3RyIjoi
VHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291bnQiOjEsImFjY2VwdGluZ19qb2Jz
Ijp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6ZmFsc2UsImhvc3RuYW1lIjoiMzAw
LTMwMy11MzUtdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzUiLCJncHVfaWRzIjpbIkdQVS1m
NzMwMmFiYy05ZWFhLTE0YzktZjQyNi0xNjNkZjNkYThjMjYiLCJHUFUtNzdkZDdkNDgtZjk4MC1m
ZDA2LTcyMmMtMWM1YjkzMjE4MjAzIiwiR1BVLTU1MmIyMGE1LTU3ZTYtNDk4Ni1iZTJiLTJiMzc4
ZmQ0YmNhYSIsIkdQVS02MzA1MzE2Mi0zMDdmLTU1YzUtYTk3NC01OGRmZTg0MzQyYjIiXSwiZ3B1
X2luZGV4IjowLCJncHVfaW5kaWNlcyI6WzAsMSwyLDNdfSx7InZlcnNpb24iOjEwMiwicmFuayI6
NywicmFua19pZCI6IjcgOiAxNzIuMzEuMzMuMzYgOiAxMzc5NTM5IiwicmFua19tb2RlIjoicnVu
IiwicmFua19zdGF0dXMiOiJydW5uaW5nIiwicmVtb3ZhbF9zdGF0dXMiOiJub25lIiwiYXBwX3Zl
cnNpb24iOiI3LjIuMC4xLjIwMjQwMjE0MjEwOTA2IiwicGlkIjoxMzc5NTM5LCJzdGFydF90aW1l
IjoxNzA4NDQzNjU2LCJzdGFydF90aW1lX3N0ciI6IlR1ZSBGZWIgMjAgMTU6NDA6NTYgMjAyNCIs
InN0YXJ0X2NvdW50IjoxLCJhY2NlcHRpbmdfam9icyI6dHJ1ZSwibmV3X3JhbmsiOmZhbHNlLCJy
ZWFkX29ubHkiOmZhbHNlLCJob3N0bmFtZSI6IjMwMC0zMDMtdTM2LXYxMDAiLCJob3N0X2lkIjoi
MTcyLjMxLjMzLjM2IiwiZ3B1X2lkcyI6WyJHUFUtOGU5ZDRhZGItYzYxMS04MmYwLTNmZTQtZjFm
MmMzZTZhNDRmIiwiR1BVLTRjNmFiYWM5LTc0ZWMtZjc4Yy1mYWE5LTQ3NDZkMDA1N2FiOCIsIkdQ
VS05NjI1YTc4Yy1lMmFlLTdmMDktOWNjZi1lZTA0OTk1MjYzMTAiLCJHUFUtYWI3NTRhM2MtNjA4
Ni1iYjUxLWU5NGEtM2NmNGExNDkwNWJhIl0sImdwdV9pbmRleCI6MCwiZ3B1X2luZGljZXMiOlsw
LDEsMiwzXX0seyJ2ZXJzaW9uIjoxMDIsInJhbmsiOjgsInJhbmtfaWQiOiI4IDogMTcyLjMxLjMz
LjM3IDogMTM5Nzg5MyIsInJhbmtfbW9kZSI6InJ1biIsInJhbmtfc3RhdHVzIjoicnVubmluZyIs
InJlbW92YWxfc3RhdHVzIjoibm9uZSIsImFwcF92ZXJzaW9uIjoiNy4yLjAuMS4yMDI0MDIxNDIx
MDkwNiIsInBpZCI6MTM5Nzg5Mywic3RhcnRfdGltZSI6MTcwODQ0MzY1Niwic3RhcnRfdGltZV9z
dHIiOiJUdWUgRmViIDIwIDE1OjQwOjU2IDIwMjQiLCJzdGFydF9jb3VudCI6MSwiYWNjZXB0aW5n
X2pvYnMiOnRydWUsIm5ld19yYW5rIjpmYWxzZSwicmVhZF9vbmx5IjpmYWxzZSwiaG9zdG5hbWUi
OiIzMDAtMzAzLXUzNy12MTAwIiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zNyIsImdwdV9pZHMiOlsi
R1BVLWVhMThkMzg4LWUyYjMtNDI4My1hNmI1LWEzMjNhNDg3MjVhOSIsIkdQVS1kY2Q4NGVmYi05
NGNhLWI0OTctOGMxNS03YTMyNjk0MGMxZWIiLCJHUFUtOTE2MDFhNWUtOTczZi1kMWU0LTZlMWQt
ZjY1NTI5NGQzNDYwIiwiR1BVLTA3ZmE0ZGI2LTllNTUtYjYxZi05MDlhLTg1YzRkMWJlYjA4OCJd
LCJncHVfaW5kZXgiOjAsImdwdV9pbmRpY2VzIjpbMCwxLDIsM119XX0Oc3ltYm9scxZ7ImNvdW50
IjoxfQxzeXN0ZW2qA3siaWQiOiJLaW5ldGljYSAzMDAtMzAzLXUzMC12MTAwIiwic3RhcnRfdGlt
ZSI6MTcwODEwMzQ1Miwic3RhdHVzIjoicnVubmluZyIsImNsdXN0ZXJfbGVhZGVyIjoiMTcyLjMx
LjMzLjMwIiwidmVyc2lvbiI6MjUsImNsdXN0ZXJfb3BlcmF0aW9uX3J1bm5pbmciOiJmYWxzZSIs
ImNsdXN0ZXJfb3BlcmF0aW9uX3N0YXR1cyI6IiIsIm9mZmxpbmVfc3RhdHVzIjoiZmFsc2UifQh0
ZXh03AV7ImNvdW50Ijo4LCJzdGF0dXMiOlt7InZlcnNpb24iOjMzLCJyYW5rIjoxLCJzdGF0dXMi
OiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywicmFuayI6Miwic3RhdHVzIjoicnVubmluZyJ9LHsi
dmVyc2lvbiI6MzMsInJhbmsiOjMsInN0YXR1cyI6InJ1bm5pbmcifSx7InZlcnNpb24iOjMzLCJy
YW5rIjo0LCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywicmFuayI6NSwic3RhdHVz
IjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzMsInJhbmsiOjYsInN0YXR1cyI6InJ1bm5pbmcifSx7
InZlcnNpb24iOjMzLCJyYW5rIjo3LCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywi
cmFuayI6OCwic3RhdHVzIjoicnVubmluZyJ9XX0QdHJpZ2dlcnNeeyJ0b3RhbF9jb3VudCI6MCwi
cmFuZ2VfY291bnQiOjAsIm5haV9jb3VudCI6MH0AAAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:37 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00061'
status:
code: 200
message: OK
- request:
body: "\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/system/properties
response:
body:
string: !!binary |
BE9LAD5zaG93X3N5c3RlbV9wcm9wZXJ0aWVzX3Jlc3BvbnNlvPUCpAk8Y29uZi5haS5hcGkuY29u
bmVjdGlvbl90aW1lb3V0BDkwHmNvbmYuYWkuYXBpLmtleQAoY29uZi5haS5hcGkucHJvdmlkZXIW
a2luZXRpY2FsbG0eY29uZi5haS5hcGkudXJsSGh0dHA6Ly8xNzIuMzEuMzEuMTM6ODA1MC9zcWwv
c3VnZ2VzdDBjb25mLmFsZXJ0X2Rpc2tfYWJzb2x1dGUANGNvbmYuYWxlcnRfZGlza19wZXJjZW50
YWdlGDEsIDUsIDEwLCAyMBxjb25mLmFsZXJ0X2V4ZQAsY29uZi5hbGVydF9ob3N0X3N0YXR1cwhU
UlVFOmNvbmYuYWxlcnRfaG9zdF9zdGF0dXNfZmlsdGVyIGZhdGFsX2luaXRfZXJyb3I4Y29uZi5h
bGVydF9tYXhfc3RvcmVkX2FsZXJ0cwYxMDA0Y29uZi5hbGVydF9tZW1vcnlfYWJzb2x1dGUAOGNv
bmYuYWxlcnRfbWVtb3J5X3BlcmNlbnRhZ2UYMSwgNSwgMTAsIDIwNGNvbmYuYWxlcnRfcmFua19j
dWRhX2Vycm9yCFRSVUVEY29uZi5hbGVydF9yYW5rX2ZhbGxiYWNrX2FsbG9jYXRvcghUUlVFLGNv
bmYuYWxlcnRfcmFua19zdGF0dXMIVFJVRTpjb25mLmFsZXJ0X3Jhbmtfc3RhdHVzX2ZpbHRlclhm
YXRhbF9pbml0X2Vycm9yLCBub3RfcmVzcG9uZGluZywgdGVybWluYXRlZB5jb25mLmF1ZGl0X2Jv
ZHkKRkFMU0UeY29uZi5hdWRpdF9kYXRhCkZBTFNFJGNvbmYuYXVkaXRfaGVhZGVycwpGQUxTRT5j
b25mLmF1dG9fY3JlYXRlX2V4dGVybmFsX3VzZXJzCkZBTFNFTGNvbmYuYnVpbGRfbWF0ZXJpYWxp
emVkX3ZpZXdzX29uX3N0YXJ0Em9uX2RlbWFuZDhjb25mLmJ1aWxkX3BrX2luZGV4X29uX3N0YXJ0
Em9uX2RlbWFuZDhjb25mLmNodW5rX2NvbHVtbl9tYXhfbWVtb3J5EjUxMjAwMDAwMCpjb25mLmNo
dW5rX21heF9tZW1vcnkUODE5MjAwMDAwMB5jb25mLmNodW5rX3NpemUOODAwMDAwMCJjb25mLmNs
dXN0ZXJfbmFtZRxwcm9kdWN0aW9uLW5ld0Bjb25mLmNvbmN1cnJlbnRfa2VybmVsX2V4ZWN1dGlv
bghUUlVFOmNvbmYuZGVmYXVsdF9wcmltYXJ5X2tleV90eXBlDG1lbW9yeSBjb25mLmRlZmF1bHRf
dHRsBDIwLGNvbmYuZGlzYWJsZV9jbGVhcl9hbGwIVFJVRT5jb25mLmVncmVzc19wYXJxdWV0X2Nv
bXByZXNzaW9uDHNuYXBweSRjb25mLmVuYWJsZV9hbGVydHMIVFJVRSJjb25mLmVuYWJsZV9hdWRp
dApGQUxTRTJjb25mLmVuYWJsZV9hdXRob3JpemF0aW9uCFRSVUVGY29uZi5lbmFibGVfZXh0ZXJu
YWxfYXV0aGVudGljYXRpb24KRkFMU0UwY29uZi5lbmFibGVfZ3JhcGhfc2VydmVyCFRSVUUcY29u
Zi5lbmFibGVfaGEKRkFMU0UuY29uZi5lbmFibGVfaHR0cGRfcHJveHkKRkFMU0UcY29uZi5lbmFi
bGVfbWwKRkFMU0U2Y29uZi5lbmFibGVfb3BlbmdsX3JlbmRlcmVyCFRSVUVAY29uZi5lbmFibGVf
b3ZlcmxhcHBlZF9lcXVpX2pvaW4IVFJVRTRjb25mLmVuYWJsZV9wb3N0Z3Jlc19wcm94eQhUUlVF
PmNvbmYuZW5hYmxlX3ByZWRpY2F0ZV9lcXVpX2pvaW4IVFJVRSJjb25mLmVuYWJsZV9wcm9jcwhU
UlVFJGNvbmYuZW5hYmxlX3JldmVhbAhUUlVFMGNvbmYuZW5hYmxlX3N0YXRzX3NlcnZlcghUUlVF
LmNvbmYuZW5hYmxlX3RleHRfc2VhcmNoCFRSVUU8Y29uZi5lbmFibGVfdmVjdG9ydGlsZV9zZXJ2
aWNlCFRSVUUsY29uZi5lbmFibGVfdnJhbV9jYWNoZQhUUlVFPmNvbmYuZW5hYmxlX3dvcmtlcl9o
dHRwX3NlcnZlcnMIVFJVRTJjb25mLmV2ZW50X3NlcnZlcl9hZGRyZXNzGDE3Mi4zMS4zMy4zMDRj
b25mLmV2ZW50X3NlcnZlcl9pbnRlcm5hbApGQUxTRTpjb25mLmV4dGVybmFsX2ZpbGVzX2RpcmVj
dG9yeSAvbmZzL2RhdGEvcHVibGljFGNvbmYuZ21faXAYMTcyLjMxLjMzLjMwGmNvbmYuZ21fcG9y
dDEINTU1MiBjb25mLmdtX3B1Yl9wb3J0CDU1NTMoY29uZi5ncmFwaC5oZWFkX3BvcnQIODEwMC5j
b25mLmdyYXBoLnNlcnZlcjAuaG9zdApob3N0MC5jb25mLmdyYXBoLnNlcnZlcjAucG9ydAg4MTAx
OGNvbmYuZ3JhcGguc2VydmVyMC5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjEuaG9zdApo
b3N0MS5jb25mLmdyYXBoLnNlcnZlcjEucG9ydAg4MTAyOGNvbmYuZ3JhcGguc2VydmVyMS5yYW1f
bGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjIuaG9zdApob3N0Mi5jb25mLmdyYXBoLnNlcnZlcjIu
cG9ydAg4MTAzOGNvbmYuZ3JhcGguc2VydmVyMi5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZl
cjMuaG9zdApob3N0My5jb25mLmdyYXBoLnNlcnZlcjMucG9ydAg4MTA0OGNvbmYuZ3JhcGguc2Vy
dmVyMy5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjQuaG9zdApob3N0NC5jb25mLmdyYXBo
LnNlcnZlcjQucG9ydAg4MTA1OGNvbmYuZ3JhcGguc2VydmVyNC5yYW1fbGltaXQCMC5jb25mLmdy
YXBoLnNlcnZlcjUuaG9zdApob3N0NS5jb25mLmdyYXBoLnNlcnZlcjUucG9ydAg4MTA2OGNvbmYu
Z3JhcGguc2VydmVyNS5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjYuaG9zdApob3N0Ni5j
b25mLmdyYXBoLnNlcnZlcjYucG9ydAg4MTA3OGNvbmYuZ3JhcGguc2VydmVyNi5yYW1fbGltaXQC
MC5jb25mLmdyYXBoLnNlcnZlcjcuaG9zdApob3N0Ny5jb25mLmdyYXBoLnNlcnZlcjcucG9ydAg4
MTA4OGNvbmYuZ3JhcGguc2VydmVyNy5yYW1fbGltaXQCMBxjb25mLmhhX3F1ZXVlcwAuY29uZi5o
YV9yaW5nX2hlYWRfbm9kZXMAKGNvbmYuaGVhZF9pcF9hZGRyZXNzGDE3Mi4zMS4zMy4zMBxjb25m
LmhlYWRfcG9ydAg5MTkxImNvbmYuaG1faHR0cF9wb3J0CDkzMDA2Y29uZi5ob3N0MF9hY2NlcHRz
X2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDBfYWRkcmVzcxgxNzIuMzEuMzMuMzAeY29uZi5ob3N0
MF9ncHVzDjAsMSwyLDNEY29uZi5ob3N0MF9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8v
MTcyLjMxLjMzLjMwOjkzMDAsY29uZi5ob3N0MF9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMz
LjMwMmNvbmYuaG9zdDBfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMwLGNvbmYuaG9zdDBfcHVi
bGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMChjb25mLmhvc3QwX3JhbV9saW1pdBg2NTY3Njc3
OTk5OTk2Y29uZi5ob3N0MV9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDFfYWRkcmVz
cxgxNzIuMzEuMzMuMzEeY29uZi5ob3N0MV9ncHVzDjAsMSwyLDNEY29uZi5ob3N0MV9ob3N0X21h
bmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjMxOjkzMDAsY29uZi5ob3N0MV9wcml2
YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjMxMmNvbmYuaG9zdDFfcHVibGljX2FkZHJlc3MYMTcy
LjMxLjMzLjMxLGNvbmYuaG9zdDFfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMShjb25m
Lmhvc3QxX3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0Ml9hY2NlcHRzX2ZhaWxvdmVy
CkZBTFNFJGNvbmYuaG9zdDJfYWRkcmVzcxgxNzIuMzEuMzMuMzIeY29uZi5ob3N0Ml9ncHVzDjAs
MSwyLDNEY29uZi5ob3N0Ml9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMz
LjMyOjkzMDAsY29uZi5ob3N0Ml9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjMyMmNvbmYu
aG9zdDJfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMyLGNvbmYuaG9zdDJfcHVibGljX3VybHMm
aHR0cDovLzE3Mi4zMS4zMy4zMihjb25mLmhvc3QyX3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29u
Zi5ob3N0M19hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDNfYWRkcmVzcxgxNzIuMzEu
MzMuMzMeY29uZi5ob3N0M19ncHVzDjAsMSwyLDNEY29uZi5ob3N0M19ob3N0X21hbmFnZXJfcHVi
bGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjMzOjkzMDAsY29uZi5ob3N0M19wcml2YXRlX3VybCZo
dHRwOi8vMTcyLjMxLjMzLjMzMmNvbmYuaG9zdDNfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMz
LGNvbmYuaG9zdDNfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMyhjb25mLmhvc3QzX3Jh
bV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0NF9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNv
bmYuaG9zdDRfYWRkcmVzcxgxNzIuMzEuMzMuMzQeY29uZi5ob3N0NF9ncHVzDjAsMSwyLDNEY29u
Zi5ob3N0NF9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjM0OjkzMDAs
Y29uZi5ob3N0NF9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM0MmNvbmYuaG9zdDRfcHVi
bGljX2FkZHJlc3MYMTcyLjMxLjMzLjM0LGNvbmYuaG9zdDRfcHVibGljX3VybHMmaHR0cDovLzE3
Mi4zMS4zMy4zNChjb25mLmhvc3Q0X3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0NV9h
Y2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDVfYWRkcmVzcxgxNzIuMzEuMzMuMzUeY29u
Zi5ob3N0NV9ncHVzDjAsMSwyLDNEY29uZi5ob3N0NV9ob3N0X21hbmFnZXJfcHVibGljX3VybDBo
dHRwOi8vMTcyLjMxLjMzLjM1OjkzMDAsY29uZi5ob3N0NV9wcml2YXRlX3VybCZodHRwOi8vMTcy
LjMxLjMzLjM1MmNvbmYuaG9zdDVfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjM1LGNvbmYuaG9z
dDVfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zNShjb25mLmhvc3Q1X3JhbV9saW1pdBg2
NTY3Njc3OTk5OTk2Y29uZi5ob3N0Nl9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDZf
YWRkcmVzcxgxNzIuMzEuMzMuMzYeY29uZi5ob3N0Nl9ncHVzDjAsMSwyLDNEY29uZi5ob3N0Nl9o
b3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjM2OjkzMDAsY29uZi5ob3N0
Nl9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM2MmNvbmYuaG9zdDZfcHVibGljX2FkZHJl
c3MYMTcyLjMxLjMzLjM2LGNvbmYuaG9zdDZfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4z
Nihjb25mLmhvc3Q2X3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0N19hY2NlcHRzX2Zh
aWxvdmVyCkZBTFNFJGNvbmYuaG9zdDdfYWRkcmVzcxgxNzIuMzEuMzMuMzceY29uZi5ob3N0N19n
cHVzDjAsMSwyLDNEY29uZi5ob3N0N19ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcy
LjMxLjMzLjM3OjkzMDAsY29uZi5ob3N0N19wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM3
MmNvbmYuaG9zdDdfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjM3LGNvbmYuaG9zdDdfcHVibGlj
X3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zNyhjb25mLmhvc3Q3X3JhbV9saW1pdBg2NTY3Njc3OTk5
OTkqY29uZi5odHRwZF9wcm94eV9wb3J0CDgwODI0Y29uZi5odHRwZF9wcm94eV91c2VfaHR0cHMK
RkFMU0U4Y29uZi5pbml0X3dpdGhfbm93X2F0X3dvcmtlcgpGQUxTRSpjb25mLmthZmthLmJhdGNo
X3NpemUIMTAwMC5jb25mLmthZmthLnBvbGxfdGltZW91dAIwKGNvbmYua2Fma2Eud2FpdF90aW1l
BDMwLmNvbmYua2VybmVsX29tcF90aHJlYWRzAjQ0Y29uZi5sb2FkX3ZlY3RvcnNfb25fc3RhcnQS
b25fZGVtYW5kHmNvbmYubG9ja19hdWRpdApGQUxTRTZjb25mLm1heF9hdXRvX3ZpZXdfdXBkYXRv
cnMCMzZjb25mLm1heF9jb25jdXJyZW50X2tlcm5lbHMCMDJjb25mLm1heF9nZXRfcmVjb3Jkc19z
aXplCjIwMDAwKmNvbmYubWF4X2hlYXRtYXBfc2l6ZQgzMDcyKmNvbmYubWF4X2h0dHBfdGhyZWFk
cwY1MTI6Y29uZi5tZXRhZGF0YV9zdG9yZV9zeW5jX21vZGUMbm9ybWFsKmNvbmYubWluX2h0dHBf
dGhyZWFkcwI4MGNvbmYubWluX3Bhc3N3b3JkX2xlbmd0aAIwIGNvbmYubWxfYXBpX3BvcnQIOTE4
N1xjb25mLm5wMS5idWlsZF9tYXRlcmlhbGl6ZWRfdmlld3Nfb25fbWlncmF0aW9uDGFsd2F5c0hj
b25mLm5wMS5idWlsZF9wa19pbmRleF9vbl9taWdyYXRpb24MYWx3YXlzRGNvbmYubnAxLmNyaXRp
Y2FsX3Jlc3RhcnRfYXR0ZW1wdHMCMTpjb25mLm5wMS5lbmFibGVfaGVhZF9mYWlsb3ZlcgpGQUxT
RT5jb25mLm5wMS5lbmFibGVfd29ya2VyX2ZhaWxvdmVyCkZBTFNFSmNvbmYubnAxLmZhaWxvdmVy
X2Rpc3RyaWJ1dGlvbl9wb2xpY3kIZmlsbERjb25mLm5wMS5sb2FkX3ZlY3RvcnNfb25fbWlncmF0
aW9uDGFsd2F5c0xjb25mLm5wMS5ub25fY3JpdGljYWxfcmVzdGFydF9hdHRlbXB0cwIzPGNvbmYu
bnAxLnJhbmtfcmVzdGFydF9hdHRlbXB0cwIxMmNvbmYubnAxLnJlc3RhcnRfaW50ZXJ2YWwENjA2
Y29uZi5ucDEuc3RvcmFnZV9hcGlfc2NyaXB0AChjb25mLm51bWJlcl9vZl9ob3N0cwI4KGNvbmYu
bnVtYmVyX29mX3JhbmtzAjk8Y29uZi5vcGVuZ2xfYW50aWFsaWFzaW5nX2xldmVsAjAsY29uZi5w
ZXJzaXN0X2RpcmVjdG9yeTAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8sY29uZi5wZXJzaXN0X3N5
bmNfdGltZQI1NmNvbmYucG9pbnRfcmVuZGVyX3RocmVzaG9sZAwxMDAwMDBWY29uZi5wb3N0Z3Jl
c19wcm94eS5pZGxlX2Nvbm5lY3Rpb25fdGltZW91dAYzMDBUY29uZi5wb3N0Z3Jlc19wcm94eS5t
YXhfcXVldWVkX2Nvbm5lY3Rpb25zAjE+Y29uZi5wb3N0Z3Jlc19wcm94eS5tYXhfdGhyZWFkcwQ2
ND5jb25mLnBvc3RncmVzX3Byb3h5Lm1pbl90aHJlYWRzAjIwY29uZi5wb3N0Z3Jlc19wcm94eS5w
b3J0CDU0MzIuY29uZi5wb3N0Z3Jlc19wcm94eS5zc2wIVFJVRRxjb25mLnJhbmswX2dwdQIwKmNv
bmYucmFuazBfaXBfYWRkcmVzcxgxNzIuMzEuMzMuMzAqY29uZi5yYW5rMV9pcF9hZGRyZXNzGDE3
Mi4zMS4zMy4zMCpjb25mLnJhbmsyX2lwX2FkZHJlc3MYMTcyLjMxLjMzLjMxKmNvbmYucmFuazNf
aXBfYWRkcmVzcxgxNzIuMzEuMzMuMzIqY29uZi5yYW5rNF9pcF9hZGRyZXNzGDE3Mi4zMS4zMy4z
Mypjb25mLnJhbms1X2lwX2FkZHJlc3MYMTcyLjMxLjMzLjM0KmNvbmYucmFuazZfaXBfYWRkcmVz
cxgxNzIuMzEuMzMuMzUqY29uZi5yYW5rN19pcF9hZGRyZXNzGDE3Mi4zMS4zMy4zNipjb25mLnJh
bms4X2lwX2FkZHJlc3MYMTcyLjMxLjMzLjM3KGNvbmYucmVxdWVzdF90aW1lb3V0CDI0MDA2Y29u
Zi5yZXF1aXJlX2F1dGhlbnRpY2F0aW9uCFRSVUVeY29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0
Lm1heF9jcHVfY29uY3VycmVuY3kELTFaY29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0Lm1heF90
aWVyX3ByaW9yaXR5BDEwSmNvbmYucmVzb3VyY2VfZ3JvdXAuZGVmYXVsdC5yYW1fbGltaXQELTFa
Y29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0LnNjaGVkdWxlX3ByaW9yaXR5BDUwTGNvbmYucmVz
b3VyY2VfZ3JvdXAuZGVmYXVsdC52cmFtX2xpbWl0BC0xHGNvbmYucmluZ19uYW1lDmRlZmF1bHRW
Y29uZi5zZWN1cml0eS5leHRlcm5hbC5yYW5nZXIuY2FjaGVfbWludXRlcwQ2MFRjb25mLnNlY3Vy
aXR5LmV4dGVybmFsLnJhbmdlci5zZXJ2aWNlX25hbWUQa2luZXRpY2FCY29uZi5zZWN1cml0eS5l
eHRlcm5hbC5yYW5nZXIudXJsAGBjb25mLnNlY3VyaXR5LmV4dGVybmFsLnJhbmdlcl9hdXRob3Jp
emVyLmFkZHJlc3MyaXBjOi8vL3RtcC9ncHVkYi1yYW5nZXItMHRjb25mLnNlY3VyaXR5LmV4dGVy
bmFsLnJhbmdlcl9hdXRob3JpemVyLnJlbW90ZV9kZWJ1Z19wb3J0AjBgY29uZi5zZWN1cml0eS5l
eHRlcm5hbC5yYW5nZXJfYXV0aG9yaXplci50aW1lb3V0BjEyMCpjb25mLnNldF9tb25pdG9yX3Bv
cnQIOTAwMjZjb25mLnNldF9tb25pdG9yX3Byb3h5X3BvcnQIOTAwMzZjb25mLnNldF9tb25pdG9y
X3F1ZXVlX3NpemUIMTAwMChjb25mLnNoYWRvd19hZ2dfc2l6ZRI1MDAwMDAwMDAwY29uZi5zaGFk
b3dfY3ViZV9lbmFibGVkCFRSVUUuY29uZi5zaGFkb3dfZmlsdGVyX3NpemUSNTAwMDAwMDAwJmNv
bmYuc21fb21wX3RocmVhZHMCMiRjb25mLnNtc19kaXJlY3RvcnkwL21udC9kYXRhL2dwdWRiL3Bl
cnNpc3QvLmNvbmYuc21zX21heF9vcGVuX2ZpbGVzBjEyOEBjb25mLnNxbC5jb3N0X2Jhc2VkX29w
dGltaXphdGlvbgpGQUxTRTRjb25mLnNxbC5kaXN0cmlidXRlZF9qb2lucwhUUlVFPmNvbmYuc3Fs
LmRpc3RyaWJ1dGVkX29wZXJhdGlvbnMIVFJVRS5jb25mLnNxbC5lbmFibGVfcGxhbm5lcghUUlVF
NmNvbmYuc3FsLmZvcmNlX2JpbmFyeV9qb2lucwpGQUxTRTpjb25mLnNxbC5mb3JjZV9iaW5hcnlf
c2V0X29wcwpGQUxTRTZjb25mLnNxbC5tYXhfcGFyYWxsZWxfc3RlcHMCNEBjb25mLnNxbC5tYXhf
dmlld19uZXN0aW5nX2xldmVscwQxNjJjb25mLnNxbC5wYWdpbmdfdGFibGVfdHRsBDIwNmNvbmYu
c3FsLnBhcmFsbGVsX2V4ZWN1dGlvbghUUlVFMGNvbmYuc3FsLnBsYW5fY2FjaGVfc2l6ZQg0MDAw
MGNvbmYuc3FsLnBsYW5uZXIuYWRkcmVzcz5pcGM6Ly8vdG1wL2dwdWRiLXF1ZXJ5LWVuZ2luZS0w
NmNvbmYuc3FsLnBsYW5uZXIubWF4X21lbW9yeQg0MDk2NGNvbmYuc3FsLnBsYW5uZXIubWF4X3N0
YWNrAjZEY29uZi5zcWwucGxhbm5lci5yZW1vdGVfZGVidWdfcG9ydAIwMGNvbmYuc3FsLnBsYW5u
ZXIudGltZW91dAYxMjA0Y29uZi5zcWwucmVzdWx0X2NhY2hlX3NpemUINDAwMDRjb25mLnNxbC5y
ZXN1bHRzLmNhY2hlX3R0bAQ2MDBjb25mLnNxbC5yZXN1bHRzLmNhY2hpbmcIVFJVRUBjb25mLnNx
bC5ydWxlX2Jhc2VkX29wdGltaXphdGlvbghUUlVFPGNvbmYuc3VidGFza19jb25jdXJyZW5jeV9s
aW1pdAI0PmNvbmYuc3ltYm9sb2d5X3JlbmRlcl90aHJlc2hvbGQKMTAwMDBQY29uZi5zeXN0ZW1f
bWV0YWRhdGEuc3RhdHNfYWdncl9yb3djb3VudAoxMDAwMEhjb25mLnN5c3RlbV9tZXRhZGF0YS5z
dGF0c19hZ2dyX3RpbWUCMVJjb25mLnN5c3RlbV9tZXRhZGF0YS5zdGF0c19yZXRlbnRpb25fZGF5
cwQyMSZjb25mLnRhc2tjYWxjX2dwdS4xElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS4yElsw
LDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS4zElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS40
ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS41ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dw
dS42ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS43ElswLDEsMiwzXSZjb25mLnRhc2tjYWxj
X2dwdS44ElswLDEsMiwzXSBjb25mLnRjc19wZXJfdG9tBDQwJmNvbmYudGVtcF9kaXJlY3RvcnkI
L3RtcDJjb25mLnRleHRfaW5kZXhfZGlyZWN0b3J5MC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0LzJj
b25mLnRleHRfaW5kaWNlc19wZXJfdG9tAjJMY29uZi50aWVyLmRpc2swLmRlZmF1bHQuaGlnaF93
YXRlcm1hcmsEOTA6Y29uZi50aWVyLmRpc2swLmRlZmF1bHQubGltaXQYNjAwMDAwMDAwMDAwSmNv
bmYudGllci5kaXNrMC5kZWZhdWx0Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLmRpc2swLmRl
ZmF1bHQucGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlYGNvbmYudGllci5k
aXNrMC5kZWZhdWx0LnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRUhjb25mLnRpZXIuZGlz
azAucmFuazAuaGlnaF93YXRlcm1hcmsEOTA2Y29uZi50aWVyLmRpc2swLnJhbmswLmxpbWl0GDYw
MDAwMDAwMDAwMEZjb25mLnRpZXIuZGlzazAucmFuazAubG93X3dhdGVybWFyawQ4MDRjb25mLnRp
ZXIuZGlzazAucmFuazAucGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlXGNv
bmYudGllci5kaXNrMC5yYW5rMC5zdG9yZV9wZXJzaXN0ZW50X29iamVjdHMKRkFMU0VIY29uZi50
aWVyLmRpc2swLnJhbmsxLmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5kaXNrMC5yYW5rMS5s
aW1pdBg2MDAwMDAwMDAwMDBGY29uZi50aWVyLmRpc2swLnJhbmsxLmxvd193YXRlcm1hcmsEODA0
Y29uZi50aWVyLmRpc2swLnJhbmsxLnBhdGhEL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvL2Rpc2tj
YWNoZVxjb25mLnRpZXIuZGlzazAucmFuazEuc3RvcmVfcGVyc2lzdGVudF9vYmplY3RzCkZBTFNF
SGNvbmYudGllci5kaXNrMC5yYW5rMi5oaWdoX3dhdGVybWFyawQ5MDZjb25mLnRpZXIuZGlzazAu
cmFuazIubGltaXQYNjAwMDAwMDAwMDAwRmNvbmYudGllci5kaXNrMC5yYW5rMi5sb3dfd2F0ZXJt
YXJrBDgwNGNvbmYudGllci5kaXNrMC5yYW5rMi5wYXRoRC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0
Ly9kaXNrY2FjaGVcY29uZi50aWVyLmRpc2swLnJhbmsyLnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0
cwpGQUxTRUhjb25mLnRpZXIuZGlzazAucmFuazMuaGlnaF93YXRlcm1hcmsEOTA2Y29uZi50aWVy
LmRpc2swLnJhbmszLmxpbWl0GDYwMDAwMDAwMDAwMEZjb25mLnRpZXIuZGlzazAucmFuazMubG93
X3dhdGVybWFyawQ4MDRjb25mLnRpZXIuZGlzazAucmFuazMucGF0aEQvbW50L2RhdGEvZ3B1ZGIv
cGVyc2lzdC8vZGlza2NhY2hlXGNvbmYudGllci5kaXNrMC5yYW5rMy5zdG9yZV9wZXJzaXN0ZW50
X29iamVjdHMKRkFMU0VIY29uZi50aWVyLmRpc2swLnJhbms0LmhpZ2hfd2F0ZXJtYXJrBDkwNmNv
bmYudGllci5kaXNrMC5yYW5rNC5saW1pdBg2MDAwMDAwMDAwMDBGY29uZi50aWVyLmRpc2swLnJh
bms0Lmxvd193YXRlcm1hcmsEODA0Y29uZi50aWVyLmRpc2swLnJhbms0LnBhdGhEL21udC9kYXRh
L2dwdWRiL3BlcnNpc3QvL2Rpc2tjYWNoZVxjb25mLnRpZXIuZGlzazAucmFuazQuc3RvcmVfcGVy
c2lzdGVudF9vYmplY3RzCkZBTFNFSGNvbmYudGllci5kaXNrMC5yYW5rNS5oaWdoX3dhdGVybWFy
awQ5MDZjb25mLnRpZXIuZGlzazAucmFuazUubGltaXQYNjAwMDAwMDAwMDAwRmNvbmYudGllci5k
aXNrMC5yYW5rNS5sb3dfd2F0ZXJtYXJrBDgwNGNvbmYudGllci5kaXNrMC5yYW5rNS5wYXRoRC9t
bnQvZGF0YS9ncHVkYi9wZXJzaXN0Ly9kaXNrY2FjaGVcY29uZi50aWVyLmRpc2swLnJhbms1LnN0
b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRUhjb25mLnRpZXIuZGlzazAucmFuazYuaGlnaF93
YXRlcm1hcmsEOTA2Y29uZi50aWVyLmRpc2swLnJhbms2LmxpbWl0GDYwMDAwMDAwMDAwMEZjb25m
LnRpZXIuZGlzazAucmFuazYubG93X3dhdGVybWFyawQ4MDRjb25mLnRpZXIuZGlzazAucmFuazYu
cGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlXGNvbmYudGllci5kaXNrMC5y
YW5rNi5zdG9yZV9wZXJzaXN0ZW50X29iamVjdHMKRkFMU0VIY29uZi50aWVyLmRpc2swLnJhbms3
LmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5kaXNrMC5yYW5rNy5saW1pdBg2MDAwMDAwMDAw
MDBGY29uZi50aWVyLmRpc2swLnJhbms3Lmxvd193YXRlcm1hcmsEODA0Y29uZi50aWVyLmRpc2sw
LnJhbms3LnBhdGhEL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvL2Rpc2tjYWNoZVxjb25mLnRpZXIu
ZGlzazAucmFuazcuc3RvcmVfcGVyc2lzdGVudF9vYmplY3RzCkZBTFNFSGNvbmYudGllci5kaXNr
MC5yYW5rOC5oaWdoX3dhdGVybWFyawQ5MDZjb25mLnRpZXIuZGlzazAucmFuazgubGltaXQYNjAw
MDAwMDAwMDAwRmNvbmYudGllci5kaXNrMC5yYW5rOC5sb3dfd2F0ZXJtYXJrBDgwNGNvbmYudGll
ci5kaXNrMC5yYW5rOC5wYXRoRC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Ly9kaXNrY2FjaGVcY29u
Zi50aWVyLmRpc2swLnJhbms4LnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRVBjb25mLnRp
ZXIuZ2xvYmFsLmNvbmN1cnJlbnRfd2FpdF90aW1lb3V0BjYwMGpjb25mLnRpZXIuZ2xvYmFsLmRl
ZmVyX2NhY2hlX29iamVjdF9ldmljdGlvbnNfdG9fZGlzawhUUlVFUGNvbmYudGllci5wZXJzaXN0
LmRlZmF1bHQuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnBlcnNpc3QuZGVmYXVsdC5saW1p
dBo2NTAwMDAwMDAwMDAwTmNvbmYudGllci5wZXJzaXN0LmRlZmF1bHQubG93X3dhdGVybWFyawQ4
MDxjb25mLnRpZXIucGVyc2lzdC5kZWZhdWx0LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3Qv
OmNvbmYudGllci5wZXJzaXN0LmdyYXBoMC5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpj
b25mLnRpZXIucGVyc2lzdC5ncmFwaDEucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC86Y29u
Zi50aWVyLnBlcnNpc3QuZ3JhcGgyLnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvOmNvbmYu
dGllci5wZXJzaXN0LmdyYXBoMy5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpjb25mLnRp
ZXIucGVyc2lzdC5ncmFwaDQucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC86Y29uZi50aWVy
LnBlcnNpc3QuZ3JhcGg1LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvOmNvbmYudGllci5w
ZXJzaXN0LmdyYXBoNi5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpjb25mLnRpZXIucGVy
c2lzdC5ncmFwaDcucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBlcnNp
c3QucmFuazAuaGlnaF93YXRlcm1hcmsEOTA6Y29uZi50aWVyLnBlcnNpc3QucmFuazAubGltaXQa
NjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVyc2lzdC5yYW5rMC5sb3dfd2F0ZXJtYXJrBDgwOGNv
bmYudGllci5wZXJzaXN0LnJhbmswLnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNvbmYu
dGllci5wZXJzaXN0LnJhbmsxLmhpZ2hfd2F0ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0LnJh
bmsxLmxpbWl0GjY1MDAwMDAwMDAwMDBKY29uZi50aWVyLnBlcnNpc3QucmFuazEubG93X3dhdGVy
bWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5yYW5rMS5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJz
aXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5rMi5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRpZXIu
cGVyc2lzdC5yYW5rMi5saW1pdBo2NTAwMDAwMDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJhbmsy
Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLnBlcnNpc3QucmFuazIucGF0aDAvbW50L2RhdGEv
Z3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBlcnNpc3QucmFuazMuaGlnaF93YXRlcm1hcmsEOTA6
Y29uZi50aWVyLnBlcnNpc3QucmFuazMubGltaXQaNjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVy
c2lzdC5yYW5rMy5sb3dfd2F0ZXJtYXJrBDgwOGNvbmYudGllci5wZXJzaXN0LnJhbmszLnBhdGgw
L21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNvbmYudGllci5wZXJzaXN0LnJhbms0LmhpZ2hfd2F0
ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0LnJhbms0LmxpbWl0GjY1MDAwMDAwMDAwMDBKY29u
Zi50aWVyLnBlcnNpc3QucmFuazQubG93X3dhdGVybWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5y
YW5rNC5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5r
NS5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRpZXIucGVyc2lzdC5yYW5rNS5saW1pdBo2NTAwMDAw
MDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJhbms1Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVy
LnBlcnNpc3QucmFuazUucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBl
cnNpc3QucmFuazYuaGlnaF93YXRlcm1hcmsEOTA6Y29uZi50aWVyLnBlcnNpc3QucmFuazYubGlt
aXQaNjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVyc2lzdC5yYW5rNi5sb3dfd2F0ZXJtYXJrBDgw
OGNvbmYudGllci5wZXJzaXN0LnJhbms2LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNv
bmYudGllci5wZXJzaXN0LnJhbms3LmhpZ2hfd2F0ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0
LnJhbms3LmxpbWl0GjY1MDAwMDAwMDAwMDBKY29uZi50aWVyLnBlcnNpc3QucmFuazcubG93X3dh
dGVybWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5yYW5rNy5wYXRoMC9tbnQvZGF0YS9ncHVkYi9w
ZXJzaXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5rOC5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRp
ZXIucGVyc2lzdC5yYW5rOC5saW1pdBo2NTAwMDAwMDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJh
bms4Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLnBlcnNpc3QucmFuazgucGF0aDAvbW50L2Rh
dGEvZ3B1ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDEucGF0aDAvbW50L2RhdGEv
Z3B1ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDIucGF0aDAvbW50L2RhdGEvZ3B1
ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDMucGF0aDAvbW50L2RhdGEvZ3B1ZGIv
cGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDQucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVy
c2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDUucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lz
dC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDYucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC84
Y29uZi50aWVyLnBlcnNpc3QudGV4dDcucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC84Y29u
Zi50aWVyLnBlcnNpc3QudGV4dDgucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9IY29uZi50
aWVyLnJhbS5kZWZhdWx0LmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5yYW0uZGVmYXVsdC5s
aW1pdAQtMUZjb25mLnRpZXIucmFtLmRlZmF1bHQubG93X3dhdGVybWFyawQ4MERjb25mLnRpZXIu
cmFtLnJhbmswLmhpZ2hfd2F0ZXJtYXJrBDkwMmNvbmYudGllci5yYW0ucmFuazAubGltaXQWNzcy
NjY4MDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMC5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazEuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMS5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMS5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazIuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMi5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMi5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazMuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMy5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMy5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazQuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNC5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNC5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazUuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNS5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNS5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazYuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNi5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNi5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazcuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNy5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNy5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazguaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rOC5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rOC5sb3dfd2F0ZXJtYXJrBDgwXGNvbmYudGllci52
cmFtLmRlZmF1bHQuYWxsX2dwdXMuaGlnaF93YXRlcm1hcmsEOTBKY29uZi50aWVyLnZyYW0uZGVm
YXVsdC5hbGxfZ3B1cy5saW1pdAQtMVpjb25mLnRpZXIudnJhbS5kZWZhdWx0LmFsbF9ncHVzLmxv
d193YXRlcm1hcmsEODBUY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUwLmhpZ2hfd2F0ZXJtYXJr
BDkwQmNvbmYudGllci52cmFtLmRlZmF1bHQuZ3B1MC5saW1pdAQtMVJjb25mLnRpZXIudnJhbS5k
ZWZhdWx0LmdwdTAubG93X3dhdGVybWFyawQ4MFRjb25mLnRpZXIudnJhbS5kZWZhdWx0LmdwdTEu
aGlnaF93YXRlcm1hcmsEOTBCY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUxLmxpbWl0BC0xUmNv
bmYudGllci52cmFtLmRlZmF1bHQuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwVGNvbmYudGllci52cmFt
LmRlZmF1bHQuZ3B1Mi5oaWdoX3dhdGVybWFyawQ5MEJjb25mLnRpZXIudnJhbS5kZWZhdWx0Lmdw
dTIubGltaXQELTFSY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUyLmxvd193YXRlcm1hcmsEODBU
Y29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUzLmhpZ2hfd2F0ZXJtYXJrBDkwQmNvbmYudGllci52
cmFtLmRlZmF1bHQuZ3B1My5saW1pdAQtMVJjb25mLnRpZXIudnJhbS5kZWZhdWx0LmdwdTMubG93
X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMC5HUFUwLmhpZ2hfd2F0ZXJtYXJrBDkw
PmNvbmYudGllci52cmFtLnJhbmswLkdQVTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazAu
R1BVMC5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbmswLmFsbF9ncHVzLmhpZ2hf
d2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbmswLmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYu
dGllci52cmFtLnJhbmswLmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0u
cmFuazAuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMC5ncHUwLmxp
bWl0BC0xTmNvbmYudGllci52cmFtLnJhbmswLmdwdTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRp
ZXIudnJhbS5yYW5rMC5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsw
LmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazAuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgw
UGNvbmYudGllci52cmFtLnJhbmswLmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZy
YW0ucmFuazAuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMC5ncHUyLmxvd193YXRl
cm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazAuZ3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25m
LnRpZXIudnJhbS5yYW5rMC5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmswLmdwdTMu
bG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMS5HUFUwLmhpZ2hfd2F0ZXJtYXJr
BDkwPmNvbmYudGllci52cmFtLnJhbmsxLkdQVTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFu
azEuR1BVMC5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsxLkdQVTEuaGlnaF93
YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazEuR1BVMS5saW1pdAQtMU5jb25mLnRpZXIu
dnJhbS5yYW5rMS5HUFUxLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEuR1BV
Mi5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMS5HUFUyLmxpbWl0BC0xTmNv
bmYudGllci52cmFtLnJhbmsxLkdQVTIubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5y
YW5rMS5HUFUzLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsxLkdQVTMubGlt
aXQELTFOY29uZi50aWVyLnZyYW0ucmFuazEuR1BVMy5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGll
ci52cmFtLnJhbmsxLmFsbF9ncHVzLmhpZ2hfd2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJh
bmsxLmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYudGllci52cmFtLnJhbmsxLmFsbF9ncHVzLmxvd193
YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5j
b25mLnRpZXIudnJhbS5yYW5rMS5ncHUwLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmsxLmdw
dTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMS5ncHUxLmhpZ2hfd2F0ZXJt
YXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsxLmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0u
cmFuazEuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsxLmdwdTIuaGln
aF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazEuZ3B1Mi5saW1pdAQtMU5jb25mLnRp
ZXIudnJhbS5yYW5rMS5ncHUyLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEu
Z3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMS5ncHUzLmxpbWl0BC0x
TmNvbmYudGllci52cmFtLnJhbmsxLmdwdTMubG93X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJh
bS5yYW5rMi5hbGxfZ3B1cy5oaWdoX3dhdGVybWFyawQ5MEZjb25mLnRpZXIudnJhbS5yYW5rMi5h
bGxfZ3B1cy5saW1pdAQtMVZjb25mLnRpZXIudnJhbS5yYW5rMi5hbGxfZ3B1cy5sb3dfd2F0ZXJt
YXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsyLmdwdTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50
aWVyLnZyYW0ucmFuazIuZ3B1MC5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMi5ncHUwLmxv
d193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazIuZ3B1MS5oaWdoX3dhdGVybWFyawQ5
MD5jb25mLnRpZXIudnJhbS5yYW5rMi5ncHUxLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmsy
LmdwdTEubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMi5ncHUyLmhpZ2hfd2F0
ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsyLmdwdTIubGltaXQELTFOY29uZi50aWVyLnZy
YW0ucmFuazIuZ3B1Mi5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsyLmdwdTMu
aGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazIuZ3B1My5saW1pdAQtMU5jb25m
LnRpZXIudnJhbS5yYW5rMi5ncHUzLmxvd193YXRlcm1hcmsEODBYY29uZi50aWVyLnZyYW0ucmFu
azMuYWxsX2dwdXMuaGlnaF93YXRlcm1hcmsEOTBGY29uZi50aWVyLnZyYW0ucmFuazMuYWxsX2dw
dXMubGltaXQELTFWY29uZi50aWVyLnZyYW0ucmFuazMuYWxsX2dwdXMubG93X3dhdGVybWFyawQ4
MFBjb25mLnRpZXIudnJhbS5yYW5rMy5ncHUwLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52
cmFtLnJhbmszLmdwdTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazMuZ3B1MC5sb3dfd2F0
ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmszLmdwdTEuaGlnaF93YXRlcm1hcmsEOTA+Y29u
Zi50aWVyLnZyYW0ucmFuazMuZ3B1MS5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMy5ncHUx
Lmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazMuZ3B1Mi5oaWdoX3dhdGVybWFy
awQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMy5ncHUyLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJh
bmszLmdwdTIubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMy5ncHUzLmhpZ2hf
d2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmszLmdwdTMubGltaXQELTFOY29uZi50aWVy
LnZyYW0ucmFuazMuZ3B1My5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbms0LmFs
bF9ncHVzLmhpZ2hfd2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbms0LmFsbF9ncHVzLmxp
bWl0BC0xVmNvbmYudGllci52cmFtLnJhbms0LmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29u
Zi50aWVyLnZyYW0ucmFuazQuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5y
YW5rNC5ncHUwLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms0LmdwdTAubG93X3dhdGVybWFy
awQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNC5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGll
ci52cmFtLnJhbms0LmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazQuZ3B1MS5sb3df
d2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms0LmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+
Y29uZi50aWVyLnZyYW0ucmFuazQuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNC5n
cHUyLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazQuZ3B1My5oaWdoX3dhdGVy
bWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rNC5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFt
LnJhbms0LmdwdTMubG93X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1
cy5oaWdoX3dhdGVybWFyawQ5MEZjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1cy5saW1pdAQt
MVZjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1cy5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGll
ci52cmFtLnJhbms1LmdwdTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazUu
Z3B1MC5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNS5ncHUwLmxvd193YXRlcm1hcmsEODBQ
Y29uZi50aWVyLnZyYW0ucmFuazUuZ3B1MS5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJh
bS5yYW5rNS5ncHUxLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms1LmdwdTEubG93X3dhdGVy
bWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNS5ncHUyLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYu
dGllci52cmFtLnJhbms1LmdwdTIubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazUuZ3B1Mi5s
b3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms1LmdwdTMuaGlnaF93YXRlcm1hcmsE
OTA+Y29uZi50aWVyLnZyYW0ucmFuazUuZ3B1My5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5r
NS5ncHUzLmxvd193YXRlcm1hcmsEODBYY29uZi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMuaGln
aF93YXRlcm1hcmsEOTBGY29uZi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMubGltaXQELTFWY29u
Zi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJh
bS5yYW5rNi5ncHUwLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms2LmdwdTAu
bGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1MC5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYu
dGllci52cmFtLnJhbms2LmdwdTEuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFu
azYuZ3B1MS5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNi5ncHUxLmxvd193YXRlcm1hcmsE
ODBQY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1Mi5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIu
dnJhbS5yYW5rNi5ncHUyLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms2LmdwdTIubG93X3dh
dGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNi5ncHUzLmhpZ2hfd2F0ZXJtYXJrBDkwPmNv
bmYudGllci52cmFtLnJhbms2LmdwdTMubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1
My5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbms3LmFsbF9ncHVzLmhpZ2hfd2F0
ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbms3LmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYudGll
ci52cmFtLnJhbms3LmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFu
azcuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rNy5ncHUwLmxpbWl0
BC0xTmNvbmYudGllci52cmFtLnJhbms3LmdwdTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIu
dnJhbS5yYW5rNy5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms3Lmdw
dTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazcuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwUGNv
bmYudGllci52cmFtLnJhbms3LmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0u
cmFuazcuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNy5ncHUyLmxvd193YXRlcm1h
cmsEODBQY29uZi50aWVyLnZyYW0ucmFuazcuZ3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRp
ZXIudnJhbS5yYW5rNy5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms3LmdwdTMubG93
X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJhbS5yYW5rOC5hbGxfZ3B1cy5oaWdoX3dhdGVybWFy
awQ5MEZjb25mLnRpZXIudnJhbS5yYW5rOC5hbGxfZ3B1cy5saW1pdAQtMVZjb25mLnRpZXIudnJh
bS5yYW5rOC5hbGxfZ3B1cy5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms4Lmdw
dTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazguZ3B1MC5saW1pdAQtMU5j
b25mLnRpZXIudnJhbS5yYW5rOC5ncHUwLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0u
cmFuazguZ3B1MS5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rOC5ncHUxLmxp
bWl0BC0xTmNvbmYudGllci52cmFtLnJhbms4LmdwdTEubG93X3dhdGVybWFyawQ4MFBjb25mLnRp
ZXIudnJhbS5yYW5rOC5ncHUyLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms4
LmdwdTIubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazguZ3B1Mi5sb3dfd2F0ZXJtYXJrBDgw
UGNvbmYudGllci52cmFtLnJhbms4LmdwdTMuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZy
YW0ucmFuazguZ3B1My5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rOC5ncHUzLmxvd193YXRl
cm1hcmsEODA0Y29uZi50aWVyX3N0cmF0ZWd5LmRlZmF1bHRCVlJBTSAyLCBSQU0gNSwgRElTSzAg
NSwgUEVSU0lTVCA1YGNvbmYudGllcl9zdHJhdGVneS5wcmVkaWNhdGVfZXZhbHVhdGlvbl9pbnRl
cnZhbAQ2MCRjb25mLnRvbXNfcGVyX3JhbmsCMSBjb25mLnRwc19wZXJfdG9tBDQwImNvbmYudHJp
Z2dlcl9wb3J0CDkwMDE+Y29uZi51bmlmaWVkX3NlY3VyaXR5X25hbWVzcGFjZQpGQUxTRTpjb25m
LnVzZV9leHRlcm5hbF90ZXh0X3NlcnZlcghUUlVFHGNvbmYudXNlX2h0dHBzCkZBTFNFLGNvbmYu
dmlkZW9fZGVmYXVsdF90dGwELTEoY29uZi52aWRlb19tYXhfY291bnQELTEyY29uZi52aWRlb190
ZW1wX2RpcmVjdG9yeSwvdG1wL2dwdWRiLXRlbXAtdmlkZW9zImNvbmYud2FsLmNoZWNrc3VtCFRS
VUUwY29uZi53YWwuZmx1c2hfZnJlcXVlbmN5BDYwMmNvbmYud2FsLm1heF9zZWdtZW50X3NpemUS
NTAwMDAwMDAwLGNvbmYud2FsLnNlZ21lbnRfY291bnQELTEoY29uZi53YWwuc3luY19wb2xpY3kK
Zmx1c2g2Y29uZi53b3JrZXJfaHR0cF9zZXJ2ZXJfaXBz6AExNzIuMzEuMzMuMzA7MTcyLjMxLjMz
LjMwOzE3Mi4zMS4zMy4zMTsxNzIuMzEuMzMuMzI7MTcyLjMxLjMzLjMzOzE3Mi4zMS4zMy4zNDsx
NzIuMzEuMzMuMzU7MTcyLjMxLjMzLjM2OzE3Mi4zMS4zMy4zNzpjb25mLndvcmtlcl9odHRwX3Nl
cnZlcl9wb3J0c1g5MTkxOzkxOTI7OTE5Mzs5MTk0OzkxOTU7OTE5Njs5MTk3OzkxOTg7OTE5OThj
b25mLndvcmtlcl9odHRwX3NlcnZlcl91cmxzwANodHRwOi8vMTcyLjMxLjMzLjMwOjkxOTE7aHR0
cDovLzE3Mi4zMS4zMy4zMDo5MTkyO2h0dHA6Ly8xNzIuMzEuMzMuMzE6OTE5MztodHRwOi8vMTcy
LjMxLjMzLjMyOjkxOTQ7aHR0cDovLzE3Mi4zMS4zMy4zMzo5MTk1O2h0dHA6Ly8xNzIuMzEuMzMu
MzQ6OTE5NjtodHRwOi8vMTcyLjMxLjMzLjM1OjkxOTc7aHR0cDovLzE3Mi4zMS4zMy4zNjo5MTk4
O2h0dHA6Ly8xNzIuMzEuMzMuMzc6OTE5OUhjb25mLndvcmtlcl9odHRwX3NlcnZlcl91cmxzX3By
aXZhdGXAA2h0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5MTtodHRwOi8vMTcyLjMxLjMzLjMwOjkxOTI7
aHR0cDovLzE3Mi4zMS4zMy4zMTo5MTkzO2h0dHA6Ly8xNzIuMzEuMzMuMzI6OTE5NDtodHRwOi8v
MTcyLjMxLjMzLjMzOjkxOTU7aHR0cDovLzE3Mi4zMS4zMy4zNDo5MTk2O2h0dHA6Ly8xNzIuMzEu
MzMuMzU6OTE5NztodHRwOi8vMTcyLjMxLjMzLjM2OjkxOTg7aHR0cDovLzE3Mi4zMS4zMy4zNzo5
MTk5KHN5c3RlbS5mb250X2ZhbWlsaWVzpgFEZWphVnUgTWF0aCBUZVggR3lyZSxEZWphVnUgU2Fu
cyBNb25vLERlamFWdSBTYW5zLERlamFWdSBTZXJpZixTYW5zLFNlcmlmLE1vbm9zcGFjZTB2ZXJz
aW9uLmdwdWRiX2J1aWxkX2RhdGUoRmViIDE0IDIwMjQgMjM6NDk6MDFAdmVyc2lvbi5ncHVkYl9j
b21wdXRlX2NhcGFiaWxpdHkWNjA7NzA7ODA7ODY4dmVyc2lvbi5ncHVkYl9jb21wdXRlX2VuZ2lu
ZQhDVURBPnZlcnNpb24uZ3B1ZGJfY29yZV9saWJzX3ZlcnNpb24UMjAyNDAyMTMwMDR2ZXJzaW9u
LmdwdWRiX2NvcmVfdmVyc2lvbiw3LjIuMC4xLjIwMjQwMjE0MjEwOTA2NHZlcnNpb24uZ3B1ZGJf
ZmlsZV92ZXJzaW9uFDIwMjEwMzExMjAqdmVyc2lvbi5ncHVkYl92ZXJzaW9uUDkyMjYwYTMyOWNh
NDVjYjBlMzc3NzZjZjkxNDQ5NzE3OWY2MjExNDM0dmVyc2lvbi5ncHVkYl92ZXJzaW9uX2RhdGUy
MjAyNC0wMi0xNCAyMTowOTowNiAtMDUwMCx2ZXJzaW9uLnB5dGhvbl92ZXJzaW9uDjMuMTAuMTMA
AAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:37 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00057'
status:
code: 200
message: OK
- request:
body: !!binary |
hAFHRU5FUkFURSBQUk9NUFQgV0lUSCBPUFRJT05TIChDT05URVhUX05BTUVTID0gJ2RlbW8udGVz
dF9sbG1fY3R4JykAAgxiaW5hcnkAAAA=
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/execute/sql
response:
body:
string: !!binary |
BE9LAChleGVjdXRlX3NxbF9yZXNwb25zZe4MAPYDeyJuYW1lIjoiZ2VuZXJpY19yZXNwb25zZSIs
InR5cGUiOiJyZWNvcmQiLCJmaWVsZHMiOlt7Im5hbWUiOiJjb2x1bW5fMSIsInR5cGUiOnsidHlw
ZSI6ImFycmF5IiwiaXRlbXMiOiJzdHJpbmcifX0seyJuYW1lIjoiY29sdW1uX2hlYWRlcnMiLCJ0
eXBlIjp7InR5cGUiOiJhcnJheSIsIml0ZW1zIjoic3RyaW5nIn19LHsibmFtZSI6ImNvbHVtbl9k
YXRhdHlwZXMiLCJ0eXBlIjp7InR5cGUiOiJhcnJheSIsIml0ZW1zIjoic3RyaW5nIn19XX2qBwL+
BnsicGF5bG9hZCI6eyJjb250ZXh0IjpbeyJ0YWJsZSI6ImRlbW8udGVzdF9wcm9maWxlcyIsImNv
bHVtbnMiOlsidXNlcm5hbWUgVkFSQ0hBUiAoMzIpIE5PVCBOVUxMIiwibmFtZSBWQVJDSEFSICgz
MikgTk9UIE5VTEwiLCJzZXggVkFSQ0hBUiAoMSkgTk9UIE5VTEwiLCJhZGRyZXNzIFZBUkNIQVIg
KDY0KSBOT1QgTlVMTCIsIm1haWwgVkFSQ0hBUiAoMzIpIE5PVCBOVUxMIiwiYmlydGhkYXRlIFRJ
TUVTVEFNUCBOT1QgTlVMTCJdLCJkZXNjcmlwdGlvbiI6IkNvbnRhaW5zIHVzZXIgcHJvZmlsZXMu
IiwicnVsZXMiOltdfSx7InNhbXBsZXMiOnsiSG93IG1hbnkgbWFsZSB1c2VycyBhcmUgdGhlcmU/
Ijoic2VsZWN0IGNvdW50KDEpIGFzIG51bV91c2Vyc1xuICAgICAgICAgICAgZnJvbSBkZW1vLnRl
c3RfcHJvZmlsZXNcbiAgICAgICAgICAgIHdoZXJlIHNleCA9ICcnTScnOyJ9fV19fQACDFByb21w
dAACDHN0cmluZwAAAgAACCBYLUtpbmV0aWNhLUdyb3VwBkRETApjb3VudAIwGmxhc3RfZW5kcG9p
bnQsL2dlbmVyYXRlL3NxbC9pbnRlcm5hbC50b3RhbF9udW1iZXJfb2ZfcmVjb3JkcwIwAAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:37 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-Kinetica-Group:
- DDL
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00666'
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "system", "content": "CREATE TABLE demo.test_profiles
AS\n(\n username VARCHAR (32) NOT NULL,\n name VARCHAR (32) NOT NULL,\n sex
VARCHAR (1) NOT NULL,\n address VARCHAR (64) NOT NULL,\n mail VARCHAR (32)
NOT NULL,\n birthdate TIMESTAMP NOT NULL\n);\nCOMMENT ON TABLE demo.test_profiles
IS ''Contains user profiles.'';"}, {"role": "user", "content": "How many male
users are there?"}, {"role": "assistant", "content": "select count(1) as num_users\n from
demo.test_profiles\n where sex = ''M'';"}, {"role": "user", "content":
"What are the female users ordered by username?"}]}'
headers:
Accept:
- text/plain
Content-type:
- application/json
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/chat/completions
response:
body:
string: '{"status":"OK","data":{"status":"OK","data":{"id":"chatCompl-1708561598","object":"chat.completion","created":1708561598,"model":"sqlassist-1.1","choices":[{"index":0,"message":{"role":"assistant","content":"SELECT
username, name\n FROM demo.test_profiles\n WHERE sex
= ''F''\n ORDER BY username;"},"finish_reason":"stop"}],"usage":{"prompt_tokens":140,"completion_tokens":22,"total_tokens":162},"prompt":"What
are the female users ordered by username?"}}}'
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/json
Date:
- Thu, 22 Feb 2024 00:26:37 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.90840'
status:
code: 200
message: OK
version: 1
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/cassettes/TestChatKinetica.test_load_context.yaml | interactions:
- request:
body: "\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/system/status
response:
body:
string: !!binary |
BE9LADZzaG93X3N5c3RlbV9zdGF0dXNfcmVzcG9uc2X8ugEUCmdyYXBovAl7ImNvdW50Ijo4LCJz
dGF0dXMiOlt7InZlcnNpb24iOjM5LCJzZXJ2ZXJfaWQiOjAsImhvc3RfaWQiOiIxNzIuMzEuMzMu
MzAiLCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjoxLCJob3N0
X2lkIjoiMTcyLjMxLjMzLjMxIiwic3RhdHVzIjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzksInNl
cnZlcl9pZCI6MiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zMiIsInN0YXR1cyI6InJ1bm5pbmcifSx7
InZlcnNpb24iOjM5LCJzZXJ2ZXJfaWQiOjMsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzMiLCJzdGF0
dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjo0LCJob3N0X2lkIjoiMTcy
LjMxLjMzLjM0Iiwic3RhdHVzIjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzksInNlcnZlcl9pZCI6
NSwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zNSIsInN0YXR1cyI6InJ1bm5pbmcifSx7InZlcnNpb24i
OjM5LCJzZXJ2ZXJfaWQiOjYsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzYiLCJzdGF0dXMiOiJydW5u
aW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjo3LCJob3N0X2lkIjoiMTcyLjMxLjMzLjM3
Iiwic3RhdHVzIjoicnVubmluZyJ9XX0eaGFfY2x1c3Rlcl9pbmZvzAN7ImhhX3JhbmtzX2luZm8i
Olt7InByaXZhdGVfdXJsIjoiaHR0cDovLzE3Mi4zMS4zMy4zMDo5MTkxIiwicHVibGljX3VybCI6
Imh0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5MSIsImhhX3VybF9vdmVycmlkZSI6Imh0dHA6Ly8xNzIu
MzEuMzMuMzA6OTE5MSIsImFsdGVybmF0ZV91cmxzIjpbImh0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5
MSJdfV0sInJlYWR5Ijp0cnVlLCJuYW1lIjoicHJvZHVjdGlvbi1uZXcifQpob3N0c/hLeyJjb3Vu
dCI6OCwic3RhdHVzIjpbeyJ2ZXJzaW9uIjoxMzMsImhvc3RfbnVtYmVyIjowLCJpZCI6IjE3Mi4z
MS4zMy4zMCIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzAtdjEwMCIsInN0YXR1cyI6InJ1bm5pbmci
LCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRzX3N0YXR1cyI6InJ1bm5pbmciLCJtbF9z
dGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJfc3RhdHVzIjoicnVubmluZyIsInJldmVh
bF9zdGF0dXMiOiJydW5uaW5nIiwidG90YWxfbWVtb3J5Ijo4MTAyMDEyNjAwMzIsInN0YXJ0X3Rp
bWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3Mi4zMS4zMy4zMCJdLCJncHVfaWRzIjpb
IkdQVS1hMDdjMGU0OC05NDhhLTQwNWMtNDNiNy03Mzc0ZDJkZGMwOGMiLCJHUFUtNTNlMWI3YjQt
NjhiOC1hMjhjLTEwZjMtZGI2YTA1OTdmYmI0IiwiR1BVLTQxZmY5MWZiLWVjYjktMGE1Yi1kNDdj
LWQ1YmI3ZWYxMDM4YSIsIkdQVS1hOTQwYjEyNi1iMWE0LTlmMDctNDRlMS02MzZjMzc5ODllY2Yi
XSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9zdF9yb2xlIjoibGVhZGVyIiwiaG9zdF90
ZXJtIjowLCJob3N0X2VsZWN0aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9u
IjoxMTMsImhvc3RfbnVtYmVyIjoxLCJpZCI6IjE3Mi4zMS4zMy4zMSIsImhvc3RuYW1lIjoiMzAw
LTMwMy11MzEtdjEwMCIsInN0YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxl
ZCIsInN0YXRzX3N0YXR1cyI6InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5
X3BsYW5uZXJfc3RhdHVzIjoic3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90
YWxfbWVtb3J5Ijo4MTAyMDEyMzU0NTYsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtf
aXBzIjpbIjE3Mi4zMS4zMy4zMSJdLCJncHVfaWRzIjpbIkdQVS0xMzJmMWRkYi1hNWY0LTIwZDMt
MTEyMi02ZDM1OTlhZmNmMWQiLCJHUFUtM2JjN2JmY2QtOTVhOS1hNjI2LThlOGYtMzdiOTcxOWFh
OWRkIiwiR1BVLWZlMTBhZDNkLTg4M2QtYTU5MC1kNDA1LWUwYTU2OTNiMGFmMCIsIkdQVS02Yjgy
OTY5OS0wYjRjLTEyZjAtMGMyOC04Y2Y0NmMyNGMxODUiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZh
bHNlIiwiaG9zdF9yb2xlIjoiZm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25f
c3RhdHVzIjoibGVhZGVyX2VsZWN0ZWQifSx7InZlcnNpb24iOjExMywiaG9zdF9udW1iZXIiOjIs
ImlkIjoiMTcyLjMxLjMzLjMyIiwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzMi12MTAwIiwic3RhdHVz
IjoicnVubmluZyIsImh0dHBkX3N0YXR1cyI6ImRpc2FibGVkIiwic3RhdHNfc3RhdHVzIjoicnVu
bmluZyIsIm1sX3N0YXR1cyI6ImRpc2FibGVkIiwicXVlcnlfcGxhbm5lcl9zdGF0dXMiOiJzdG9w
cGVkIiwicmV2ZWFsX3N0YXR1cyI6InN0b3BwZWQiLCJ0b3RhbF9tZW1vcnkiOjgxMDIwMTIzMTM2
MCwic3RhcnRfdGltZSI6MTcwODEwMzQ1MiwibmV0d29ya19pcHMiOlsiMTcyLjMxLjMzLjMyIl0s
ImdwdV9pZHMiOlsiR1BVLTA5MGM0NDMwLWRiYmUtMWUxYS03ZjdmLWExODI3ODNhZDIzMSIsIkdQ
VS1iZTE0YjVjZS1iNDExLTQ4Y2EtYTlmZi01YTA2YzdhNmYzOTgiLCJHUFUtNGIxYTU2ODgtMGU4
Yy1jYzk0LTgzM2ItYzJmMzllOTk1M2I4IiwiR1BVLTEyZWJhNDYzLTgzMmUtMTA4Yi1lY2IyLTVj
OWFmOGRhNjE2NCJdLCJhY2NlcHRzX2ZhaWxvdmVyIjoiZmFsc2UiLCJob3N0X3JvbGUiOiJmb2xs
b3dlciIsImhvc3RfdGVybSI6MCwiaG9zdF9lbGVjdGlvbl9zdGF0dXMiOiJsZWFkZXJfZWxlY3Rl
ZCJ9LHsidmVyc2lvbiI6MTEzLCJob3N0X251bWJlciI6MywiaWQiOiIxNzIuMzEuMzMuMzMiLCJo
b3N0bmFtZSI6IjMwMC0zMDMtdTMzLXYxMDAiLCJzdGF0dXMiOiJydW5uaW5nIiwiaHR0cGRfc3Rh
dHVzIjoiZGlzYWJsZWQiLCJzdGF0c19zdGF0dXMiOiJydW5uaW5nIiwibWxfc3RhdHVzIjoiZGlz
YWJsZWQiLCJxdWVyeV9wbGFubmVyX3N0YXR1cyI6InN0b3BwZWQiLCJyZXZlYWxfc3RhdHVzIjoi
c3RvcHBlZCIsInRvdGFsX21lbW9yeSI6ODEwMjAxMjM5NTUyLCJzdGFydF90aW1lIjoxNzA4MTAz
NDUyLCJuZXR3b3JrX2lwcyI6WyIxNzIuMzEuMzMuMzMiXSwiZ3B1X2lkcyI6WyJHUFUtMjYzMzFh
MDctMTc1Ni1mMDY2LTFlNWEtMzc1M2Y1ZTViYzc4IiwiR1BVLWE0MWFhMTg3LTQ1NmQtNjBiMy04
ZmM5LWI4YjMzZWFlMjFiYyIsIkdQVS01NWRlZjYxOS0wMTE2LWViZjctMzMwMy03ZDkzMmRmYzcw
ZmYiLCJHUFUtNGM5YWYzODgtYjlmYi03MWQ5LWZiNDUtODMwYTM4MTIwMGQzIl0sImFjY2VwdHNf
ZmFpbG92ZXIiOiJmYWxzZSIsImhvc3Rfcm9sZSI6ImZvbGxvd2VyIiwiaG9zdF90ZXJtIjowLCJo
b3N0X2VsZWN0aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9uIjoxMTMsImhv
c3RfbnVtYmVyIjo0LCJpZCI6IjE3Mi4zMS4zMy4zNCIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzQt
djEwMCIsInN0YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRz
X3N0YXR1cyI6InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJf
c3RhdHVzIjoic3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90YWxfbWVtb3J5
Ijo4MTAyMDEyMTA4ODAsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3
Mi4zMS4zMy4zNCJdLCJncHVfaWRzIjpbIkdQVS02OGUwOWNmYy1mOWE0LTJhMTQtNTdhNC05NDgz
YjkxYzJkOWEiLCJHUFUtM2RmMjE2ZTgtZmU3NC0wNDdhLTk1YWMtNzJlMmNiZWNiNTIyIiwiR1BV
LTE0ZDQ0Yjk4LWIwNDItY2I4MS0xZGQzLTIwZDRmNjljODljYSIsIkdQVS0zNDA2NzMwYi1iZWFk
LWM1MGEtNDZlYi1lMGEyYzJiZjZlNzYiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9z
dF9yb2xlIjoiZm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25fc3RhdHVzIjoi
bGVhZGVyX2VsZWN0ZWQifSx7InZlcnNpb24iOjExMywiaG9zdF9udW1iZXIiOjUsImlkIjoiMTcy
LjMxLjMzLjM1IiwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzNS12MTAwIiwic3RhdHVzIjoicnVubmlu
ZyIsImh0dHBkX3N0YXR1cyI6ImRpc2FibGVkIiwic3RhdHNfc3RhdHVzIjoicnVubmluZyIsIm1s
X3N0YXR1cyI6ImRpc2FibGVkIiwicXVlcnlfcGxhbm5lcl9zdGF0dXMiOiJzdG9wcGVkIiwicmV2
ZWFsX3N0YXR1cyI6InN0b3BwZWQiLCJ0b3RhbF9tZW1vcnkiOjgxMDIwMTI0Nzc0NCwic3RhcnRf
dGltZSI6MTcwODEwMzQ1MiwibmV0d29ya19pcHMiOlsiMTcyLjMxLjMzLjM1Il0sImdwdV9pZHMi
OlsiR1BVLWY3MzAyYWJjLTllYWEtMTRjOS1mNDI2LTE2M2RmM2RhOGMyNiIsIkdQVS03N2RkN2Q0
OC1mOTgwLWZkMDYtNzIyYy0xYzViOTMyMTgyMDMiLCJHUFUtNTUyYjIwYTUtNTdlNi00OTg2LWJl
MmItMmIzNzhmZDRiY2FhIiwiR1BVLTYzMDUzMTYyLTMwN2YtNTVjNS1hOTc0LTU4ZGZlODQzNDJi
MiJdLCJhY2NlcHRzX2ZhaWxvdmVyIjoiZmFsc2UiLCJob3N0X3JvbGUiOiJmb2xsb3dlciIsImhv
c3RfdGVybSI6MCwiaG9zdF9lbGVjdGlvbl9zdGF0dXMiOiJsZWFkZXJfZWxlY3RlZCJ9LHsidmVy
c2lvbiI6MTEzLCJob3N0X251bWJlciI6NiwiaWQiOiIxNzIuMzEuMzMuMzYiLCJob3N0bmFtZSI6
IjMwMC0zMDMtdTM2LXYxMDAiLCJzdGF0dXMiOiJydW5uaW5nIiwiaHR0cGRfc3RhdHVzIjoiZGlz
YWJsZWQiLCJzdGF0c19zdGF0dXMiOiJydW5uaW5nIiwibWxfc3RhdHVzIjoiZGlzYWJsZWQiLCJx
dWVyeV9wbGFubmVyX3N0YXR1cyI6InN0b3BwZWQiLCJyZXZlYWxfc3RhdHVzIjoic3RvcHBlZCIs
InRvdGFsX21lbW9yeSI6ODEwMjAxMjIzMTY4LCJzdGFydF90aW1lIjoxNzA4MTAzNDUyLCJuZXR3
b3JrX2lwcyI6WyIxNzIuMzEuMzMuMzYiXSwiZ3B1X2lkcyI6WyJHUFUtOGU5ZDRhZGItYzYxMS04
MmYwLTNmZTQtZjFmMmMzZTZhNDRmIiwiR1BVLTRjNmFiYWM5LTc0ZWMtZjc4Yy1mYWE5LTQ3NDZk
MDA1N2FiOCIsIkdQVS05NjI1YTc4Yy1lMmFlLTdmMDktOWNjZi1lZTA0OTk1MjYzMTAiLCJHUFUt
YWI3NTRhM2MtNjA4Ni1iYjUxLWU5NGEtM2NmNGExNDkwNWJhIl0sImFjY2VwdHNfZmFpbG92ZXIi
OiJmYWxzZSIsImhvc3Rfcm9sZSI6ImZvbGxvd2VyIiwiaG9zdF90ZXJtIjowLCJob3N0X2VsZWN0
aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9uIjoxMTMsImhvc3RfbnVtYmVy
Ijo3LCJpZCI6IjE3Mi4zMS4zMy4zNyIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzctdjEwMCIsInN0
YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRzX3N0YXR1cyI6
InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJfc3RhdHVzIjoi
c3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90YWxfbWVtb3J5Ijo4MTAyMDEy
MzU0NTYsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3Mi4zMS4zMy4z
NyJdLCJncHVfaWRzIjpbIkdQVS1lYTE4ZDM4OC1lMmIzLTQyODMtYTZiNS1hMzIzYTQ4NzI1YTki
LCJHUFUtZGNkODRlZmItOTRjYS1iNDk3LThjMTUtN2EzMjY5NDBjMWViIiwiR1BVLTkxNjAxYTVl
LTk3M2YtZDFlNC02ZTFkLWY2NTUyOTRkMzQ2MCIsIkdQVS0wN2ZhNGRiNi05ZTU1LWI2MWYtOTA5
YS04NWM0ZDFiZWIwODgiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9zdF9yb2xlIjoi
Zm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25fc3RhdHVzIjoibGVhZGVyX2Vs
ZWN0ZWQifV19Fmh0dHBfc2VydmVyoAN7ImNvbm5lY3Rpb25zIjp7ImN1cnJlbnQiOjEsIm1heF9j
b25jdXJyZW50IjoxMTYsInF1ZXVlZCI6MCwibWF4X3F1ZXVlZF9hbGxvd2VkIjo2NTUzNiwidG90
YWwiOjMyNTg5NSwicmVmdXNlZCI6MCwidGhyZWFkcyI6Mn0sInRocmVhZHMiOnsidXNlZCI6Miwi
Y2FwYWNpdHkiOjUxMiwiYWxsb2NhdGVkIjo4LCJhdmFpbGFibGUiOjUxMCwic3RhY2tfc2l6ZSI6
MH19FG1pZ3JhdGlvbnMueyJjb3VudCI6MCwic3RhdHVzIjpbXX0KcmFua3PgUnsiY291bnQiOjks
InN0YXR1cyI6W3sidmVyc2lvbiI6MTE4LCJyYW5rIjowLCJyYW5rX2lkIjoiMCA6IDE3Mi4zMS4z
My4zMCA6IDMyNzc0MDYiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0YXR1cyI6InJ1bm5pbmci
LCJyZW1vdmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6IjcuMi4wLjEuMjAyNDAyMTQy
MTA5MDYiLCJwaWQiOjMyNzc0MDYsInN0YXJ0X3RpbWUiOjE3MDg0NDM2NTYsInN0YXJ0X3RpbWVf
c3RyIjoiVHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291bnQiOjEsImFjY2VwdGlu
Z19qb2JzIjp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6ZmFsc2UsImhvc3RuYW1l
IjoiMzAwLTMwMy11MzAtdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzAiLCJncHVfaWRzIjpb
IkdQVS1hMDdjMGU0OC05NDhhLTQwNWMtNDNiNy03Mzc0ZDJkZGMwOGMiXSwiZ3B1X2luZGV4Ijow
LCJncHVfaW5kaWNlcyI6WzBdfSx7InZlcnNpb24iOjEwMiwicmFuayI6MSwicmFua19pZCI6IjEg
OiAxNzIuMzEuMzMuMzAgOiAzMjc4NTYwIiwicmFua19tb2RlIjoicnVuIiwicmFua19zdGF0dXMi
OiJydW5uaW5nIiwicmVtb3ZhbF9zdGF0dXMiOiJub25lIiwiYXBwX3ZlcnNpb24iOiI3LjIuMC4x
LjIwMjQwMjE0MjEwOTA2IiwicGlkIjozMjc4NTYwLCJzdGFydF90aW1lIjoxNzA4NDQzNjU2LCJz
dGFydF90aW1lX3N0ciI6IlR1ZSBGZWIgMjAgMTU6NDA6NTYgMjAyNCIsInN0YXJ0X2NvdW50Ijox
LCJhY2NlcHRpbmdfam9icyI6dHJ1ZSwibmV3X3JhbmsiOmZhbHNlLCJyZWFkX29ubHkiOmZhbHNl
LCJob3N0bmFtZSI6IjMwMC0zMDMtdTMwLXYxMDAiLCJob3N0X2lkIjoiMTcyLjMxLjMzLjMwIiwi
Z3B1X2lkcyI6WyJHUFUtYTA3YzBlNDgtOTQ4YS00MDVjLTQzYjctNzM3NGQyZGRjMDhjIiwiR1BV
LTUzZTFiN2I0LTY4YjgtYTI4Yy0xMGYzLWRiNmEwNTk3ZmJiNCIsIkdQVS00MWZmOTFmYi1lY2I5
LTBhNWItZDQ3Yy1kNWJiN2VmMTAzOGEiLCJHUFUtYTk0MGIxMjYtYjFhNC05ZjA3LTQ0ZTEtNjM2
YzM3OTg5ZWNmIl0sImdwdV9pbmRleCI6MCwiZ3B1X2luZGljZXMiOlswLDEsMiwzXX0seyJ2ZXJz
aW9uIjoxMDIsInJhbmsiOjIsInJhbmtfaWQiOiIyIDogMTcyLjMxLjMzLjMxIDogMTU0NDUxOSIs
InJhbmtfbW9kZSI6InJ1biIsInJhbmtfc3RhdHVzIjoicnVubmluZyIsInJlbW92YWxfc3RhdHVz
Ijoibm9uZSIsImFwcF92ZXJzaW9uIjoiNy4yLjAuMS4yMDI0MDIxNDIxMDkwNiIsInBpZCI6MTU0
NDUxOSwic3RhcnRfdGltZSI6MTcwODQ0MzY1Niwic3RhcnRfdGltZV9zdHIiOiJUdWUgRmViIDIw
IDE1OjQwOjU2IDIwMjQiLCJzdGFydF9jb3VudCI6MSwiYWNjZXB0aW5nX2pvYnMiOnRydWUsIm5l
d19yYW5rIjpmYWxzZSwicmVhZF9vbmx5IjpmYWxzZSwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzMS12
MTAwIiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zMSIsImdwdV9pZHMiOlsiR1BVLTEzMmYxZGRiLWE1
ZjQtMjBkMy0xMTIyLTZkMzU5OWFmY2YxZCIsIkdQVS0zYmM3YmZjZC05NWE5LWE2MjYtOGU4Zi0z
N2I5NzE5YWE5ZGQiLCJHUFUtZmUxMGFkM2QtODgzZC1hNTkwLWQ0MDUtZTBhNTY5M2IwYWYwIiwi
R1BVLTZiODI5Njk5LTBiNGMtMTJmMC0wYzI4LThjZjQ2YzI0YzE4NSJdLCJncHVfaW5kZXgiOjAs
ImdwdV9pbmRpY2VzIjpbMCwxLDIsM119LHsidmVyc2lvbiI6MTAwLCJyYW5rIjozLCJyYW5rX2lk
IjoiMyA6IDE3Mi4zMS4zMy4zMiA6IDE0Mjk2ODUiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0
YXR1cyI6InJ1bm5pbmciLCJyZW1vdmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6Ijcu
Mi4wLjEuMjAyNDAyMTQyMTA5MDYiLCJwaWQiOjE0Mjk2ODUsInN0YXJ0X3RpbWUiOjE3MDg0NDM2
NTYsInN0YXJ0X3RpbWVfc3RyIjoiVHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291
bnQiOjEsImFjY2VwdGluZ19qb2JzIjp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6
ZmFsc2UsImhvc3RuYW1lIjoiMzAwLTMwMy11MzItdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMu
MzIiLCJncHVfaWRzIjpbIkdQVS0wOTBjNDQzMC1kYmJlLTFlMWEtN2Y3Zi1hMTgyNzgzYWQyMzEi
LCJHUFUtYmUxNGI1Y2UtYjQxMS00OGNhLWE5ZmYtNWEwNmM3YTZmMzk4IiwiR1BVLTRiMWE1Njg4
LTBlOGMtY2M5NC04MzNiLWMyZjM5ZTk5NTNiOCIsIkdQVS0xMmViYTQ2My04MzJlLTEwOGItZWNi
Mi01YzlhZjhkYTYxNjQiXSwiZ3B1X2luZGV4IjowLCJncHVfaW5kaWNlcyI6WzAsMSwyLDNdfSx7
InZlcnNpb24iOjExNiwicmFuayI6NCwicmFua19pZCI6IjQgOiAxNzIuMzEuMzMuMzMgOiAxNjA4
NzYxIiwicmFua19tb2RlIjoicnVuIiwicmFua19zdGF0dXMiOiJydW5uaW5nIiwicmVtb3ZhbF9z
dGF0dXMiOiJub25lIiwiYXBwX3ZlcnNpb24iOiI3LjIuMC4xLjIwMjQwMjE0MjEwOTA2IiwicGlk
IjoxNjA4NzYxLCJzdGFydF90aW1lIjoxNzA4NDQzNjU1LCJzdGFydF90aW1lX3N0ciI6IlR1ZSBG
ZWIgMjAgMTU6NDA6NTUgMjAyNCIsInN0YXJ0X2NvdW50IjoxLCJhY2NlcHRpbmdfam9icyI6dHJ1
ZSwibmV3X3JhbmsiOmZhbHNlLCJyZWFkX29ubHkiOmZhbHNlLCJob3N0bmFtZSI6IjMwMC0zMDMt
dTMzLXYxMDAiLCJob3N0X2lkIjoiMTcyLjMxLjMzLjMzIiwiZ3B1X2lkcyI6WyJHUFUtMjYzMzFh
MDctMTc1Ni1mMDY2LTFlNWEtMzc1M2Y1ZTViYzc4IiwiR1BVLWE0MWFhMTg3LTQ1NmQtNjBiMy04
ZmM5LWI4YjMzZWFlMjFiYyIsIkdQVS01NWRlZjYxOS0wMTE2LWViZjctMzMwMy03ZDkzMmRmYzcw
ZmYiLCJHUFUtNGM5YWYzODgtYjlmYi03MWQ5LWZiNDUtODMwYTM4MTIwMGQzIl0sImdwdV9pbmRl
eCI6MCwiZ3B1X2luZGljZXMiOlswLDEsMiwzXX0seyJ2ZXJzaW9uIjoxMDIsInJhbmsiOjUsInJh
bmtfaWQiOiI1IDogMTcyLjMxLjMzLjM0IDogMTY2MDEwMiIsInJhbmtfbW9kZSI6InJ1biIsInJh
bmtfc3RhdHVzIjoicnVubmluZyIsInJlbW92YWxfc3RhdHVzIjoibm9uZSIsImFwcF92ZXJzaW9u
IjoiNy4yLjAuMS4yMDI0MDIxNDIxMDkwNiIsInBpZCI6MTY2MDEwMiwic3RhcnRfdGltZSI6MTcw
ODQ0MzY1Niwic3RhcnRfdGltZV9zdHIiOiJUdWUgRmViIDIwIDE1OjQwOjU2IDIwMjQiLCJzdGFy
dF9jb3VudCI6MSwiYWNjZXB0aW5nX2pvYnMiOnRydWUsIm5ld19yYW5rIjpmYWxzZSwicmVhZF9v
bmx5IjpmYWxzZSwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzNC12MTAwIiwiaG9zdF9pZCI6IjE3Mi4z
MS4zMy4zNCIsImdwdV9pZHMiOlsiR1BVLTY4ZTA5Y2ZjLWY5YTQtMmExNC01N2E0LTk0ODNiOTFj
MmQ5YSIsIkdQVS0zZGYyMTZlOC1mZTc0LTA0N2EtOTVhYy03MmUyY2JlY2I1MjIiLCJHUFUtMTRk
NDRiOTgtYjA0Mi1jYjgxLTFkZDMtMjBkNGY2OWM4OWNhIiwiR1BVLTM0MDY3MzBiLWJlYWQtYzUw
YS00NmViLWUwYTJjMmJmNmU3NiJdLCJncHVfaW5kZXgiOjAsImdwdV9pbmRpY2VzIjpbMCwxLDIs
M119LHsidmVyc2lvbiI6MTAyLCJyYW5rIjo2LCJyYW5rX2lkIjoiNiA6IDE3Mi4zMS4zMy4zNSA6
IDEzNzk3MTUiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0YXR1cyI6InJ1bm5pbmciLCJyZW1v
dmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6IjcuMi4wLjEuMjAyNDAyMTQyMTA5MDYi
LCJwaWQiOjEzNzk3MTUsInN0YXJ0X3RpbWUiOjE3MDg0NDM2NTYsInN0YXJ0X3RpbWVfc3RyIjoi
VHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291bnQiOjEsImFjY2VwdGluZ19qb2Jz
Ijp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6ZmFsc2UsImhvc3RuYW1lIjoiMzAw
LTMwMy11MzUtdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzUiLCJncHVfaWRzIjpbIkdQVS1m
NzMwMmFiYy05ZWFhLTE0YzktZjQyNi0xNjNkZjNkYThjMjYiLCJHUFUtNzdkZDdkNDgtZjk4MC1m
ZDA2LTcyMmMtMWM1YjkzMjE4MjAzIiwiR1BVLTU1MmIyMGE1LTU3ZTYtNDk4Ni1iZTJiLTJiMzc4
ZmQ0YmNhYSIsIkdQVS02MzA1MzE2Mi0zMDdmLTU1YzUtYTk3NC01OGRmZTg0MzQyYjIiXSwiZ3B1
X2luZGV4IjowLCJncHVfaW5kaWNlcyI6WzAsMSwyLDNdfSx7InZlcnNpb24iOjEwMiwicmFuayI6
NywicmFua19pZCI6IjcgOiAxNzIuMzEuMzMuMzYgOiAxMzc5NTM5IiwicmFua19tb2RlIjoicnVu
IiwicmFua19zdGF0dXMiOiJydW5uaW5nIiwicmVtb3ZhbF9zdGF0dXMiOiJub25lIiwiYXBwX3Zl
cnNpb24iOiI3LjIuMC4xLjIwMjQwMjE0MjEwOTA2IiwicGlkIjoxMzc5NTM5LCJzdGFydF90aW1l
IjoxNzA4NDQzNjU2LCJzdGFydF90aW1lX3N0ciI6IlR1ZSBGZWIgMjAgMTU6NDA6NTYgMjAyNCIs
InN0YXJ0X2NvdW50IjoxLCJhY2NlcHRpbmdfam9icyI6dHJ1ZSwibmV3X3JhbmsiOmZhbHNlLCJy
ZWFkX29ubHkiOmZhbHNlLCJob3N0bmFtZSI6IjMwMC0zMDMtdTM2LXYxMDAiLCJob3N0X2lkIjoi
MTcyLjMxLjMzLjM2IiwiZ3B1X2lkcyI6WyJHUFUtOGU5ZDRhZGItYzYxMS04MmYwLTNmZTQtZjFm
MmMzZTZhNDRmIiwiR1BVLTRjNmFiYWM5LTc0ZWMtZjc4Yy1mYWE5LTQ3NDZkMDA1N2FiOCIsIkdQ
VS05NjI1YTc4Yy1lMmFlLTdmMDktOWNjZi1lZTA0OTk1MjYzMTAiLCJHUFUtYWI3NTRhM2MtNjA4
Ni1iYjUxLWU5NGEtM2NmNGExNDkwNWJhIl0sImdwdV9pbmRleCI6MCwiZ3B1X2luZGljZXMiOlsw
LDEsMiwzXX0seyJ2ZXJzaW9uIjoxMDIsInJhbmsiOjgsInJhbmtfaWQiOiI4IDogMTcyLjMxLjMz
LjM3IDogMTM5Nzg5MyIsInJhbmtfbW9kZSI6InJ1biIsInJhbmtfc3RhdHVzIjoicnVubmluZyIs
InJlbW92YWxfc3RhdHVzIjoibm9uZSIsImFwcF92ZXJzaW9uIjoiNy4yLjAuMS4yMDI0MDIxNDIx
MDkwNiIsInBpZCI6MTM5Nzg5Mywic3RhcnRfdGltZSI6MTcwODQ0MzY1Niwic3RhcnRfdGltZV9z
dHIiOiJUdWUgRmViIDIwIDE1OjQwOjU2IDIwMjQiLCJzdGFydF9jb3VudCI6MSwiYWNjZXB0aW5n
X2pvYnMiOnRydWUsIm5ld19yYW5rIjpmYWxzZSwicmVhZF9vbmx5IjpmYWxzZSwiaG9zdG5hbWUi
OiIzMDAtMzAzLXUzNy12MTAwIiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zNyIsImdwdV9pZHMiOlsi
R1BVLWVhMThkMzg4LWUyYjMtNDI4My1hNmI1LWEzMjNhNDg3MjVhOSIsIkdQVS1kY2Q4NGVmYi05
NGNhLWI0OTctOGMxNS03YTMyNjk0MGMxZWIiLCJHUFUtOTE2MDFhNWUtOTczZi1kMWU0LTZlMWQt
ZjY1NTI5NGQzNDYwIiwiR1BVLTA3ZmE0ZGI2LTllNTUtYjYxZi05MDlhLTg1YzRkMWJlYjA4OCJd
LCJncHVfaW5kZXgiOjAsImdwdV9pbmRpY2VzIjpbMCwxLDIsM119XX0Oc3ltYm9scxZ7ImNvdW50
IjoxfQxzeXN0ZW2qA3siaWQiOiJLaW5ldGljYSAzMDAtMzAzLXUzMC12MTAwIiwic3RhcnRfdGlt
ZSI6MTcwODEwMzQ1Miwic3RhdHVzIjoicnVubmluZyIsImNsdXN0ZXJfbGVhZGVyIjoiMTcyLjMx
LjMzLjMwIiwidmVyc2lvbiI6MjUsImNsdXN0ZXJfb3BlcmF0aW9uX3J1bm5pbmciOiJmYWxzZSIs
ImNsdXN0ZXJfb3BlcmF0aW9uX3N0YXR1cyI6IiIsIm9mZmxpbmVfc3RhdHVzIjoiZmFsc2UifQh0
ZXh03AV7ImNvdW50Ijo4LCJzdGF0dXMiOlt7InZlcnNpb24iOjMzLCJyYW5rIjoxLCJzdGF0dXMi
OiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywicmFuayI6Miwic3RhdHVzIjoicnVubmluZyJ9LHsi
dmVyc2lvbiI6MzMsInJhbmsiOjMsInN0YXR1cyI6InJ1bm5pbmcifSx7InZlcnNpb24iOjMzLCJy
YW5rIjo0LCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywicmFuayI6NSwic3RhdHVz
IjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzMsInJhbmsiOjYsInN0YXR1cyI6InJ1bm5pbmcifSx7
InZlcnNpb24iOjMzLCJyYW5rIjo3LCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywi
cmFuayI6OCwic3RhdHVzIjoicnVubmluZyJ9XX0QdHJpZ2dlcnNeeyJ0b3RhbF9jb3VudCI6MCwi
cmFuZ2VfY291bnQiOjAsIm5haV9jb3VudCI6MH0AAAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:36 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00056'
status:
code: 200
message: OK
- request:
body: "\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/system/properties
response:
body:
string: !!binary |
BE9LAD5zaG93X3N5c3RlbV9wcm9wZXJ0aWVzX3Jlc3BvbnNlvPUCpAk8Y29uZi5haS5hcGkuY29u
bmVjdGlvbl90aW1lb3V0BDkwHmNvbmYuYWkuYXBpLmtleQAoY29uZi5haS5hcGkucHJvdmlkZXIW
a2luZXRpY2FsbG0eY29uZi5haS5hcGkudXJsSGh0dHA6Ly8xNzIuMzEuMzEuMTM6ODA1MC9zcWwv
c3VnZ2VzdDBjb25mLmFsZXJ0X2Rpc2tfYWJzb2x1dGUANGNvbmYuYWxlcnRfZGlza19wZXJjZW50
YWdlGDEsIDUsIDEwLCAyMBxjb25mLmFsZXJ0X2V4ZQAsY29uZi5hbGVydF9ob3N0X3N0YXR1cwhU
UlVFOmNvbmYuYWxlcnRfaG9zdF9zdGF0dXNfZmlsdGVyIGZhdGFsX2luaXRfZXJyb3I4Y29uZi5h
bGVydF9tYXhfc3RvcmVkX2FsZXJ0cwYxMDA0Y29uZi5hbGVydF9tZW1vcnlfYWJzb2x1dGUAOGNv
bmYuYWxlcnRfbWVtb3J5X3BlcmNlbnRhZ2UYMSwgNSwgMTAsIDIwNGNvbmYuYWxlcnRfcmFua19j
dWRhX2Vycm9yCFRSVUVEY29uZi5hbGVydF9yYW5rX2ZhbGxiYWNrX2FsbG9jYXRvcghUUlVFLGNv
bmYuYWxlcnRfcmFua19zdGF0dXMIVFJVRTpjb25mLmFsZXJ0X3Jhbmtfc3RhdHVzX2ZpbHRlclhm
YXRhbF9pbml0X2Vycm9yLCBub3RfcmVzcG9uZGluZywgdGVybWluYXRlZB5jb25mLmF1ZGl0X2Jv
ZHkKRkFMU0UeY29uZi5hdWRpdF9kYXRhCkZBTFNFJGNvbmYuYXVkaXRfaGVhZGVycwpGQUxTRT5j
b25mLmF1dG9fY3JlYXRlX2V4dGVybmFsX3VzZXJzCkZBTFNFTGNvbmYuYnVpbGRfbWF0ZXJpYWxp
emVkX3ZpZXdzX29uX3N0YXJ0Em9uX2RlbWFuZDhjb25mLmJ1aWxkX3BrX2luZGV4X29uX3N0YXJ0
Em9uX2RlbWFuZDhjb25mLmNodW5rX2NvbHVtbl9tYXhfbWVtb3J5EjUxMjAwMDAwMCpjb25mLmNo
dW5rX21heF9tZW1vcnkUODE5MjAwMDAwMB5jb25mLmNodW5rX3NpemUOODAwMDAwMCJjb25mLmNs
dXN0ZXJfbmFtZRxwcm9kdWN0aW9uLW5ld0Bjb25mLmNvbmN1cnJlbnRfa2VybmVsX2V4ZWN1dGlv
bghUUlVFOmNvbmYuZGVmYXVsdF9wcmltYXJ5X2tleV90eXBlDG1lbW9yeSBjb25mLmRlZmF1bHRf
dHRsBDIwLGNvbmYuZGlzYWJsZV9jbGVhcl9hbGwIVFJVRT5jb25mLmVncmVzc19wYXJxdWV0X2Nv
bXByZXNzaW9uDHNuYXBweSRjb25mLmVuYWJsZV9hbGVydHMIVFJVRSJjb25mLmVuYWJsZV9hdWRp
dApGQUxTRTJjb25mLmVuYWJsZV9hdXRob3JpemF0aW9uCFRSVUVGY29uZi5lbmFibGVfZXh0ZXJu
YWxfYXV0aGVudGljYXRpb24KRkFMU0UwY29uZi5lbmFibGVfZ3JhcGhfc2VydmVyCFRSVUUcY29u
Zi5lbmFibGVfaGEKRkFMU0UuY29uZi5lbmFibGVfaHR0cGRfcHJveHkKRkFMU0UcY29uZi5lbmFi
bGVfbWwKRkFMU0U2Y29uZi5lbmFibGVfb3BlbmdsX3JlbmRlcmVyCFRSVUVAY29uZi5lbmFibGVf
b3ZlcmxhcHBlZF9lcXVpX2pvaW4IVFJVRTRjb25mLmVuYWJsZV9wb3N0Z3Jlc19wcm94eQhUUlVF
PmNvbmYuZW5hYmxlX3ByZWRpY2F0ZV9lcXVpX2pvaW4IVFJVRSJjb25mLmVuYWJsZV9wcm9jcwhU
UlVFJGNvbmYuZW5hYmxlX3JldmVhbAhUUlVFMGNvbmYuZW5hYmxlX3N0YXRzX3NlcnZlcghUUlVF
LmNvbmYuZW5hYmxlX3RleHRfc2VhcmNoCFRSVUU8Y29uZi5lbmFibGVfdmVjdG9ydGlsZV9zZXJ2
aWNlCFRSVUUsY29uZi5lbmFibGVfdnJhbV9jYWNoZQhUUlVFPmNvbmYuZW5hYmxlX3dvcmtlcl9o
dHRwX3NlcnZlcnMIVFJVRTJjb25mLmV2ZW50X3NlcnZlcl9hZGRyZXNzGDE3Mi4zMS4zMy4zMDRj
b25mLmV2ZW50X3NlcnZlcl9pbnRlcm5hbApGQUxTRTpjb25mLmV4dGVybmFsX2ZpbGVzX2RpcmVj
dG9yeSAvbmZzL2RhdGEvcHVibGljFGNvbmYuZ21faXAYMTcyLjMxLjMzLjMwGmNvbmYuZ21fcG9y
dDEINTU1MiBjb25mLmdtX3B1Yl9wb3J0CDU1NTMoY29uZi5ncmFwaC5oZWFkX3BvcnQIODEwMC5j
b25mLmdyYXBoLnNlcnZlcjAuaG9zdApob3N0MC5jb25mLmdyYXBoLnNlcnZlcjAucG9ydAg4MTAx
OGNvbmYuZ3JhcGguc2VydmVyMC5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjEuaG9zdApo
b3N0MS5jb25mLmdyYXBoLnNlcnZlcjEucG9ydAg4MTAyOGNvbmYuZ3JhcGguc2VydmVyMS5yYW1f
bGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjIuaG9zdApob3N0Mi5jb25mLmdyYXBoLnNlcnZlcjIu
cG9ydAg4MTAzOGNvbmYuZ3JhcGguc2VydmVyMi5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZl
cjMuaG9zdApob3N0My5jb25mLmdyYXBoLnNlcnZlcjMucG9ydAg4MTA0OGNvbmYuZ3JhcGguc2Vy
dmVyMy5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjQuaG9zdApob3N0NC5jb25mLmdyYXBo
LnNlcnZlcjQucG9ydAg4MTA1OGNvbmYuZ3JhcGguc2VydmVyNC5yYW1fbGltaXQCMC5jb25mLmdy
YXBoLnNlcnZlcjUuaG9zdApob3N0NS5jb25mLmdyYXBoLnNlcnZlcjUucG9ydAg4MTA2OGNvbmYu
Z3JhcGguc2VydmVyNS5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjYuaG9zdApob3N0Ni5j
b25mLmdyYXBoLnNlcnZlcjYucG9ydAg4MTA3OGNvbmYuZ3JhcGguc2VydmVyNi5yYW1fbGltaXQC
MC5jb25mLmdyYXBoLnNlcnZlcjcuaG9zdApob3N0Ny5jb25mLmdyYXBoLnNlcnZlcjcucG9ydAg4
MTA4OGNvbmYuZ3JhcGguc2VydmVyNy5yYW1fbGltaXQCMBxjb25mLmhhX3F1ZXVlcwAuY29uZi5o
YV9yaW5nX2hlYWRfbm9kZXMAKGNvbmYuaGVhZF9pcF9hZGRyZXNzGDE3Mi4zMS4zMy4zMBxjb25m
LmhlYWRfcG9ydAg5MTkxImNvbmYuaG1faHR0cF9wb3J0CDkzMDA2Y29uZi5ob3N0MF9hY2NlcHRz
X2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDBfYWRkcmVzcxgxNzIuMzEuMzMuMzAeY29uZi5ob3N0
MF9ncHVzDjAsMSwyLDNEY29uZi5ob3N0MF9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8v
MTcyLjMxLjMzLjMwOjkzMDAsY29uZi5ob3N0MF9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMz
LjMwMmNvbmYuaG9zdDBfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMwLGNvbmYuaG9zdDBfcHVi
bGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMChjb25mLmhvc3QwX3JhbV9saW1pdBg2NTY3Njc3
OTk5OTk2Y29uZi5ob3N0MV9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDFfYWRkcmVz
cxgxNzIuMzEuMzMuMzEeY29uZi5ob3N0MV9ncHVzDjAsMSwyLDNEY29uZi5ob3N0MV9ob3N0X21h
bmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjMxOjkzMDAsY29uZi5ob3N0MV9wcml2
YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjMxMmNvbmYuaG9zdDFfcHVibGljX2FkZHJlc3MYMTcy
LjMxLjMzLjMxLGNvbmYuaG9zdDFfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMShjb25m
Lmhvc3QxX3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0Ml9hY2NlcHRzX2ZhaWxvdmVy
CkZBTFNFJGNvbmYuaG9zdDJfYWRkcmVzcxgxNzIuMzEuMzMuMzIeY29uZi5ob3N0Ml9ncHVzDjAs
MSwyLDNEY29uZi5ob3N0Ml9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMz
LjMyOjkzMDAsY29uZi5ob3N0Ml9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjMyMmNvbmYu
aG9zdDJfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMyLGNvbmYuaG9zdDJfcHVibGljX3VybHMm
aHR0cDovLzE3Mi4zMS4zMy4zMihjb25mLmhvc3QyX3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29u
Zi5ob3N0M19hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDNfYWRkcmVzcxgxNzIuMzEu
MzMuMzMeY29uZi5ob3N0M19ncHVzDjAsMSwyLDNEY29uZi5ob3N0M19ob3N0X21hbmFnZXJfcHVi
bGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjMzOjkzMDAsY29uZi5ob3N0M19wcml2YXRlX3VybCZo
dHRwOi8vMTcyLjMxLjMzLjMzMmNvbmYuaG9zdDNfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMz
LGNvbmYuaG9zdDNfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMyhjb25mLmhvc3QzX3Jh
bV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0NF9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNv
bmYuaG9zdDRfYWRkcmVzcxgxNzIuMzEuMzMuMzQeY29uZi5ob3N0NF9ncHVzDjAsMSwyLDNEY29u
Zi5ob3N0NF9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjM0OjkzMDAs
Y29uZi5ob3N0NF9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM0MmNvbmYuaG9zdDRfcHVi
bGljX2FkZHJlc3MYMTcyLjMxLjMzLjM0LGNvbmYuaG9zdDRfcHVibGljX3VybHMmaHR0cDovLzE3
Mi4zMS4zMy4zNChjb25mLmhvc3Q0X3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0NV9h
Y2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDVfYWRkcmVzcxgxNzIuMzEuMzMuMzUeY29u
Zi5ob3N0NV9ncHVzDjAsMSwyLDNEY29uZi5ob3N0NV9ob3N0X21hbmFnZXJfcHVibGljX3VybDBo
dHRwOi8vMTcyLjMxLjMzLjM1OjkzMDAsY29uZi5ob3N0NV9wcml2YXRlX3VybCZodHRwOi8vMTcy
LjMxLjMzLjM1MmNvbmYuaG9zdDVfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjM1LGNvbmYuaG9z
dDVfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zNShjb25mLmhvc3Q1X3JhbV9saW1pdBg2
NTY3Njc3OTk5OTk2Y29uZi5ob3N0Nl9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDZf
YWRkcmVzcxgxNzIuMzEuMzMuMzYeY29uZi5ob3N0Nl9ncHVzDjAsMSwyLDNEY29uZi5ob3N0Nl9o
b3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjM2OjkzMDAsY29uZi5ob3N0
Nl9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM2MmNvbmYuaG9zdDZfcHVibGljX2FkZHJl
c3MYMTcyLjMxLjMzLjM2LGNvbmYuaG9zdDZfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4z
Nihjb25mLmhvc3Q2X3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0N19hY2NlcHRzX2Zh
aWxvdmVyCkZBTFNFJGNvbmYuaG9zdDdfYWRkcmVzcxgxNzIuMzEuMzMuMzceY29uZi5ob3N0N19n
cHVzDjAsMSwyLDNEY29uZi5ob3N0N19ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcy
LjMxLjMzLjM3OjkzMDAsY29uZi5ob3N0N19wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM3
MmNvbmYuaG9zdDdfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjM3LGNvbmYuaG9zdDdfcHVibGlj
X3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zNyhjb25mLmhvc3Q3X3JhbV9saW1pdBg2NTY3Njc3OTk5
OTkqY29uZi5odHRwZF9wcm94eV9wb3J0CDgwODI0Y29uZi5odHRwZF9wcm94eV91c2VfaHR0cHMK
RkFMU0U4Y29uZi5pbml0X3dpdGhfbm93X2F0X3dvcmtlcgpGQUxTRSpjb25mLmthZmthLmJhdGNo
X3NpemUIMTAwMC5jb25mLmthZmthLnBvbGxfdGltZW91dAIwKGNvbmYua2Fma2Eud2FpdF90aW1l
BDMwLmNvbmYua2VybmVsX29tcF90aHJlYWRzAjQ0Y29uZi5sb2FkX3ZlY3RvcnNfb25fc3RhcnQS
b25fZGVtYW5kHmNvbmYubG9ja19hdWRpdApGQUxTRTZjb25mLm1heF9hdXRvX3ZpZXdfdXBkYXRv
cnMCMzZjb25mLm1heF9jb25jdXJyZW50X2tlcm5lbHMCMDJjb25mLm1heF9nZXRfcmVjb3Jkc19z
aXplCjIwMDAwKmNvbmYubWF4X2hlYXRtYXBfc2l6ZQgzMDcyKmNvbmYubWF4X2h0dHBfdGhyZWFk
cwY1MTI6Y29uZi5tZXRhZGF0YV9zdG9yZV9zeW5jX21vZGUMbm9ybWFsKmNvbmYubWluX2h0dHBf
dGhyZWFkcwI4MGNvbmYubWluX3Bhc3N3b3JkX2xlbmd0aAIwIGNvbmYubWxfYXBpX3BvcnQIOTE4
N1xjb25mLm5wMS5idWlsZF9tYXRlcmlhbGl6ZWRfdmlld3Nfb25fbWlncmF0aW9uDGFsd2F5c0hj
b25mLm5wMS5idWlsZF9wa19pbmRleF9vbl9taWdyYXRpb24MYWx3YXlzRGNvbmYubnAxLmNyaXRp
Y2FsX3Jlc3RhcnRfYXR0ZW1wdHMCMTpjb25mLm5wMS5lbmFibGVfaGVhZF9mYWlsb3ZlcgpGQUxT
RT5jb25mLm5wMS5lbmFibGVfd29ya2VyX2ZhaWxvdmVyCkZBTFNFSmNvbmYubnAxLmZhaWxvdmVy
X2Rpc3RyaWJ1dGlvbl9wb2xpY3kIZmlsbERjb25mLm5wMS5sb2FkX3ZlY3RvcnNfb25fbWlncmF0
aW9uDGFsd2F5c0xjb25mLm5wMS5ub25fY3JpdGljYWxfcmVzdGFydF9hdHRlbXB0cwIzPGNvbmYu
bnAxLnJhbmtfcmVzdGFydF9hdHRlbXB0cwIxMmNvbmYubnAxLnJlc3RhcnRfaW50ZXJ2YWwENjA2
Y29uZi5ucDEuc3RvcmFnZV9hcGlfc2NyaXB0AChjb25mLm51bWJlcl9vZl9ob3N0cwI4KGNvbmYu
bnVtYmVyX29mX3JhbmtzAjk8Y29uZi5vcGVuZ2xfYW50aWFsaWFzaW5nX2xldmVsAjAsY29uZi5w
ZXJzaXN0X2RpcmVjdG9yeTAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8sY29uZi5wZXJzaXN0X3N5
bmNfdGltZQI1NmNvbmYucG9pbnRfcmVuZGVyX3RocmVzaG9sZAwxMDAwMDBWY29uZi5wb3N0Z3Jl
c19wcm94eS5pZGxlX2Nvbm5lY3Rpb25fdGltZW91dAYzMDBUY29uZi5wb3N0Z3Jlc19wcm94eS5t
YXhfcXVldWVkX2Nvbm5lY3Rpb25zAjE+Y29uZi5wb3N0Z3Jlc19wcm94eS5tYXhfdGhyZWFkcwQ2
ND5jb25mLnBvc3RncmVzX3Byb3h5Lm1pbl90aHJlYWRzAjIwY29uZi5wb3N0Z3Jlc19wcm94eS5w
b3J0CDU0MzIuY29uZi5wb3N0Z3Jlc19wcm94eS5zc2wIVFJVRRxjb25mLnJhbmswX2dwdQIwKmNv
bmYucmFuazBfaXBfYWRkcmVzcxgxNzIuMzEuMzMuMzAqY29uZi5yYW5rMV9pcF9hZGRyZXNzGDE3
Mi4zMS4zMy4zMCpjb25mLnJhbmsyX2lwX2FkZHJlc3MYMTcyLjMxLjMzLjMxKmNvbmYucmFuazNf
aXBfYWRkcmVzcxgxNzIuMzEuMzMuMzIqY29uZi5yYW5rNF9pcF9hZGRyZXNzGDE3Mi4zMS4zMy4z
Mypjb25mLnJhbms1X2lwX2FkZHJlc3MYMTcyLjMxLjMzLjM0KmNvbmYucmFuazZfaXBfYWRkcmVz
cxgxNzIuMzEuMzMuMzUqY29uZi5yYW5rN19pcF9hZGRyZXNzGDE3Mi4zMS4zMy4zNipjb25mLnJh
bms4X2lwX2FkZHJlc3MYMTcyLjMxLjMzLjM3KGNvbmYucmVxdWVzdF90aW1lb3V0CDI0MDA2Y29u
Zi5yZXF1aXJlX2F1dGhlbnRpY2F0aW9uCFRSVUVeY29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0
Lm1heF9jcHVfY29uY3VycmVuY3kELTFaY29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0Lm1heF90
aWVyX3ByaW9yaXR5BDEwSmNvbmYucmVzb3VyY2VfZ3JvdXAuZGVmYXVsdC5yYW1fbGltaXQELTFa
Y29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0LnNjaGVkdWxlX3ByaW9yaXR5BDUwTGNvbmYucmVz
b3VyY2VfZ3JvdXAuZGVmYXVsdC52cmFtX2xpbWl0BC0xHGNvbmYucmluZ19uYW1lDmRlZmF1bHRW
Y29uZi5zZWN1cml0eS5leHRlcm5hbC5yYW5nZXIuY2FjaGVfbWludXRlcwQ2MFRjb25mLnNlY3Vy
aXR5LmV4dGVybmFsLnJhbmdlci5zZXJ2aWNlX25hbWUQa2luZXRpY2FCY29uZi5zZWN1cml0eS5l
eHRlcm5hbC5yYW5nZXIudXJsAGBjb25mLnNlY3VyaXR5LmV4dGVybmFsLnJhbmdlcl9hdXRob3Jp
emVyLmFkZHJlc3MyaXBjOi8vL3RtcC9ncHVkYi1yYW5nZXItMHRjb25mLnNlY3VyaXR5LmV4dGVy
bmFsLnJhbmdlcl9hdXRob3JpemVyLnJlbW90ZV9kZWJ1Z19wb3J0AjBgY29uZi5zZWN1cml0eS5l
eHRlcm5hbC5yYW5nZXJfYXV0aG9yaXplci50aW1lb3V0BjEyMCpjb25mLnNldF9tb25pdG9yX3Bv
cnQIOTAwMjZjb25mLnNldF9tb25pdG9yX3Byb3h5X3BvcnQIOTAwMzZjb25mLnNldF9tb25pdG9y
X3F1ZXVlX3NpemUIMTAwMChjb25mLnNoYWRvd19hZ2dfc2l6ZRI1MDAwMDAwMDAwY29uZi5zaGFk
b3dfY3ViZV9lbmFibGVkCFRSVUUuY29uZi5zaGFkb3dfZmlsdGVyX3NpemUSNTAwMDAwMDAwJmNv
bmYuc21fb21wX3RocmVhZHMCMiRjb25mLnNtc19kaXJlY3RvcnkwL21udC9kYXRhL2dwdWRiL3Bl
cnNpc3QvLmNvbmYuc21zX21heF9vcGVuX2ZpbGVzBjEyOEBjb25mLnNxbC5jb3N0X2Jhc2VkX29w
dGltaXphdGlvbgpGQUxTRTRjb25mLnNxbC5kaXN0cmlidXRlZF9qb2lucwhUUlVFPmNvbmYuc3Fs
LmRpc3RyaWJ1dGVkX29wZXJhdGlvbnMIVFJVRS5jb25mLnNxbC5lbmFibGVfcGxhbm5lcghUUlVF
NmNvbmYuc3FsLmZvcmNlX2JpbmFyeV9qb2lucwpGQUxTRTpjb25mLnNxbC5mb3JjZV9iaW5hcnlf
c2V0X29wcwpGQUxTRTZjb25mLnNxbC5tYXhfcGFyYWxsZWxfc3RlcHMCNEBjb25mLnNxbC5tYXhf
dmlld19uZXN0aW5nX2xldmVscwQxNjJjb25mLnNxbC5wYWdpbmdfdGFibGVfdHRsBDIwNmNvbmYu
c3FsLnBhcmFsbGVsX2V4ZWN1dGlvbghUUlVFMGNvbmYuc3FsLnBsYW5fY2FjaGVfc2l6ZQg0MDAw
MGNvbmYuc3FsLnBsYW5uZXIuYWRkcmVzcz5pcGM6Ly8vdG1wL2dwdWRiLXF1ZXJ5LWVuZ2luZS0w
NmNvbmYuc3FsLnBsYW5uZXIubWF4X21lbW9yeQg0MDk2NGNvbmYuc3FsLnBsYW5uZXIubWF4X3N0
YWNrAjZEY29uZi5zcWwucGxhbm5lci5yZW1vdGVfZGVidWdfcG9ydAIwMGNvbmYuc3FsLnBsYW5u
ZXIudGltZW91dAYxMjA0Y29uZi5zcWwucmVzdWx0X2NhY2hlX3NpemUINDAwMDRjb25mLnNxbC5y
ZXN1bHRzLmNhY2hlX3R0bAQ2MDBjb25mLnNxbC5yZXN1bHRzLmNhY2hpbmcIVFJVRUBjb25mLnNx
bC5ydWxlX2Jhc2VkX29wdGltaXphdGlvbghUUlVFPGNvbmYuc3VidGFza19jb25jdXJyZW5jeV9s
aW1pdAI0PmNvbmYuc3ltYm9sb2d5X3JlbmRlcl90aHJlc2hvbGQKMTAwMDBQY29uZi5zeXN0ZW1f
bWV0YWRhdGEuc3RhdHNfYWdncl9yb3djb3VudAoxMDAwMEhjb25mLnN5c3RlbV9tZXRhZGF0YS5z
dGF0c19hZ2dyX3RpbWUCMVJjb25mLnN5c3RlbV9tZXRhZGF0YS5zdGF0c19yZXRlbnRpb25fZGF5
cwQyMSZjb25mLnRhc2tjYWxjX2dwdS4xElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS4yElsw
LDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS4zElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS40
ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS41ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dw
dS42ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS43ElswLDEsMiwzXSZjb25mLnRhc2tjYWxj
X2dwdS44ElswLDEsMiwzXSBjb25mLnRjc19wZXJfdG9tBDQwJmNvbmYudGVtcF9kaXJlY3RvcnkI
L3RtcDJjb25mLnRleHRfaW5kZXhfZGlyZWN0b3J5MC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0LzJj
b25mLnRleHRfaW5kaWNlc19wZXJfdG9tAjJMY29uZi50aWVyLmRpc2swLmRlZmF1bHQuaGlnaF93
YXRlcm1hcmsEOTA6Y29uZi50aWVyLmRpc2swLmRlZmF1bHQubGltaXQYNjAwMDAwMDAwMDAwSmNv
bmYudGllci5kaXNrMC5kZWZhdWx0Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLmRpc2swLmRl
ZmF1bHQucGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlYGNvbmYudGllci5k
aXNrMC5kZWZhdWx0LnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRUhjb25mLnRpZXIuZGlz
azAucmFuazAuaGlnaF93YXRlcm1hcmsEOTA2Y29uZi50aWVyLmRpc2swLnJhbmswLmxpbWl0GDYw
MDAwMDAwMDAwMEZjb25mLnRpZXIuZGlzazAucmFuazAubG93X3dhdGVybWFyawQ4MDRjb25mLnRp
ZXIuZGlzazAucmFuazAucGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlXGNv
bmYudGllci5kaXNrMC5yYW5rMC5zdG9yZV9wZXJzaXN0ZW50X29iamVjdHMKRkFMU0VIY29uZi50
aWVyLmRpc2swLnJhbmsxLmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5kaXNrMC5yYW5rMS5s
aW1pdBg2MDAwMDAwMDAwMDBGY29uZi50aWVyLmRpc2swLnJhbmsxLmxvd193YXRlcm1hcmsEODA0
Y29uZi50aWVyLmRpc2swLnJhbmsxLnBhdGhEL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvL2Rpc2tj
YWNoZVxjb25mLnRpZXIuZGlzazAucmFuazEuc3RvcmVfcGVyc2lzdGVudF9vYmplY3RzCkZBTFNF
SGNvbmYudGllci5kaXNrMC5yYW5rMi5oaWdoX3dhdGVybWFyawQ5MDZjb25mLnRpZXIuZGlzazAu
cmFuazIubGltaXQYNjAwMDAwMDAwMDAwRmNvbmYudGllci5kaXNrMC5yYW5rMi5sb3dfd2F0ZXJt
YXJrBDgwNGNvbmYudGllci5kaXNrMC5yYW5rMi5wYXRoRC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0
Ly9kaXNrY2FjaGVcY29uZi50aWVyLmRpc2swLnJhbmsyLnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0
cwpGQUxTRUhjb25mLnRpZXIuZGlzazAucmFuazMuaGlnaF93YXRlcm1hcmsEOTA2Y29uZi50aWVy
LmRpc2swLnJhbmszLmxpbWl0GDYwMDAwMDAwMDAwMEZjb25mLnRpZXIuZGlzazAucmFuazMubG93
X3dhdGVybWFyawQ4MDRjb25mLnRpZXIuZGlzazAucmFuazMucGF0aEQvbW50L2RhdGEvZ3B1ZGIv
cGVyc2lzdC8vZGlza2NhY2hlXGNvbmYudGllci5kaXNrMC5yYW5rMy5zdG9yZV9wZXJzaXN0ZW50
X29iamVjdHMKRkFMU0VIY29uZi50aWVyLmRpc2swLnJhbms0LmhpZ2hfd2F0ZXJtYXJrBDkwNmNv
bmYudGllci5kaXNrMC5yYW5rNC5saW1pdBg2MDAwMDAwMDAwMDBGY29uZi50aWVyLmRpc2swLnJh
bms0Lmxvd193YXRlcm1hcmsEODA0Y29uZi50aWVyLmRpc2swLnJhbms0LnBhdGhEL21udC9kYXRh
L2dwdWRiL3BlcnNpc3QvL2Rpc2tjYWNoZVxjb25mLnRpZXIuZGlzazAucmFuazQuc3RvcmVfcGVy
c2lzdGVudF9vYmplY3RzCkZBTFNFSGNvbmYudGllci5kaXNrMC5yYW5rNS5oaWdoX3dhdGVybWFy
awQ5MDZjb25mLnRpZXIuZGlzazAucmFuazUubGltaXQYNjAwMDAwMDAwMDAwRmNvbmYudGllci5k
aXNrMC5yYW5rNS5sb3dfd2F0ZXJtYXJrBDgwNGNvbmYudGllci5kaXNrMC5yYW5rNS5wYXRoRC9t
bnQvZGF0YS9ncHVkYi9wZXJzaXN0Ly9kaXNrY2FjaGVcY29uZi50aWVyLmRpc2swLnJhbms1LnN0
b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRUhjb25mLnRpZXIuZGlzazAucmFuazYuaGlnaF93
YXRlcm1hcmsEOTA2Y29uZi50aWVyLmRpc2swLnJhbms2LmxpbWl0GDYwMDAwMDAwMDAwMEZjb25m
LnRpZXIuZGlzazAucmFuazYubG93X3dhdGVybWFyawQ4MDRjb25mLnRpZXIuZGlzazAucmFuazYu
cGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlXGNvbmYudGllci5kaXNrMC5y
YW5rNi5zdG9yZV9wZXJzaXN0ZW50X29iamVjdHMKRkFMU0VIY29uZi50aWVyLmRpc2swLnJhbms3
LmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5kaXNrMC5yYW5rNy5saW1pdBg2MDAwMDAwMDAw
MDBGY29uZi50aWVyLmRpc2swLnJhbms3Lmxvd193YXRlcm1hcmsEODA0Y29uZi50aWVyLmRpc2sw
LnJhbms3LnBhdGhEL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvL2Rpc2tjYWNoZVxjb25mLnRpZXIu
ZGlzazAucmFuazcuc3RvcmVfcGVyc2lzdGVudF9vYmplY3RzCkZBTFNFSGNvbmYudGllci5kaXNr
MC5yYW5rOC5oaWdoX3dhdGVybWFyawQ5MDZjb25mLnRpZXIuZGlzazAucmFuazgubGltaXQYNjAw
MDAwMDAwMDAwRmNvbmYudGllci5kaXNrMC5yYW5rOC5sb3dfd2F0ZXJtYXJrBDgwNGNvbmYudGll
ci5kaXNrMC5yYW5rOC5wYXRoRC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Ly9kaXNrY2FjaGVcY29u
Zi50aWVyLmRpc2swLnJhbms4LnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRVBjb25mLnRp
ZXIuZ2xvYmFsLmNvbmN1cnJlbnRfd2FpdF90aW1lb3V0BjYwMGpjb25mLnRpZXIuZ2xvYmFsLmRl
ZmVyX2NhY2hlX29iamVjdF9ldmljdGlvbnNfdG9fZGlzawhUUlVFUGNvbmYudGllci5wZXJzaXN0
LmRlZmF1bHQuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnBlcnNpc3QuZGVmYXVsdC5saW1p
dBo2NTAwMDAwMDAwMDAwTmNvbmYudGllci5wZXJzaXN0LmRlZmF1bHQubG93X3dhdGVybWFyawQ4
MDxjb25mLnRpZXIucGVyc2lzdC5kZWZhdWx0LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3Qv
OmNvbmYudGllci5wZXJzaXN0LmdyYXBoMC5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpj
b25mLnRpZXIucGVyc2lzdC5ncmFwaDEucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC86Y29u
Zi50aWVyLnBlcnNpc3QuZ3JhcGgyLnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvOmNvbmYu
dGllci5wZXJzaXN0LmdyYXBoMy5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpjb25mLnRp
ZXIucGVyc2lzdC5ncmFwaDQucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC86Y29uZi50aWVy
LnBlcnNpc3QuZ3JhcGg1LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvOmNvbmYudGllci5w
ZXJzaXN0LmdyYXBoNi5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpjb25mLnRpZXIucGVy
c2lzdC5ncmFwaDcucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBlcnNp
c3QucmFuazAuaGlnaF93YXRlcm1hcmsEOTA6Y29uZi50aWVyLnBlcnNpc3QucmFuazAubGltaXQa
NjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVyc2lzdC5yYW5rMC5sb3dfd2F0ZXJtYXJrBDgwOGNv
bmYudGllci5wZXJzaXN0LnJhbmswLnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNvbmYu
dGllci5wZXJzaXN0LnJhbmsxLmhpZ2hfd2F0ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0LnJh
bmsxLmxpbWl0GjY1MDAwMDAwMDAwMDBKY29uZi50aWVyLnBlcnNpc3QucmFuazEubG93X3dhdGVy
bWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5yYW5rMS5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJz
aXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5rMi5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRpZXIu
cGVyc2lzdC5yYW5rMi5saW1pdBo2NTAwMDAwMDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJhbmsy
Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLnBlcnNpc3QucmFuazIucGF0aDAvbW50L2RhdGEv
Z3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBlcnNpc3QucmFuazMuaGlnaF93YXRlcm1hcmsEOTA6
Y29uZi50aWVyLnBlcnNpc3QucmFuazMubGltaXQaNjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVy
c2lzdC5yYW5rMy5sb3dfd2F0ZXJtYXJrBDgwOGNvbmYudGllci5wZXJzaXN0LnJhbmszLnBhdGgw
L21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNvbmYudGllci5wZXJzaXN0LnJhbms0LmhpZ2hfd2F0
ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0LnJhbms0LmxpbWl0GjY1MDAwMDAwMDAwMDBKY29u
Zi50aWVyLnBlcnNpc3QucmFuazQubG93X3dhdGVybWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5y
YW5rNC5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5r
NS5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRpZXIucGVyc2lzdC5yYW5rNS5saW1pdBo2NTAwMDAw
MDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJhbms1Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVy
LnBlcnNpc3QucmFuazUucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBl
cnNpc3QucmFuazYuaGlnaF93YXRlcm1hcmsEOTA6Y29uZi50aWVyLnBlcnNpc3QucmFuazYubGlt
aXQaNjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVyc2lzdC5yYW5rNi5sb3dfd2F0ZXJtYXJrBDgw
OGNvbmYudGllci5wZXJzaXN0LnJhbms2LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNv
bmYudGllci5wZXJzaXN0LnJhbms3LmhpZ2hfd2F0ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0
LnJhbms3LmxpbWl0GjY1MDAwMDAwMDAwMDBKY29uZi50aWVyLnBlcnNpc3QucmFuazcubG93X3dh
dGVybWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5yYW5rNy5wYXRoMC9tbnQvZGF0YS9ncHVkYi9w
ZXJzaXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5rOC5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRp
ZXIucGVyc2lzdC5yYW5rOC5saW1pdBo2NTAwMDAwMDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJh
bms4Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLnBlcnNpc3QucmFuazgucGF0aDAvbW50L2Rh
dGEvZ3B1ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDEucGF0aDAvbW50L2RhdGEv
Z3B1ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDIucGF0aDAvbW50L2RhdGEvZ3B1
ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDMucGF0aDAvbW50L2RhdGEvZ3B1ZGIv
cGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDQucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVy
c2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDUucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lz
dC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDYucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC84
Y29uZi50aWVyLnBlcnNpc3QudGV4dDcucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC84Y29u
Zi50aWVyLnBlcnNpc3QudGV4dDgucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9IY29uZi50
aWVyLnJhbS5kZWZhdWx0LmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5yYW0uZGVmYXVsdC5s
aW1pdAQtMUZjb25mLnRpZXIucmFtLmRlZmF1bHQubG93X3dhdGVybWFyawQ4MERjb25mLnRpZXIu
cmFtLnJhbmswLmhpZ2hfd2F0ZXJtYXJrBDkwMmNvbmYudGllci5yYW0ucmFuazAubGltaXQWNzcy
NjY4MDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMC5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazEuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMS5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMS5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazIuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMi5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMi5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazMuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMy5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMy5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazQuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNC5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNC5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazUuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNS5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNS5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazYuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNi5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNi5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazcuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNy5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNy5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazguaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rOC5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rOC5sb3dfd2F0ZXJtYXJrBDgwXGNvbmYudGllci52
cmFtLmRlZmF1bHQuYWxsX2dwdXMuaGlnaF93YXRlcm1hcmsEOTBKY29uZi50aWVyLnZyYW0uZGVm
YXVsdC5hbGxfZ3B1cy5saW1pdAQtMVpjb25mLnRpZXIudnJhbS5kZWZhdWx0LmFsbF9ncHVzLmxv
d193YXRlcm1hcmsEODBUY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUwLmhpZ2hfd2F0ZXJtYXJr
BDkwQmNvbmYudGllci52cmFtLmRlZmF1bHQuZ3B1MC5saW1pdAQtMVJjb25mLnRpZXIudnJhbS5k
ZWZhdWx0LmdwdTAubG93X3dhdGVybWFyawQ4MFRjb25mLnRpZXIudnJhbS5kZWZhdWx0LmdwdTEu
aGlnaF93YXRlcm1hcmsEOTBCY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUxLmxpbWl0BC0xUmNv
bmYudGllci52cmFtLmRlZmF1bHQuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwVGNvbmYudGllci52cmFt
LmRlZmF1bHQuZ3B1Mi5oaWdoX3dhdGVybWFyawQ5MEJjb25mLnRpZXIudnJhbS5kZWZhdWx0Lmdw
dTIubGltaXQELTFSY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUyLmxvd193YXRlcm1hcmsEODBU
Y29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUzLmhpZ2hfd2F0ZXJtYXJrBDkwQmNvbmYudGllci52
cmFtLmRlZmF1bHQuZ3B1My5saW1pdAQtMVJjb25mLnRpZXIudnJhbS5kZWZhdWx0LmdwdTMubG93
X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMC5HUFUwLmhpZ2hfd2F0ZXJtYXJrBDkw
PmNvbmYudGllci52cmFtLnJhbmswLkdQVTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazAu
R1BVMC5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbmswLmFsbF9ncHVzLmhpZ2hf
d2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbmswLmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYu
dGllci52cmFtLnJhbmswLmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0u
cmFuazAuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMC5ncHUwLmxp
bWl0BC0xTmNvbmYudGllci52cmFtLnJhbmswLmdwdTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRp
ZXIudnJhbS5yYW5rMC5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsw
LmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazAuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgw
UGNvbmYudGllci52cmFtLnJhbmswLmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZy
YW0ucmFuazAuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMC5ncHUyLmxvd193YXRl
cm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazAuZ3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25m
LnRpZXIudnJhbS5yYW5rMC5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmswLmdwdTMu
bG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMS5HUFUwLmhpZ2hfd2F0ZXJtYXJr
BDkwPmNvbmYudGllci52cmFtLnJhbmsxLkdQVTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFu
azEuR1BVMC5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsxLkdQVTEuaGlnaF93
YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazEuR1BVMS5saW1pdAQtMU5jb25mLnRpZXIu
dnJhbS5yYW5rMS5HUFUxLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEuR1BV
Mi5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMS5HUFUyLmxpbWl0BC0xTmNv
bmYudGllci52cmFtLnJhbmsxLkdQVTIubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5y
YW5rMS5HUFUzLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsxLkdQVTMubGlt
aXQELTFOY29uZi50aWVyLnZyYW0ucmFuazEuR1BVMy5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGll
ci52cmFtLnJhbmsxLmFsbF9ncHVzLmhpZ2hfd2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJh
bmsxLmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYudGllci52cmFtLnJhbmsxLmFsbF9ncHVzLmxvd193
YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5j
b25mLnRpZXIudnJhbS5yYW5rMS5ncHUwLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmsxLmdw
dTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMS5ncHUxLmhpZ2hfd2F0ZXJt
YXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsxLmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0u
cmFuazEuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsxLmdwdTIuaGln
aF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazEuZ3B1Mi5saW1pdAQtMU5jb25mLnRp
ZXIudnJhbS5yYW5rMS5ncHUyLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEu
Z3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMS5ncHUzLmxpbWl0BC0x
TmNvbmYudGllci52cmFtLnJhbmsxLmdwdTMubG93X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJh
bS5yYW5rMi5hbGxfZ3B1cy5oaWdoX3dhdGVybWFyawQ5MEZjb25mLnRpZXIudnJhbS5yYW5rMi5h
bGxfZ3B1cy5saW1pdAQtMVZjb25mLnRpZXIudnJhbS5yYW5rMi5hbGxfZ3B1cy5sb3dfd2F0ZXJt
YXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsyLmdwdTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50
aWVyLnZyYW0ucmFuazIuZ3B1MC5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMi5ncHUwLmxv
d193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazIuZ3B1MS5oaWdoX3dhdGVybWFyawQ5
MD5jb25mLnRpZXIudnJhbS5yYW5rMi5ncHUxLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmsy
LmdwdTEubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMi5ncHUyLmhpZ2hfd2F0
ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsyLmdwdTIubGltaXQELTFOY29uZi50aWVyLnZy
YW0ucmFuazIuZ3B1Mi5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsyLmdwdTMu
aGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazIuZ3B1My5saW1pdAQtMU5jb25m
LnRpZXIudnJhbS5yYW5rMi5ncHUzLmxvd193YXRlcm1hcmsEODBYY29uZi50aWVyLnZyYW0ucmFu
azMuYWxsX2dwdXMuaGlnaF93YXRlcm1hcmsEOTBGY29uZi50aWVyLnZyYW0ucmFuazMuYWxsX2dw
dXMubGltaXQELTFWY29uZi50aWVyLnZyYW0ucmFuazMuYWxsX2dwdXMubG93X3dhdGVybWFyawQ4
MFBjb25mLnRpZXIudnJhbS5yYW5rMy5ncHUwLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52
cmFtLnJhbmszLmdwdTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazMuZ3B1MC5sb3dfd2F0
ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmszLmdwdTEuaGlnaF93YXRlcm1hcmsEOTA+Y29u
Zi50aWVyLnZyYW0ucmFuazMuZ3B1MS5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMy5ncHUx
Lmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazMuZ3B1Mi5oaWdoX3dhdGVybWFy
awQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMy5ncHUyLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJh
bmszLmdwdTIubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMy5ncHUzLmhpZ2hf
d2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmszLmdwdTMubGltaXQELTFOY29uZi50aWVy
LnZyYW0ucmFuazMuZ3B1My5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbms0LmFs
bF9ncHVzLmhpZ2hfd2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbms0LmFsbF9ncHVzLmxp
bWl0BC0xVmNvbmYudGllci52cmFtLnJhbms0LmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29u
Zi50aWVyLnZyYW0ucmFuazQuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5y
YW5rNC5ncHUwLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms0LmdwdTAubG93X3dhdGVybWFy
awQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNC5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGll
ci52cmFtLnJhbms0LmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazQuZ3B1MS5sb3df
d2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms0LmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+
Y29uZi50aWVyLnZyYW0ucmFuazQuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNC5n
cHUyLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazQuZ3B1My5oaWdoX3dhdGVy
bWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rNC5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFt
LnJhbms0LmdwdTMubG93X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1
cy5oaWdoX3dhdGVybWFyawQ5MEZjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1cy5saW1pdAQt
MVZjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1cy5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGll
ci52cmFtLnJhbms1LmdwdTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazUu
Z3B1MC5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNS5ncHUwLmxvd193YXRlcm1hcmsEODBQ
Y29uZi50aWVyLnZyYW0ucmFuazUuZ3B1MS5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJh
bS5yYW5rNS5ncHUxLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms1LmdwdTEubG93X3dhdGVy
bWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNS5ncHUyLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYu
dGllci52cmFtLnJhbms1LmdwdTIubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazUuZ3B1Mi5s
b3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms1LmdwdTMuaGlnaF93YXRlcm1hcmsE
OTA+Y29uZi50aWVyLnZyYW0ucmFuazUuZ3B1My5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5r
NS5ncHUzLmxvd193YXRlcm1hcmsEODBYY29uZi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMuaGln
aF93YXRlcm1hcmsEOTBGY29uZi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMubGltaXQELTFWY29u
Zi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJh
bS5yYW5rNi5ncHUwLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms2LmdwdTAu
bGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1MC5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYu
dGllci52cmFtLnJhbms2LmdwdTEuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFu
azYuZ3B1MS5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNi5ncHUxLmxvd193YXRlcm1hcmsE
ODBQY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1Mi5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIu
dnJhbS5yYW5rNi5ncHUyLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms2LmdwdTIubG93X3dh
dGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNi5ncHUzLmhpZ2hfd2F0ZXJtYXJrBDkwPmNv
bmYudGllci52cmFtLnJhbms2LmdwdTMubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1
My5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbms3LmFsbF9ncHVzLmhpZ2hfd2F0
ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbms3LmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYudGll
ci52cmFtLnJhbms3LmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFu
azcuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rNy5ncHUwLmxpbWl0
BC0xTmNvbmYudGllci52cmFtLnJhbms3LmdwdTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIu
dnJhbS5yYW5rNy5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms3Lmdw
dTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazcuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwUGNv
bmYudGllci52cmFtLnJhbms3LmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0u
cmFuazcuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNy5ncHUyLmxvd193YXRlcm1h
cmsEODBQY29uZi50aWVyLnZyYW0ucmFuazcuZ3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRp
ZXIudnJhbS5yYW5rNy5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms3LmdwdTMubG93
X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJhbS5yYW5rOC5hbGxfZ3B1cy5oaWdoX3dhdGVybWFy
awQ5MEZjb25mLnRpZXIudnJhbS5yYW5rOC5hbGxfZ3B1cy5saW1pdAQtMVZjb25mLnRpZXIudnJh
bS5yYW5rOC5hbGxfZ3B1cy5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms4Lmdw
dTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazguZ3B1MC5saW1pdAQtMU5j
b25mLnRpZXIudnJhbS5yYW5rOC5ncHUwLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0u
cmFuazguZ3B1MS5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rOC5ncHUxLmxp
bWl0BC0xTmNvbmYudGllci52cmFtLnJhbms4LmdwdTEubG93X3dhdGVybWFyawQ4MFBjb25mLnRp
ZXIudnJhbS5yYW5rOC5ncHUyLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms4
LmdwdTIubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazguZ3B1Mi5sb3dfd2F0ZXJtYXJrBDgw
UGNvbmYudGllci52cmFtLnJhbms4LmdwdTMuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZy
YW0ucmFuazguZ3B1My5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rOC5ncHUzLmxvd193YXRl
cm1hcmsEODA0Y29uZi50aWVyX3N0cmF0ZWd5LmRlZmF1bHRCVlJBTSAyLCBSQU0gNSwgRElTSzAg
NSwgUEVSU0lTVCA1YGNvbmYudGllcl9zdHJhdGVneS5wcmVkaWNhdGVfZXZhbHVhdGlvbl9pbnRl
cnZhbAQ2MCRjb25mLnRvbXNfcGVyX3JhbmsCMSBjb25mLnRwc19wZXJfdG9tBDQwImNvbmYudHJp
Z2dlcl9wb3J0CDkwMDE+Y29uZi51bmlmaWVkX3NlY3VyaXR5X25hbWVzcGFjZQpGQUxTRTpjb25m
LnVzZV9leHRlcm5hbF90ZXh0X3NlcnZlcghUUlVFHGNvbmYudXNlX2h0dHBzCkZBTFNFLGNvbmYu
dmlkZW9fZGVmYXVsdF90dGwELTEoY29uZi52aWRlb19tYXhfY291bnQELTEyY29uZi52aWRlb190
ZW1wX2RpcmVjdG9yeSwvdG1wL2dwdWRiLXRlbXAtdmlkZW9zImNvbmYud2FsLmNoZWNrc3VtCFRS
VUUwY29uZi53YWwuZmx1c2hfZnJlcXVlbmN5BDYwMmNvbmYud2FsLm1heF9zZWdtZW50X3NpemUS
NTAwMDAwMDAwLGNvbmYud2FsLnNlZ21lbnRfY291bnQELTEoY29uZi53YWwuc3luY19wb2xpY3kK
Zmx1c2g2Y29uZi53b3JrZXJfaHR0cF9zZXJ2ZXJfaXBz6AExNzIuMzEuMzMuMzA7MTcyLjMxLjMz
LjMwOzE3Mi4zMS4zMy4zMTsxNzIuMzEuMzMuMzI7MTcyLjMxLjMzLjMzOzE3Mi4zMS4zMy4zNDsx
NzIuMzEuMzMuMzU7MTcyLjMxLjMzLjM2OzE3Mi4zMS4zMy4zNzpjb25mLndvcmtlcl9odHRwX3Nl
cnZlcl9wb3J0c1g5MTkxOzkxOTI7OTE5Mzs5MTk0OzkxOTU7OTE5Njs5MTk3OzkxOTg7OTE5OThj
b25mLndvcmtlcl9odHRwX3NlcnZlcl91cmxzwANodHRwOi8vMTcyLjMxLjMzLjMwOjkxOTE7aHR0
cDovLzE3Mi4zMS4zMy4zMDo5MTkyO2h0dHA6Ly8xNzIuMzEuMzMuMzE6OTE5MztodHRwOi8vMTcy
LjMxLjMzLjMyOjkxOTQ7aHR0cDovLzE3Mi4zMS4zMy4zMzo5MTk1O2h0dHA6Ly8xNzIuMzEuMzMu
MzQ6OTE5NjtodHRwOi8vMTcyLjMxLjMzLjM1OjkxOTc7aHR0cDovLzE3Mi4zMS4zMy4zNjo5MTk4
O2h0dHA6Ly8xNzIuMzEuMzMuMzc6OTE5OUhjb25mLndvcmtlcl9odHRwX3NlcnZlcl91cmxzX3By
aXZhdGXAA2h0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5MTtodHRwOi8vMTcyLjMxLjMzLjMwOjkxOTI7
aHR0cDovLzE3Mi4zMS4zMy4zMTo5MTkzO2h0dHA6Ly8xNzIuMzEuMzMuMzI6OTE5NDtodHRwOi8v
MTcyLjMxLjMzLjMzOjkxOTU7aHR0cDovLzE3Mi4zMS4zMy4zNDo5MTk2O2h0dHA6Ly8xNzIuMzEu
MzMuMzU6OTE5NztodHRwOi8vMTcyLjMxLjMzLjM2OjkxOTg7aHR0cDovLzE3Mi4zMS4zMy4zNzo5
MTk5KHN5c3RlbS5mb250X2ZhbWlsaWVzpgFEZWphVnUgTWF0aCBUZVggR3lyZSxEZWphVnUgU2Fu
cyBNb25vLERlamFWdSBTYW5zLERlamFWdSBTZXJpZixTYW5zLFNlcmlmLE1vbm9zcGFjZTB2ZXJz
aW9uLmdwdWRiX2J1aWxkX2RhdGUoRmViIDE0IDIwMjQgMjM6NDk6MDFAdmVyc2lvbi5ncHVkYl9j
b21wdXRlX2NhcGFiaWxpdHkWNjA7NzA7ODA7ODY4dmVyc2lvbi5ncHVkYl9jb21wdXRlX2VuZ2lu
ZQhDVURBPnZlcnNpb24uZ3B1ZGJfY29yZV9saWJzX3ZlcnNpb24UMjAyNDAyMTMwMDR2ZXJzaW9u
LmdwdWRiX2NvcmVfdmVyc2lvbiw3LjIuMC4xLjIwMjQwMjE0MjEwOTA2NHZlcnNpb24uZ3B1ZGJf
ZmlsZV92ZXJzaW9uFDIwMjEwMzExMjAqdmVyc2lvbi5ncHVkYl92ZXJzaW9uUDkyMjYwYTMyOWNh
NDVjYjBlMzc3NzZjZjkxNDQ5NzE3OWY2MjExNDM0dmVyc2lvbi5ncHVkYl92ZXJzaW9uX2RhdGUy
MjAyNC0wMi0xNCAyMTowOTowNiAtMDUwMCx2ZXJzaW9uLnB5dGhvbl92ZXJzaW9uDjMuMTAuMTMA
AAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:36 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00054'
status:
code: 200
message: OK
- request:
body: !!binary |
hAFHRU5FUkFURSBQUk9NUFQgV0lUSCBPUFRJT05TIChDT05URVhUX05BTUVTID0gJ2RlbW8udGVz
dF9sbG1fY3R4JykAAgxiaW5hcnkAAAA=
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/execute/sql
response:
body:
string: !!binary |
BE9LAChleGVjdXRlX3NxbF9yZXNwb25zZe4MAPYDeyJuYW1lIjoiZ2VuZXJpY19yZXNwb25zZSIs
InR5cGUiOiJyZWNvcmQiLCJmaWVsZHMiOlt7Im5hbWUiOiJjb2x1bW5fMSIsInR5cGUiOnsidHlw
ZSI6ImFycmF5IiwiaXRlbXMiOiJzdHJpbmcifX0seyJuYW1lIjoiY29sdW1uX2hlYWRlcnMiLCJ0
eXBlIjp7InR5cGUiOiJhcnJheSIsIml0ZW1zIjoic3RyaW5nIn19LHsibmFtZSI6ImNvbHVtbl9k
YXRhdHlwZXMiLCJ0eXBlIjp7InR5cGUiOiJhcnJheSIsIml0ZW1zIjoic3RyaW5nIn19XX2qBwL+
BnsicGF5bG9hZCI6eyJjb250ZXh0IjpbeyJ0YWJsZSI6ImRlbW8udGVzdF9wcm9maWxlcyIsImNv
bHVtbnMiOlsidXNlcm5hbWUgVkFSQ0hBUiAoMzIpIE5PVCBOVUxMIiwibmFtZSBWQVJDSEFSICgz
MikgTk9UIE5VTEwiLCJzZXggVkFSQ0hBUiAoMSkgTk9UIE5VTEwiLCJhZGRyZXNzIFZBUkNIQVIg
KDY0KSBOT1QgTlVMTCIsIm1haWwgVkFSQ0hBUiAoMzIpIE5PVCBOVUxMIiwiYmlydGhkYXRlIFRJ
TUVTVEFNUCBOT1QgTlVMTCJdLCJkZXNjcmlwdGlvbiI6IkNvbnRhaW5zIHVzZXIgcHJvZmlsZXMu
IiwicnVsZXMiOltdfSx7InNhbXBsZXMiOnsiSG93IG1hbnkgbWFsZSB1c2VycyBhcmUgdGhlcmU/
Ijoic2VsZWN0IGNvdW50KDEpIGFzIG51bV91c2Vyc1xuICAgICAgICAgICAgZnJvbSBkZW1vLnRl
c3RfcHJvZmlsZXNcbiAgICAgICAgICAgIHdoZXJlIHNleCA9ICcnTScnOyJ9fV19fQACDFByb21w
dAACDHN0cmluZwAAAgAACCBYLUtpbmV0aWNhLUdyb3VwBkRETApjb3VudAIwGmxhc3RfZW5kcG9p
bnQsL2dlbmVyYXRlL3NxbC9pbnRlcm5hbC50b3RhbF9udW1iZXJfb2ZfcmVjb3JkcwIwAAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:36 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-Kinetica-Group:
- DDL
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00806'
status:
code: 200
message: OK
version: 1
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/cassettes/TestChatKinetica.test_setup.yaml | interactions:
- request:
body: "\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/system/status
response:
body:
string: !!binary |
BE9LADZzaG93X3N5c3RlbV9zdGF0dXNfcmVzcG9uc2X8ugEUCmdyYXBovAl7ImNvdW50Ijo4LCJz
dGF0dXMiOlt7InZlcnNpb24iOjM5LCJzZXJ2ZXJfaWQiOjAsImhvc3RfaWQiOiIxNzIuMzEuMzMu
MzAiLCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjoxLCJob3N0
X2lkIjoiMTcyLjMxLjMzLjMxIiwic3RhdHVzIjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzksInNl
cnZlcl9pZCI6MiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zMiIsInN0YXR1cyI6InJ1bm5pbmcifSx7
InZlcnNpb24iOjM5LCJzZXJ2ZXJfaWQiOjMsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzMiLCJzdGF0
dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjo0LCJob3N0X2lkIjoiMTcy
LjMxLjMzLjM0Iiwic3RhdHVzIjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzksInNlcnZlcl9pZCI6
NSwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zNSIsInN0YXR1cyI6InJ1bm5pbmcifSx7InZlcnNpb24i
OjM5LCJzZXJ2ZXJfaWQiOjYsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzYiLCJzdGF0dXMiOiJydW5u
aW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjo3LCJob3N0X2lkIjoiMTcyLjMxLjMzLjM3
Iiwic3RhdHVzIjoicnVubmluZyJ9XX0eaGFfY2x1c3Rlcl9pbmZvzAN7ImhhX3JhbmtzX2luZm8i
Olt7InByaXZhdGVfdXJsIjoiaHR0cDovLzE3Mi4zMS4zMy4zMDo5MTkxIiwicHVibGljX3VybCI6
Imh0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5MSIsImhhX3VybF9vdmVycmlkZSI6Imh0dHA6Ly8xNzIu
MzEuMzMuMzA6OTE5MSIsImFsdGVybmF0ZV91cmxzIjpbImh0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5
MSJdfV0sInJlYWR5Ijp0cnVlLCJuYW1lIjoicHJvZHVjdGlvbi1uZXcifQpob3N0c/hLeyJjb3Vu
dCI6OCwic3RhdHVzIjpbeyJ2ZXJzaW9uIjoxMzMsImhvc3RfbnVtYmVyIjowLCJpZCI6IjE3Mi4z
MS4zMy4zMCIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzAtdjEwMCIsInN0YXR1cyI6InJ1bm5pbmci
LCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRzX3N0YXR1cyI6InJ1bm5pbmciLCJtbF9z
dGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJfc3RhdHVzIjoicnVubmluZyIsInJldmVh
bF9zdGF0dXMiOiJydW5uaW5nIiwidG90YWxfbWVtb3J5Ijo4MTAyMDEyNjAwMzIsInN0YXJ0X3Rp
bWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3Mi4zMS4zMy4zMCJdLCJncHVfaWRzIjpb
IkdQVS1hMDdjMGU0OC05NDhhLTQwNWMtNDNiNy03Mzc0ZDJkZGMwOGMiLCJHUFUtNTNlMWI3YjQt
NjhiOC1hMjhjLTEwZjMtZGI2YTA1OTdmYmI0IiwiR1BVLTQxZmY5MWZiLWVjYjktMGE1Yi1kNDdj
LWQ1YmI3ZWYxMDM4YSIsIkdQVS1hOTQwYjEyNi1iMWE0LTlmMDctNDRlMS02MzZjMzc5ODllY2Yi
XSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9zdF9yb2xlIjoibGVhZGVyIiwiaG9zdF90
ZXJtIjowLCJob3N0X2VsZWN0aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9u
IjoxMTMsImhvc3RfbnVtYmVyIjoxLCJpZCI6IjE3Mi4zMS4zMy4zMSIsImhvc3RuYW1lIjoiMzAw
LTMwMy11MzEtdjEwMCIsInN0YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxl
ZCIsInN0YXRzX3N0YXR1cyI6InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5
X3BsYW5uZXJfc3RhdHVzIjoic3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90
YWxfbWVtb3J5Ijo4MTAyMDEyMzU0NTYsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtf
aXBzIjpbIjE3Mi4zMS4zMy4zMSJdLCJncHVfaWRzIjpbIkdQVS0xMzJmMWRkYi1hNWY0LTIwZDMt
MTEyMi02ZDM1OTlhZmNmMWQiLCJHUFUtM2JjN2JmY2QtOTVhOS1hNjI2LThlOGYtMzdiOTcxOWFh
OWRkIiwiR1BVLWZlMTBhZDNkLTg4M2QtYTU5MC1kNDA1LWUwYTU2OTNiMGFmMCIsIkdQVS02Yjgy
OTY5OS0wYjRjLTEyZjAtMGMyOC04Y2Y0NmMyNGMxODUiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZh
bHNlIiwiaG9zdF9yb2xlIjoiZm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25f
c3RhdHVzIjoibGVhZGVyX2VsZWN0ZWQifSx7InZlcnNpb24iOjExMywiaG9zdF9udW1iZXIiOjIs
ImlkIjoiMTcyLjMxLjMzLjMyIiwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzMi12MTAwIiwic3RhdHVz
IjoicnVubmluZyIsImh0dHBkX3N0YXR1cyI6ImRpc2FibGVkIiwic3RhdHNfc3RhdHVzIjoicnVu
bmluZyIsIm1sX3N0YXR1cyI6ImRpc2FibGVkIiwicXVlcnlfcGxhbm5lcl9zdGF0dXMiOiJzdG9w
cGVkIiwicmV2ZWFsX3N0YXR1cyI6InN0b3BwZWQiLCJ0b3RhbF9tZW1vcnkiOjgxMDIwMTIzMTM2
MCwic3RhcnRfdGltZSI6MTcwODEwMzQ1MiwibmV0d29ya19pcHMiOlsiMTcyLjMxLjMzLjMyIl0s
ImdwdV9pZHMiOlsiR1BVLTA5MGM0NDMwLWRiYmUtMWUxYS03ZjdmLWExODI3ODNhZDIzMSIsIkdQ
VS1iZTE0YjVjZS1iNDExLTQ4Y2EtYTlmZi01YTA2YzdhNmYzOTgiLCJHUFUtNGIxYTU2ODgtMGU4
Yy1jYzk0LTgzM2ItYzJmMzllOTk1M2I4IiwiR1BVLTEyZWJhNDYzLTgzMmUtMTA4Yi1lY2IyLTVj
OWFmOGRhNjE2NCJdLCJhY2NlcHRzX2ZhaWxvdmVyIjoiZmFsc2UiLCJob3N0X3JvbGUiOiJmb2xs
b3dlciIsImhvc3RfdGVybSI6MCwiaG9zdF9lbGVjdGlvbl9zdGF0dXMiOiJsZWFkZXJfZWxlY3Rl
ZCJ9LHsidmVyc2lvbiI6MTEzLCJob3N0X251bWJlciI6MywiaWQiOiIxNzIuMzEuMzMuMzMiLCJo
b3N0bmFtZSI6IjMwMC0zMDMtdTMzLXYxMDAiLCJzdGF0dXMiOiJydW5uaW5nIiwiaHR0cGRfc3Rh
dHVzIjoiZGlzYWJsZWQiLCJzdGF0c19zdGF0dXMiOiJydW5uaW5nIiwibWxfc3RhdHVzIjoiZGlz
YWJsZWQiLCJxdWVyeV9wbGFubmVyX3N0YXR1cyI6InN0b3BwZWQiLCJyZXZlYWxfc3RhdHVzIjoi
c3RvcHBlZCIsInRvdGFsX21lbW9yeSI6ODEwMjAxMjM5NTUyLCJzdGFydF90aW1lIjoxNzA4MTAz
NDUyLCJuZXR3b3JrX2lwcyI6WyIxNzIuMzEuMzMuMzMiXSwiZ3B1X2lkcyI6WyJHUFUtMjYzMzFh
MDctMTc1Ni1mMDY2LTFlNWEtMzc1M2Y1ZTViYzc4IiwiR1BVLWE0MWFhMTg3LTQ1NmQtNjBiMy04
ZmM5LWI4YjMzZWFlMjFiYyIsIkdQVS01NWRlZjYxOS0wMTE2LWViZjctMzMwMy03ZDkzMmRmYzcw
ZmYiLCJHUFUtNGM5YWYzODgtYjlmYi03MWQ5LWZiNDUtODMwYTM4MTIwMGQzIl0sImFjY2VwdHNf
ZmFpbG92ZXIiOiJmYWxzZSIsImhvc3Rfcm9sZSI6ImZvbGxvd2VyIiwiaG9zdF90ZXJtIjowLCJo
b3N0X2VsZWN0aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9uIjoxMTMsImhv
c3RfbnVtYmVyIjo0LCJpZCI6IjE3Mi4zMS4zMy4zNCIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzQt
djEwMCIsInN0YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRz
X3N0YXR1cyI6InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJf
c3RhdHVzIjoic3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90YWxfbWVtb3J5
Ijo4MTAyMDEyMTA4ODAsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3
Mi4zMS4zMy4zNCJdLCJncHVfaWRzIjpbIkdQVS02OGUwOWNmYy1mOWE0LTJhMTQtNTdhNC05NDgz
YjkxYzJkOWEiLCJHUFUtM2RmMjE2ZTgtZmU3NC0wNDdhLTk1YWMtNzJlMmNiZWNiNTIyIiwiR1BV
LTE0ZDQ0Yjk4LWIwNDItY2I4MS0xZGQzLTIwZDRmNjljODljYSIsIkdQVS0zNDA2NzMwYi1iZWFk
LWM1MGEtNDZlYi1lMGEyYzJiZjZlNzYiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9z
dF9yb2xlIjoiZm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25fc3RhdHVzIjoi
bGVhZGVyX2VsZWN0ZWQifSx7InZlcnNpb24iOjExMywiaG9zdF9udW1iZXIiOjUsImlkIjoiMTcy
LjMxLjMzLjM1IiwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzNS12MTAwIiwic3RhdHVzIjoicnVubmlu
ZyIsImh0dHBkX3N0YXR1cyI6ImRpc2FibGVkIiwic3RhdHNfc3RhdHVzIjoicnVubmluZyIsIm1s
X3N0YXR1cyI6ImRpc2FibGVkIiwicXVlcnlfcGxhbm5lcl9zdGF0dXMiOiJzdG9wcGVkIiwicmV2
ZWFsX3N0YXR1cyI6InN0b3BwZWQiLCJ0b3RhbF9tZW1vcnkiOjgxMDIwMTI0Nzc0NCwic3RhcnRf
dGltZSI6MTcwODEwMzQ1MiwibmV0d29ya19pcHMiOlsiMTcyLjMxLjMzLjM1Il0sImdwdV9pZHMi
OlsiR1BVLWY3MzAyYWJjLTllYWEtMTRjOS1mNDI2LTE2M2RmM2RhOGMyNiIsIkdQVS03N2RkN2Q0
OC1mOTgwLWZkMDYtNzIyYy0xYzViOTMyMTgyMDMiLCJHUFUtNTUyYjIwYTUtNTdlNi00OTg2LWJl
MmItMmIzNzhmZDRiY2FhIiwiR1BVLTYzMDUzMTYyLTMwN2YtNTVjNS1hOTc0LTU4ZGZlODQzNDJi
MiJdLCJhY2NlcHRzX2ZhaWxvdmVyIjoiZmFsc2UiLCJob3N0X3JvbGUiOiJmb2xsb3dlciIsImhv
c3RfdGVybSI6MCwiaG9zdF9lbGVjdGlvbl9zdGF0dXMiOiJsZWFkZXJfZWxlY3RlZCJ9LHsidmVy
c2lvbiI6MTEzLCJob3N0X251bWJlciI6NiwiaWQiOiIxNzIuMzEuMzMuMzYiLCJob3N0bmFtZSI6
IjMwMC0zMDMtdTM2LXYxMDAiLCJzdGF0dXMiOiJydW5uaW5nIiwiaHR0cGRfc3RhdHVzIjoiZGlz
YWJsZWQiLCJzdGF0c19zdGF0dXMiOiJydW5uaW5nIiwibWxfc3RhdHVzIjoiZGlzYWJsZWQiLCJx
dWVyeV9wbGFubmVyX3N0YXR1cyI6InN0b3BwZWQiLCJyZXZlYWxfc3RhdHVzIjoic3RvcHBlZCIs
InRvdGFsX21lbW9yeSI6ODEwMjAxMjIzMTY4LCJzdGFydF90aW1lIjoxNzA4MTAzNDUyLCJuZXR3
b3JrX2lwcyI6WyIxNzIuMzEuMzMuMzYiXSwiZ3B1X2lkcyI6WyJHUFUtOGU5ZDRhZGItYzYxMS04
MmYwLTNmZTQtZjFmMmMzZTZhNDRmIiwiR1BVLTRjNmFiYWM5LTc0ZWMtZjc4Yy1mYWE5LTQ3NDZk
MDA1N2FiOCIsIkdQVS05NjI1YTc4Yy1lMmFlLTdmMDktOWNjZi1lZTA0OTk1MjYzMTAiLCJHUFUt
YWI3NTRhM2MtNjA4Ni1iYjUxLWU5NGEtM2NmNGExNDkwNWJhIl0sImFjY2VwdHNfZmFpbG92ZXIi
OiJmYWxzZSIsImhvc3Rfcm9sZSI6ImZvbGxvd2VyIiwiaG9zdF90ZXJtIjowLCJob3N0X2VsZWN0
aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9uIjoxMTMsImhvc3RfbnVtYmVy
Ijo3LCJpZCI6IjE3Mi4zMS4zMy4zNyIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzctdjEwMCIsInN0
YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRzX3N0YXR1cyI6
InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJfc3RhdHVzIjoi
c3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90YWxfbWVtb3J5Ijo4MTAyMDEy
MzU0NTYsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3Mi4zMS4zMy4z
NyJdLCJncHVfaWRzIjpbIkdQVS1lYTE4ZDM4OC1lMmIzLTQyODMtYTZiNS1hMzIzYTQ4NzI1YTki
LCJHUFUtZGNkODRlZmItOTRjYS1iNDk3LThjMTUtN2EzMjY5NDBjMWViIiwiR1BVLTkxNjAxYTVl
LTk3M2YtZDFlNC02ZTFkLWY2NTUyOTRkMzQ2MCIsIkdQVS0wN2ZhNGRiNi05ZTU1LWI2MWYtOTA5
YS04NWM0ZDFiZWIwODgiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9zdF9yb2xlIjoi
Zm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25fc3RhdHVzIjoibGVhZGVyX2Vs
ZWN0ZWQifV19Fmh0dHBfc2VydmVyoAN7ImNvbm5lY3Rpb25zIjp7ImN1cnJlbnQiOjEsIm1heF9j
b25jdXJyZW50IjoxMTYsInF1ZXVlZCI6MCwibWF4X3F1ZXVlZF9hbGxvd2VkIjo2NTUzNiwidG90
YWwiOjMyNTg4MiwicmVmdXNlZCI6MCwidGhyZWFkcyI6Mn0sInRocmVhZHMiOnsidXNlZCI6Miwi
Y2FwYWNpdHkiOjUxMiwiYWxsb2NhdGVkIjo4LCJhdmFpbGFibGUiOjUxMCwic3RhY2tfc2l6ZSI6
MH19FG1pZ3JhdGlvbnMueyJjb3VudCI6MCwic3RhdHVzIjpbXX0KcmFua3PgUnsiY291bnQiOjks
InN0YXR1cyI6W3sidmVyc2lvbiI6MTE4LCJyYW5rIjowLCJyYW5rX2lkIjoiMCA6IDE3Mi4zMS4z
My4zMCA6IDMyNzc0MDYiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0YXR1cyI6InJ1bm5pbmci
LCJyZW1vdmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6IjcuMi4wLjEuMjAyNDAyMTQy
MTA5MDYiLCJwaWQiOjMyNzc0MDYsInN0YXJ0X3RpbWUiOjE3MDg0NDM2NTYsInN0YXJ0X3RpbWVf
c3RyIjoiVHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291bnQiOjEsImFjY2VwdGlu
Z19qb2JzIjp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6ZmFsc2UsImhvc3RuYW1l
IjoiMzAwLTMwMy11MzAtdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzAiLCJncHVfaWRzIjpb
IkdQVS1hMDdjMGU0OC05NDhhLTQwNWMtNDNiNy03Mzc0ZDJkZGMwOGMiXSwiZ3B1X2luZGV4Ijow
LCJncHVfaW5kaWNlcyI6WzBdfSx7InZlcnNpb24iOjEwMiwicmFuayI6MSwicmFua19pZCI6IjEg
OiAxNzIuMzEuMzMuMzAgOiAzMjc4NTYwIiwicmFua19tb2RlIjoicnVuIiwicmFua19zdGF0dXMi
OiJydW5uaW5nIiwicmVtb3ZhbF9zdGF0dXMiOiJub25lIiwiYXBwX3ZlcnNpb24iOiI3LjIuMC4x
LjIwMjQwMjE0MjEwOTA2IiwicGlkIjozMjc4NTYwLCJzdGFydF90aW1lIjoxNzA4NDQzNjU2LCJz
dGFydF90aW1lX3N0ciI6IlR1ZSBGZWIgMjAgMTU6NDA6NTYgMjAyNCIsInN0YXJ0X2NvdW50Ijox
LCJhY2NlcHRpbmdfam9icyI6dHJ1ZSwibmV3X3JhbmsiOmZhbHNlLCJyZWFkX29ubHkiOmZhbHNl
LCJob3N0bmFtZSI6IjMwMC0zMDMtdTMwLXYxMDAiLCJob3N0X2lkIjoiMTcyLjMxLjMzLjMwIiwi
Z3B1X2lkcyI6WyJHUFUtYTA3YzBlNDgtOTQ4YS00MDVjLTQzYjctNzM3NGQyZGRjMDhjIiwiR1BV
LTUzZTFiN2I0LTY4YjgtYTI4Yy0xMGYzLWRiNmEwNTk3ZmJiNCIsIkdQVS00MWZmOTFmYi1lY2I5
LTBhNWItZDQ3Yy1kNWJiN2VmMTAzOGEiLCJHUFUtYTk0MGIxMjYtYjFhNC05ZjA3LTQ0ZTEtNjM2
YzM3OTg5ZWNmIl0sImdwdV9pbmRleCI6MCwiZ3B1X2luZGljZXMiOlswLDEsMiwzXX0seyJ2ZXJz
aW9uIjoxMDIsInJhbmsiOjIsInJhbmtfaWQiOiIyIDogMTcyLjMxLjMzLjMxIDogMTU0NDUxOSIs
InJhbmtfbW9kZSI6InJ1biIsInJhbmtfc3RhdHVzIjoicnVubmluZyIsInJlbW92YWxfc3RhdHVz
Ijoibm9uZSIsImFwcF92ZXJzaW9uIjoiNy4yLjAuMS4yMDI0MDIxNDIxMDkwNiIsInBpZCI6MTU0
NDUxOSwic3RhcnRfdGltZSI6MTcwODQ0MzY1Niwic3RhcnRfdGltZV9zdHIiOiJUdWUgRmViIDIw
IDE1OjQwOjU2IDIwMjQiLCJzdGFydF9jb3VudCI6MSwiYWNjZXB0aW5nX2pvYnMiOnRydWUsIm5l
d19yYW5rIjpmYWxzZSwicmVhZF9vbmx5IjpmYWxzZSwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzMS12
MTAwIiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zMSIsImdwdV9pZHMiOlsiR1BVLTEzMmYxZGRiLWE1
ZjQtMjBkMy0xMTIyLTZkMzU5OWFmY2YxZCIsIkdQVS0zYmM3YmZjZC05NWE5LWE2MjYtOGU4Zi0z
N2I5NzE5YWE5ZGQiLCJHUFUtZmUxMGFkM2QtODgzZC1hNTkwLWQ0MDUtZTBhNTY5M2IwYWYwIiwi
R1BVLTZiODI5Njk5LTBiNGMtMTJmMC0wYzI4LThjZjQ2YzI0YzE4NSJdLCJncHVfaW5kZXgiOjAs
ImdwdV9pbmRpY2VzIjpbMCwxLDIsM119LHsidmVyc2lvbiI6MTAwLCJyYW5rIjozLCJyYW5rX2lk
IjoiMyA6IDE3Mi4zMS4zMy4zMiA6IDE0Mjk2ODUiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0
YXR1cyI6InJ1bm5pbmciLCJyZW1vdmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6Ijcu
Mi4wLjEuMjAyNDAyMTQyMTA5MDYiLCJwaWQiOjE0Mjk2ODUsInN0YXJ0X3RpbWUiOjE3MDg0NDM2
NTYsInN0YXJ0X3RpbWVfc3RyIjoiVHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291
bnQiOjEsImFjY2VwdGluZ19qb2JzIjp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6
ZmFsc2UsImhvc3RuYW1lIjoiMzAwLTMwMy11MzItdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMu
MzIiLCJncHVfaWRzIjpbIkdQVS0wOTBjNDQzMC1kYmJlLTFlMWEtN2Y3Zi1hMTgyNzgzYWQyMzEi
LCJHUFUtYmUxNGI1Y2UtYjQxMS00OGNhLWE5ZmYtNWEwNmM3YTZmMzk4IiwiR1BVLTRiMWE1Njg4
LTBlOGMtY2M5NC04MzNiLWMyZjM5ZTk5NTNiOCIsIkdQVS0xMmViYTQ2My04MzJlLTEwOGItZWNi
Mi01YzlhZjhkYTYxNjQiXSwiZ3B1X2luZGV4IjowLCJncHVfaW5kaWNlcyI6WzAsMSwyLDNdfSx7
InZlcnNpb24iOjExNiwicmFuayI6NCwicmFua19pZCI6IjQgOiAxNzIuMzEuMzMuMzMgOiAxNjA4
NzYxIiwicmFua19tb2RlIjoicnVuIiwicmFua19zdGF0dXMiOiJydW5uaW5nIiwicmVtb3ZhbF9z
dGF0dXMiOiJub25lIiwiYXBwX3ZlcnNpb24iOiI3LjIuMC4xLjIwMjQwMjE0MjEwOTA2IiwicGlk
IjoxNjA4NzYxLCJzdGFydF90aW1lIjoxNzA4NDQzNjU1LCJzdGFydF90aW1lX3N0ciI6IlR1ZSBG
ZWIgMjAgMTU6NDA6NTUgMjAyNCIsInN0YXJ0X2NvdW50IjoxLCJhY2NlcHRpbmdfam9icyI6dHJ1
ZSwibmV3X3JhbmsiOmZhbHNlLCJyZWFkX29ubHkiOmZhbHNlLCJob3N0bmFtZSI6IjMwMC0zMDMt
dTMzLXYxMDAiLCJob3N0X2lkIjoiMTcyLjMxLjMzLjMzIiwiZ3B1X2lkcyI6WyJHUFUtMjYzMzFh
MDctMTc1Ni1mMDY2LTFlNWEtMzc1M2Y1ZTViYzc4IiwiR1BVLWE0MWFhMTg3LTQ1NmQtNjBiMy04
ZmM5LWI4YjMzZWFlMjFiYyIsIkdQVS01NWRlZjYxOS0wMTE2LWViZjctMzMwMy03ZDkzMmRmYzcw
ZmYiLCJHUFUtNGM5YWYzODgtYjlmYi03MWQ5LWZiNDUtODMwYTM4MTIwMGQzIl0sImdwdV9pbmRl
eCI6MCwiZ3B1X2luZGljZXMiOlswLDEsMiwzXX0seyJ2ZXJzaW9uIjoxMDIsInJhbmsiOjUsInJh
bmtfaWQiOiI1IDogMTcyLjMxLjMzLjM0IDogMTY2MDEwMiIsInJhbmtfbW9kZSI6InJ1biIsInJh
bmtfc3RhdHVzIjoicnVubmluZyIsInJlbW92YWxfc3RhdHVzIjoibm9uZSIsImFwcF92ZXJzaW9u
IjoiNy4yLjAuMS4yMDI0MDIxNDIxMDkwNiIsInBpZCI6MTY2MDEwMiwic3RhcnRfdGltZSI6MTcw
ODQ0MzY1Niwic3RhcnRfdGltZV9zdHIiOiJUdWUgRmViIDIwIDE1OjQwOjU2IDIwMjQiLCJzdGFy
dF9jb3VudCI6MSwiYWNjZXB0aW5nX2pvYnMiOnRydWUsIm5ld19yYW5rIjpmYWxzZSwicmVhZF9v
bmx5IjpmYWxzZSwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzNC12MTAwIiwiaG9zdF9pZCI6IjE3Mi4z
MS4zMy4zNCIsImdwdV9pZHMiOlsiR1BVLTY4ZTA5Y2ZjLWY5YTQtMmExNC01N2E0LTk0ODNiOTFj
MmQ5YSIsIkdQVS0zZGYyMTZlOC1mZTc0LTA0N2EtOTVhYy03MmUyY2JlY2I1MjIiLCJHUFUtMTRk
NDRiOTgtYjA0Mi1jYjgxLTFkZDMtMjBkNGY2OWM4OWNhIiwiR1BVLTM0MDY3MzBiLWJlYWQtYzUw
YS00NmViLWUwYTJjMmJmNmU3NiJdLCJncHVfaW5kZXgiOjAsImdwdV9pbmRpY2VzIjpbMCwxLDIs
M119LHsidmVyc2lvbiI6MTAyLCJyYW5rIjo2LCJyYW5rX2lkIjoiNiA6IDE3Mi4zMS4zMy4zNSA6
IDEzNzk3MTUiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0YXR1cyI6InJ1bm5pbmciLCJyZW1v
dmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6IjcuMi4wLjEuMjAyNDAyMTQyMTA5MDYi
LCJwaWQiOjEzNzk3MTUsInN0YXJ0X3RpbWUiOjE3MDg0NDM2NTYsInN0YXJ0X3RpbWVfc3RyIjoi
VHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291bnQiOjEsImFjY2VwdGluZ19qb2Jz
Ijp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6ZmFsc2UsImhvc3RuYW1lIjoiMzAw
LTMwMy11MzUtdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzUiLCJncHVfaWRzIjpbIkdQVS1m
NzMwMmFiYy05ZWFhLTE0YzktZjQyNi0xNjNkZjNkYThjMjYiLCJHUFUtNzdkZDdkNDgtZjk4MC1m
ZDA2LTcyMmMtMWM1YjkzMjE4MjAzIiwiR1BVLTU1MmIyMGE1LTU3ZTYtNDk4Ni1iZTJiLTJiMzc4
ZmQ0YmNhYSIsIkdQVS02MzA1MzE2Mi0zMDdmLTU1YzUtYTk3NC01OGRmZTg0MzQyYjIiXSwiZ3B1
X2luZGV4IjowLCJncHVfaW5kaWNlcyI6WzAsMSwyLDNdfSx7InZlcnNpb24iOjEwMiwicmFuayI6
NywicmFua19pZCI6IjcgOiAxNzIuMzEuMzMuMzYgOiAxMzc5NTM5IiwicmFua19tb2RlIjoicnVu
IiwicmFua19zdGF0dXMiOiJydW5uaW5nIiwicmVtb3ZhbF9zdGF0dXMiOiJub25lIiwiYXBwX3Zl
cnNpb24iOiI3LjIuMC4xLjIwMjQwMjE0MjEwOTA2IiwicGlkIjoxMzc5NTM5LCJzdGFydF90aW1l
IjoxNzA4NDQzNjU2LCJzdGFydF90aW1lX3N0ciI6IlR1ZSBGZWIgMjAgMTU6NDA6NTYgMjAyNCIs
InN0YXJ0X2NvdW50IjoxLCJhY2NlcHRpbmdfam9icyI6dHJ1ZSwibmV3X3JhbmsiOmZhbHNlLCJy
ZWFkX29ubHkiOmZhbHNlLCJob3N0bmFtZSI6IjMwMC0zMDMtdTM2LXYxMDAiLCJob3N0X2lkIjoi
MTcyLjMxLjMzLjM2IiwiZ3B1X2lkcyI6WyJHUFUtOGU5ZDRhZGItYzYxMS04MmYwLTNmZTQtZjFm
MmMzZTZhNDRmIiwiR1BVLTRjNmFiYWM5LTc0ZWMtZjc4Yy1mYWE5LTQ3NDZkMDA1N2FiOCIsIkdQ
VS05NjI1YTc4Yy1lMmFlLTdmMDktOWNjZi1lZTA0OTk1MjYzMTAiLCJHUFUtYWI3NTRhM2MtNjA4
Ni1iYjUxLWU5NGEtM2NmNGExNDkwNWJhIl0sImdwdV9pbmRleCI6MCwiZ3B1X2luZGljZXMiOlsw
LDEsMiwzXX0seyJ2ZXJzaW9uIjoxMDIsInJhbmsiOjgsInJhbmtfaWQiOiI4IDogMTcyLjMxLjMz
LjM3IDogMTM5Nzg5MyIsInJhbmtfbW9kZSI6InJ1biIsInJhbmtfc3RhdHVzIjoicnVubmluZyIs
InJlbW92YWxfc3RhdHVzIjoibm9uZSIsImFwcF92ZXJzaW9uIjoiNy4yLjAuMS4yMDI0MDIxNDIx
MDkwNiIsInBpZCI6MTM5Nzg5Mywic3RhcnRfdGltZSI6MTcwODQ0MzY1Niwic3RhcnRfdGltZV9z
dHIiOiJUdWUgRmViIDIwIDE1OjQwOjU2IDIwMjQiLCJzdGFydF9jb3VudCI6MSwiYWNjZXB0aW5n
X2pvYnMiOnRydWUsIm5ld19yYW5rIjpmYWxzZSwicmVhZF9vbmx5IjpmYWxzZSwiaG9zdG5hbWUi
OiIzMDAtMzAzLXUzNy12MTAwIiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zNyIsImdwdV9pZHMiOlsi
R1BVLWVhMThkMzg4LWUyYjMtNDI4My1hNmI1LWEzMjNhNDg3MjVhOSIsIkdQVS1kY2Q4NGVmYi05
NGNhLWI0OTctOGMxNS03YTMyNjk0MGMxZWIiLCJHUFUtOTE2MDFhNWUtOTczZi1kMWU0LTZlMWQt
ZjY1NTI5NGQzNDYwIiwiR1BVLTA3ZmE0ZGI2LTllNTUtYjYxZi05MDlhLTg1YzRkMWJlYjA4OCJd
LCJncHVfaW5kZXgiOjAsImdwdV9pbmRpY2VzIjpbMCwxLDIsM119XX0Oc3ltYm9scxZ7ImNvdW50
IjoxfQxzeXN0ZW2qA3siaWQiOiJLaW5ldGljYSAzMDAtMzAzLXUzMC12MTAwIiwic3RhcnRfdGlt
ZSI6MTcwODEwMzQ1Miwic3RhdHVzIjoicnVubmluZyIsImNsdXN0ZXJfbGVhZGVyIjoiMTcyLjMx
LjMzLjMwIiwidmVyc2lvbiI6MjUsImNsdXN0ZXJfb3BlcmF0aW9uX3J1bm5pbmciOiJmYWxzZSIs
ImNsdXN0ZXJfb3BlcmF0aW9uX3N0YXR1cyI6IiIsIm9mZmxpbmVfc3RhdHVzIjoiZmFsc2UifQh0
ZXh03AV7ImNvdW50Ijo4LCJzdGF0dXMiOlt7InZlcnNpb24iOjMzLCJyYW5rIjoxLCJzdGF0dXMi
OiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywicmFuayI6Miwic3RhdHVzIjoicnVubmluZyJ9LHsi
dmVyc2lvbiI6MzMsInJhbmsiOjMsInN0YXR1cyI6InJ1bm5pbmcifSx7InZlcnNpb24iOjMzLCJy
YW5rIjo0LCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywicmFuayI6NSwic3RhdHVz
IjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzMsInJhbmsiOjYsInN0YXR1cyI6InJ1bm5pbmcifSx7
InZlcnNpb24iOjMzLCJyYW5rIjo3LCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywi
cmFuayI6OCwic3RhdHVzIjoicnVubmluZyJ9XX0QdHJpZ2dlcnNeeyJ0b3RhbF9jb3VudCI6MCwi
cmFuZ2VfY291bnQiOjAsIm5haV9jb3VudCI6MH0AAAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:34 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00067'
status:
code: 200
message: OK
- request:
body: "\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/system/properties
response:
body:
string: !!binary |
BE9LAD5zaG93X3N5c3RlbV9wcm9wZXJ0aWVzX3Jlc3BvbnNlvPUCpAk8Y29uZi5haS5hcGkuY29u
bmVjdGlvbl90aW1lb3V0BDkwHmNvbmYuYWkuYXBpLmtleQAoY29uZi5haS5hcGkucHJvdmlkZXIW
a2luZXRpY2FsbG0eY29uZi5haS5hcGkudXJsSGh0dHA6Ly8xNzIuMzEuMzEuMTM6ODA1MC9zcWwv
c3VnZ2VzdDBjb25mLmFsZXJ0X2Rpc2tfYWJzb2x1dGUANGNvbmYuYWxlcnRfZGlza19wZXJjZW50
YWdlGDEsIDUsIDEwLCAyMBxjb25mLmFsZXJ0X2V4ZQAsY29uZi5hbGVydF9ob3N0X3N0YXR1cwhU
UlVFOmNvbmYuYWxlcnRfaG9zdF9zdGF0dXNfZmlsdGVyIGZhdGFsX2luaXRfZXJyb3I4Y29uZi5h
bGVydF9tYXhfc3RvcmVkX2FsZXJ0cwYxMDA0Y29uZi5hbGVydF9tZW1vcnlfYWJzb2x1dGUAOGNv
bmYuYWxlcnRfbWVtb3J5X3BlcmNlbnRhZ2UYMSwgNSwgMTAsIDIwNGNvbmYuYWxlcnRfcmFua19j
dWRhX2Vycm9yCFRSVUVEY29uZi5hbGVydF9yYW5rX2ZhbGxiYWNrX2FsbG9jYXRvcghUUlVFLGNv
bmYuYWxlcnRfcmFua19zdGF0dXMIVFJVRTpjb25mLmFsZXJ0X3Jhbmtfc3RhdHVzX2ZpbHRlclhm
YXRhbF9pbml0X2Vycm9yLCBub3RfcmVzcG9uZGluZywgdGVybWluYXRlZB5jb25mLmF1ZGl0X2Jv
ZHkKRkFMU0UeY29uZi5hdWRpdF9kYXRhCkZBTFNFJGNvbmYuYXVkaXRfaGVhZGVycwpGQUxTRT5j
b25mLmF1dG9fY3JlYXRlX2V4dGVybmFsX3VzZXJzCkZBTFNFTGNvbmYuYnVpbGRfbWF0ZXJpYWxp
emVkX3ZpZXdzX29uX3N0YXJ0Em9uX2RlbWFuZDhjb25mLmJ1aWxkX3BrX2luZGV4X29uX3N0YXJ0
Em9uX2RlbWFuZDhjb25mLmNodW5rX2NvbHVtbl9tYXhfbWVtb3J5EjUxMjAwMDAwMCpjb25mLmNo
dW5rX21heF9tZW1vcnkUODE5MjAwMDAwMB5jb25mLmNodW5rX3NpemUOODAwMDAwMCJjb25mLmNs
dXN0ZXJfbmFtZRxwcm9kdWN0aW9uLW5ld0Bjb25mLmNvbmN1cnJlbnRfa2VybmVsX2V4ZWN1dGlv
bghUUlVFOmNvbmYuZGVmYXVsdF9wcmltYXJ5X2tleV90eXBlDG1lbW9yeSBjb25mLmRlZmF1bHRf
dHRsBDIwLGNvbmYuZGlzYWJsZV9jbGVhcl9hbGwIVFJVRT5jb25mLmVncmVzc19wYXJxdWV0X2Nv
bXByZXNzaW9uDHNuYXBweSRjb25mLmVuYWJsZV9hbGVydHMIVFJVRSJjb25mLmVuYWJsZV9hdWRp
dApGQUxTRTJjb25mLmVuYWJsZV9hdXRob3JpemF0aW9uCFRSVUVGY29uZi5lbmFibGVfZXh0ZXJu
YWxfYXV0aGVudGljYXRpb24KRkFMU0UwY29uZi5lbmFibGVfZ3JhcGhfc2VydmVyCFRSVUUcY29u
Zi5lbmFibGVfaGEKRkFMU0UuY29uZi5lbmFibGVfaHR0cGRfcHJveHkKRkFMU0UcY29uZi5lbmFi
bGVfbWwKRkFMU0U2Y29uZi5lbmFibGVfb3BlbmdsX3JlbmRlcmVyCFRSVUVAY29uZi5lbmFibGVf
b3ZlcmxhcHBlZF9lcXVpX2pvaW4IVFJVRTRjb25mLmVuYWJsZV9wb3N0Z3Jlc19wcm94eQhUUlVF
PmNvbmYuZW5hYmxlX3ByZWRpY2F0ZV9lcXVpX2pvaW4IVFJVRSJjb25mLmVuYWJsZV9wcm9jcwhU
UlVFJGNvbmYuZW5hYmxlX3JldmVhbAhUUlVFMGNvbmYuZW5hYmxlX3N0YXRzX3NlcnZlcghUUlVF
LmNvbmYuZW5hYmxlX3RleHRfc2VhcmNoCFRSVUU8Y29uZi5lbmFibGVfdmVjdG9ydGlsZV9zZXJ2
aWNlCFRSVUUsY29uZi5lbmFibGVfdnJhbV9jYWNoZQhUUlVFPmNvbmYuZW5hYmxlX3dvcmtlcl9o
dHRwX3NlcnZlcnMIVFJVRTJjb25mLmV2ZW50X3NlcnZlcl9hZGRyZXNzGDE3Mi4zMS4zMy4zMDRj
b25mLmV2ZW50X3NlcnZlcl9pbnRlcm5hbApGQUxTRTpjb25mLmV4dGVybmFsX2ZpbGVzX2RpcmVj
dG9yeSAvbmZzL2RhdGEvcHVibGljFGNvbmYuZ21faXAYMTcyLjMxLjMzLjMwGmNvbmYuZ21fcG9y
dDEINTU1MiBjb25mLmdtX3B1Yl9wb3J0CDU1NTMoY29uZi5ncmFwaC5oZWFkX3BvcnQIODEwMC5j
b25mLmdyYXBoLnNlcnZlcjAuaG9zdApob3N0MC5jb25mLmdyYXBoLnNlcnZlcjAucG9ydAg4MTAx
OGNvbmYuZ3JhcGguc2VydmVyMC5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjEuaG9zdApo
b3N0MS5jb25mLmdyYXBoLnNlcnZlcjEucG9ydAg4MTAyOGNvbmYuZ3JhcGguc2VydmVyMS5yYW1f
bGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjIuaG9zdApob3N0Mi5jb25mLmdyYXBoLnNlcnZlcjIu
cG9ydAg4MTAzOGNvbmYuZ3JhcGguc2VydmVyMi5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZl
cjMuaG9zdApob3N0My5jb25mLmdyYXBoLnNlcnZlcjMucG9ydAg4MTA0OGNvbmYuZ3JhcGguc2Vy
dmVyMy5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjQuaG9zdApob3N0NC5jb25mLmdyYXBo
LnNlcnZlcjQucG9ydAg4MTA1OGNvbmYuZ3JhcGguc2VydmVyNC5yYW1fbGltaXQCMC5jb25mLmdy
YXBoLnNlcnZlcjUuaG9zdApob3N0NS5jb25mLmdyYXBoLnNlcnZlcjUucG9ydAg4MTA2OGNvbmYu
Z3JhcGguc2VydmVyNS5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjYuaG9zdApob3N0Ni5j
b25mLmdyYXBoLnNlcnZlcjYucG9ydAg4MTA3OGNvbmYuZ3JhcGguc2VydmVyNi5yYW1fbGltaXQC
MC5jb25mLmdyYXBoLnNlcnZlcjcuaG9zdApob3N0Ny5jb25mLmdyYXBoLnNlcnZlcjcucG9ydAg4
MTA4OGNvbmYuZ3JhcGguc2VydmVyNy5yYW1fbGltaXQCMBxjb25mLmhhX3F1ZXVlcwAuY29uZi5o
YV9yaW5nX2hlYWRfbm9kZXMAKGNvbmYuaGVhZF9pcF9hZGRyZXNzGDE3Mi4zMS4zMy4zMBxjb25m
LmhlYWRfcG9ydAg5MTkxImNvbmYuaG1faHR0cF9wb3J0CDkzMDA2Y29uZi5ob3N0MF9hY2NlcHRz
X2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDBfYWRkcmVzcxgxNzIuMzEuMzMuMzAeY29uZi5ob3N0
MF9ncHVzDjAsMSwyLDNEY29uZi5ob3N0MF9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8v
MTcyLjMxLjMzLjMwOjkzMDAsY29uZi5ob3N0MF9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMz
LjMwMmNvbmYuaG9zdDBfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMwLGNvbmYuaG9zdDBfcHVi
bGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMChjb25mLmhvc3QwX3JhbV9saW1pdBg2NTY3Njc3
OTk5OTk2Y29uZi5ob3N0MV9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDFfYWRkcmVz
cxgxNzIuMzEuMzMuMzEeY29uZi5ob3N0MV9ncHVzDjAsMSwyLDNEY29uZi5ob3N0MV9ob3N0X21h
bmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjMxOjkzMDAsY29uZi5ob3N0MV9wcml2
YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjMxMmNvbmYuaG9zdDFfcHVibGljX2FkZHJlc3MYMTcy
LjMxLjMzLjMxLGNvbmYuaG9zdDFfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMShjb25m
Lmhvc3QxX3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0Ml9hY2NlcHRzX2ZhaWxvdmVy
CkZBTFNFJGNvbmYuaG9zdDJfYWRkcmVzcxgxNzIuMzEuMzMuMzIeY29uZi5ob3N0Ml9ncHVzDjAs
MSwyLDNEY29uZi5ob3N0Ml9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMz
LjMyOjkzMDAsY29uZi5ob3N0Ml9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjMyMmNvbmYu
aG9zdDJfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMyLGNvbmYuaG9zdDJfcHVibGljX3VybHMm
aHR0cDovLzE3Mi4zMS4zMy4zMihjb25mLmhvc3QyX3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29u
Zi5ob3N0M19hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDNfYWRkcmVzcxgxNzIuMzEu
MzMuMzMeY29uZi5ob3N0M19ncHVzDjAsMSwyLDNEY29uZi5ob3N0M19ob3N0X21hbmFnZXJfcHVi
bGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjMzOjkzMDAsY29uZi5ob3N0M19wcml2YXRlX3VybCZo
dHRwOi8vMTcyLjMxLjMzLjMzMmNvbmYuaG9zdDNfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMz
LGNvbmYuaG9zdDNfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMyhjb25mLmhvc3QzX3Jh
bV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0NF9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNv
bmYuaG9zdDRfYWRkcmVzcxgxNzIuMzEuMzMuMzQeY29uZi5ob3N0NF9ncHVzDjAsMSwyLDNEY29u
Zi5ob3N0NF9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjM0OjkzMDAs
Y29uZi5ob3N0NF9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM0MmNvbmYuaG9zdDRfcHVi
bGljX2FkZHJlc3MYMTcyLjMxLjMzLjM0LGNvbmYuaG9zdDRfcHVibGljX3VybHMmaHR0cDovLzE3
Mi4zMS4zMy4zNChjb25mLmhvc3Q0X3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0NV9h
Y2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDVfYWRkcmVzcxgxNzIuMzEuMzMuMzUeY29u
Zi5ob3N0NV9ncHVzDjAsMSwyLDNEY29uZi5ob3N0NV9ob3N0X21hbmFnZXJfcHVibGljX3VybDBo
dHRwOi8vMTcyLjMxLjMzLjM1OjkzMDAsY29uZi5ob3N0NV9wcml2YXRlX3VybCZodHRwOi8vMTcy
LjMxLjMzLjM1MmNvbmYuaG9zdDVfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjM1LGNvbmYuaG9z
dDVfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zNShjb25mLmhvc3Q1X3JhbV9saW1pdBg2
NTY3Njc3OTk5OTk2Y29uZi5ob3N0Nl9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDZf
YWRkcmVzcxgxNzIuMzEuMzMuMzYeY29uZi5ob3N0Nl9ncHVzDjAsMSwyLDNEY29uZi5ob3N0Nl9o
b3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjM2OjkzMDAsY29uZi5ob3N0
Nl9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM2MmNvbmYuaG9zdDZfcHVibGljX2FkZHJl
c3MYMTcyLjMxLjMzLjM2LGNvbmYuaG9zdDZfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4z
Nihjb25mLmhvc3Q2X3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0N19hY2NlcHRzX2Zh
aWxvdmVyCkZBTFNFJGNvbmYuaG9zdDdfYWRkcmVzcxgxNzIuMzEuMzMuMzceY29uZi5ob3N0N19n
cHVzDjAsMSwyLDNEY29uZi5ob3N0N19ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcy
LjMxLjMzLjM3OjkzMDAsY29uZi5ob3N0N19wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM3
MmNvbmYuaG9zdDdfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjM3LGNvbmYuaG9zdDdfcHVibGlj
X3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zNyhjb25mLmhvc3Q3X3JhbV9saW1pdBg2NTY3Njc3OTk5
OTkqY29uZi5odHRwZF9wcm94eV9wb3J0CDgwODI0Y29uZi5odHRwZF9wcm94eV91c2VfaHR0cHMK
RkFMU0U4Y29uZi5pbml0X3dpdGhfbm93X2F0X3dvcmtlcgpGQUxTRSpjb25mLmthZmthLmJhdGNo
X3NpemUIMTAwMC5jb25mLmthZmthLnBvbGxfdGltZW91dAIwKGNvbmYua2Fma2Eud2FpdF90aW1l
BDMwLmNvbmYua2VybmVsX29tcF90aHJlYWRzAjQ0Y29uZi5sb2FkX3ZlY3RvcnNfb25fc3RhcnQS
b25fZGVtYW5kHmNvbmYubG9ja19hdWRpdApGQUxTRTZjb25mLm1heF9hdXRvX3ZpZXdfdXBkYXRv
cnMCMzZjb25mLm1heF9jb25jdXJyZW50X2tlcm5lbHMCMDJjb25mLm1heF9nZXRfcmVjb3Jkc19z
aXplCjIwMDAwKmNvbmYubWF4X2hlYXRtYXBfc2l6ZQgzMDcyKmNvbmYubWF4X2h0dHBfdGhyZWFk
cwY1MTI6Y29uZi5tZXRhZGF0YV9zdG9yZV9zeW5jX21vZGUMbm9ybWFsKmNvbmYubWluX2h0dHBf
dGhyZWFkcwI4MGNvbmYubWluX3Bhc3N3b3JkX2xlbmd0aAIwIGNvbmYubWxfYXBpX3BvcnQIOTE4
N1xjb25mLm5wMS5idWlsZF9tYXRlcmlhbGl6ZWRfdmlld3Nfb25fbWlncmF0aW9uDGFsd2F5c0hj
b25mLm5wMS5idWlsZF9wa19pbmRleF9vbl9taWdyYXRpb24MYWx3YXlzRGNvbmYubnAxLmNyaXRp
Y2FsX3Jlc3RhcnRfYXR0ZW1wdHMCMTpjb25mLm5wMS5lbmFibGVfaGVhZF9mYWlsb3ZlcgpGQUxT
RT5jb25mLm5wMS5lbmFibGVfd29ya2VyX2ZhaWxvdmVyCkZBTFNFSmNvbmYubnAxLmZhaWxvdmVy
X2Rpc3RyaWJ1dGlvbl9wb2xpY3kIZmlsbERjb25mLm5wMS5sb2FkX3ZlY3RvcnNfb25fbWlncmF0
aW9uDGFsd2F5c0xjb25mLm5wMS5ub25fY3JpdGljYWxfcmVzdGFydF9hdHRlbXB0cwIzPGNvbmYu
bnAxLnJhbmtfcmVzdGFydF9hdHRlbXB0cwIxMmNvbmYubnAxLnJlc3RhcnRfaW50ZXJ2YWwENjA2
Y29uZi5ucDEuc3RvcmFnZV9hcGlfc2NyaXB0AChjb25mLm51bWJlcl9vZl9ob3N0cwI4KGNvbmYu
bnVtYmVyX29mX3JhbmtzAjk8Y29uZi5vcGVuZ2xfYW50aWFsaWFzaW5nX2xldmVsAjAsY29uZi5w
ZXJzaXN0X2RpcmVjdG9yeTAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8sY29uZi5wZXJzaXN0X3N5
bmNfdGltZQI1NmNvbmYucG9pbnRfcmVuZGVyX3RocmVzaG9sZAwxMDAwMDBWY29uZi5wb3N0Z3Jl
c19wcm94eS5pZGxlX2Nvbm5lY3Rpb25fdGltZW91dAYzMDBUY29uZi5wb3N0Z3Jlc19wcm94eS5t
YXhfcXVldWVkX2Nvbm5lY3Rpb25zAjE+Y29uZi5wb3N0Z3Jlc19wcm94eS5tYXhfdGhyZWFkcwQ2
ND5jb25mLnBvc3RncmVzX3Byb3h5Lm1pbl90aHJlYWRzAjIwY29uZi5wb3N0Z3Jlc19wcm94eS5w
b3J0CDU0MzIuY29uZi5wb3N0Z3Jlc19wcm94eS5zc2wIVFJVRRxjb25mLnJhbmswX2dwdQIwKmNv
bmYucmFuazBfaXBfYWRkcmVzcxgxNzIuMzEuMzMuMzAqY29uZi5yYW5rMV9pcF9hZGRyZXNzGDE3
Mi4zMS4zMy4zMCpjb25mLnJhbmsyX2lwX2FkZHJlc3MYMTcyLjMxLjMzLjMxKmNvbmYucmFuazNf
aXBfYWRkcmVzcxgxNzIuMzEuMzMuMzIqY29uZi5yYW5rNF9pcF9hZGRyZXNzGDE3Mi4zMS4zMy4z
Mypjb25mLnJhbms1X2lwX2FkZHJlc3MYMTcyLjMxLjMzLjM0KmNvbmYucmFuazZfaXBfYWRkcmVz
cxgxNzIuMzEuMzMuMzUqY29uZi5yYW5rN19pcF9hZGRyZXNzGDE3Mi4zMS4zMy4zNipjb25mLnJh
bms4X2lwX2FkZHJlc3MYMTcyLjMxLjMzLjM3KGNvbmYucmVxdWVzdF90aW1lb3V0CDI0MDA2Y29u
Zi5yZXF1aXJlX2F1dGhlbnRpY2F0aW9uCFRSVUVeY29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0
Lm1heF9jcHVfY29uY3VycmVuY3kELTFaY29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0Lm1heF90
aWVyX3ByaW9yaXR5BDEwSmNvbmYucmVzb3VyY2VfZ3JvdXAuZGVmYXVsdC5yYW1fbGltaXQELTFa
Y29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0LnNjaGVkdWxlX3ByaW9yaXR5BDUwTGNvbmYucmVz
b3VyY2VfZ3JvdXAuZGVmYXVsdC52cmFtX2xpbWl0BC0xHGNvbmYucmluZ19uYW1lDmRlZmF1bHRW
Y29uZi5zZWN1cml0eS5leHRlcm5hbC5yYW5nZXIuY2FjaGVfbWludXRlcwQ2MFRjb25mLnNlY3Vy
aXR5LmV4dGVybmFsLnJhbmdlci5zZXJ2aWNlX25hbWUQa2luZXRpY2FCY29uZi5zZWN1cml0eS5l
eHRlcm5hbC5yYW5nZXIudXJsAGBjb25mLnNlY3VyaXR5LmV4dGVybmFsLnJhbmdlcl9hdXRob3Jp
emVyLmFkZHJlc3MyaXBjOi8vL3RtcC9ncHVkYi1yYW5nZXItMHRjb25mLnNlY3VyaXR5LmV4dGVy
bmFsLnJhbmdlcl9hdXRob3JpemVyLnJlbW90ZV9kZWJ1Z19wb3J0AjBgY29uZi5zZWN1cml0eS5l
eHRlcm5hbC5yYW5nZXJfYXV0aG9yaXplci50aW1lb3V0BjEyMCpjb25mLnNldF9tb25pdG9yX3Bv
cnQIOTAwMjZjb25mLnNldF9tb25pdG9yX3Byb3h5X3BvcnQIOTAwMzZjb25mLnNldF9tb25pdG9y
X3F1ZXVlX3NpemUIMTAwMChjb25mLnNoYWRvd19hZ2dfc2l6ZRI1MDAwMDAwMDAwY29uZi5zaGFk
b3dfY3ViZV9lbmFibGVkCFRSVUUuY29uZi5zaGFkb3dfZmlsdGVyX3NpemUSNTAwMDAwMDAwJmNv
bmYuc21fb21wX3RocmVhZHMCMiRjb25mLnNtc19kaXJlY3RvcnkwL21udC9kYXRhL2dwdWRiL3Bl
cnNpc3QvLmNvbmYuc21zX21heF9vcGVuX2ZpbGVzBjEyOEBjb25mLnNxbC5jb3N0X2Jhc2VkX29w
dGltaXphdGlvbgpGQUxTRTRjb25mLnNxbC5kaXN0cmlidXRlZF9qb2lucwhUUlVFPmNvbmYuc3Fs
LmRpc3RyaWJ1dGVkX29wZXJhdGlvbnMIVFJVRS5jb25mLnNxbC5lbmFibGVfcGxhbm5lcghUUlVF
NmNvbmYuc3FsLmZvcmNlX2JpbmFyeV9qb2lucwpGQUxTRTpjb25mLnNxbC5mb3JjZV9iaW5hcnlf
c2V0X29wcwpGQUxTRTZjb25mLnNxbC5tYXhfcGFyYWxsZWxfc3RlcHMCNEBjb25mLnNxbC5tYXhf
dmlld19uZXN0aW5nX2xldmVscwQxNjJjb25mLnNxbC5wYWdpbmdfdGFibGVfdHRsBDIwNmNvbmYu
c3FsLnBhcmFsbGVsX2V4ZWN1dGlvbghUUlVFMGNvbmYuc3FsLnBsYW5fY2FjaGVfc2l6ZQg0MDAw
MGNvbmYuc3FsLnBsYW5uZXIuYWRkcmVzcz5pcGM6Ly8vdG1wL2dwdWRiLXF1ZXJ5LWVuZ2luZS0w
NmNvbmYuc3FsLnBsYW5uZXIubWF4X21lbW9yeQg0MDk2NGNvbmYuc3FsLnBsYW5uZXIubWF4X3N0
YWNrAjZEY29uZi5zcWwucGxhbm5lci5yZW1vdGVfZGVidWdfcG9ydAIwMGNvbmYuc3FsLnBsYW5u
ZXIudGltZW91dAYxMjA0Y29uZi5zcWwucmVzdWx0X2NhY2hlX3NpemUINDAwMDRjb25mLnNxbC5y
ZXN1bHRzLmNhY2hlX3R0bAQ2MDBjb25mLnNxbC5yZXN1bHRzLmNhY2hpbmcIVFJVRUBjb25mLnNx
bC5ydWxlX2Jhc2VkX29wdGltaXphdGlvbghUUlVFPGNvbmYuc3VidGFza19jb25jdXJyZW5jeV9s
aW1pdAI0PmNvbmYuc3ltYm9sb2d5X3JlbmRlcl90aHJlc2hvbGQKMTAwMDBQY29uZi5zeXN0ZW1f
bWV0YWRhdGEuc3RhdHNfYWdncl9yb3djb3VudAoxMDAwMEhjb25mLnN5c3RlbV9tZXRhZGF0YS5z
dGF0c19hZ2dyX3RpbWUCMVJjb25mLnN5c3RlbV9tZXRhZGF0YS5zdGF0c19yZXRlbnRpb25fZGF5
cwQyMSZjb25mLnRhc2tjYWxjX2dwdS4xElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS4yElsw
LDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS4zElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS40
ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS41ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dw
dS42ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS43ElswLDEsMiwzXSZjb25mLnRhc2tjYWxj
X2dwdS44ElswLDEsMiwzXSBjb25mLnRjc19wZXJfdG9tBDQwJmNvbmYudGVtcF9kaXJlY3RvcnkI
L3RtcDJjb25mLnRleHRfaW5kZXhfZGlyZWN0b3J5MC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0LzJj
b25mLnRleHRfaW5kaWNlc19wZXJfdG9tAjJMY29uZi50aWVyLmRpc2swLmRlZmF1bHQuaGlnaF93
YXRlcm1hcmsEOTA6Y29uZi50aWVyLmRpc2swLmRlZmF1bHQubGltaXQYNjAwMDAwMDAwMDAwSmNv
bmYudGllci5kaXNrMC5kZWZhdWx0Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLmRpc2swLmRl
ZmF1bHQucGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlYGNvbmYudGllci5k
aXNrMC5kZWZhdWx0LnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRUhjb25mLnRpZXIuZGlz
azAucmFuazAuaGlnaF93YXRlcm1hcmsEOTA2Y29uZi50aWVyLmRpc2swLnJhbmswLmxpbWl0GDYw
MDAwMDAwMDAwMEZjb25mLnRpZXIuZGlzazAucmFuazAubG93X3dhdGVybWFyawQ4MDRjb25mLnRp
ZXIuZGlzazAucmFuazAucGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlXGNv
bmYudGllci5kaXNrMC5yYW5rMC5zdG9yZV9wZXJzaXN0ZW50X29iamVjdHMKRkFMU0VIY29uZi50
aWVyLmRpc2swLnJhbmsxLmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5kaXNrMC5yYW5rMS5s
aW1pdBg2MDAwMDAwMDAwMDBGY29uZi50aWVyLmRpc2swLnJhbmsxLmxvd193YXRlcm1hcmsEODA0
Y29uZi50aWVyLmRpc2swLnJhbmsxLnBhdGhEL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvL2Rpc2tj
YWNoZVxjb25mLnRpZXIuZGlzazAucmFuazEuc3RvcmVfcGVyc2lzdGVudF9vYmplY3RzCkZBTFNF
SGNvbmYudGllci5kaXNrMC5yYW5rMi5oaWdoX3dhdGVybWFyawQ5MDZjb25mLnRpZXIuZGlzazAu
cmFuazIubGltaXQYNjAwMDAwMDAwMDAwRmNvbmYudGllci5kaXNrMC5yYW5rMi5sb3dfd2F0ZXJt
YXJrBDgwNGNvbmYudGllci5kaXNrMC5yYW5rMi5wYXRoRC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0
Ly9kaXNrY2FjaGVcY29uZi50aWVyLmRpc2swLnJhbmsyLnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0
cwpGQUxTRUhjb25mLnRpZXIuZGlzazAucmFuazMuaGlnaF93YXRlcm1hcmsEOTA2Y29uZi50aWVy
LmRpc2swLnJhbmszLmxpbWl0GDYwMDAwMDAwMDAwMEZjb25mLnRpZXIuZGlzazAucmFuazMubG93
X3dhdGVybWFyawQ4MDRjb25mLnRpZXIuZGlzazAucmFuazMucGF0aEQvbW50L2RhdGEvZ3B1ZGIv
cGVyc2lzdC8vZGlza2NhY2hlXGNvbmYudGllci5kaXNrMC5yYW5rMy5zdG9yZV9wZXJzaXN0ZW50
X29iamVjdHMKRkFMU0VIY29uZi50aWVyLmRpc2swLnJhbms0LmhpZ2hfd2F0ZXJtYXJrBDkwNmNv
bmYudGllci5kaXNrMC5yYW5rNC5saW1pdBg2MDAwMDAwMDAwMDBGY29uZi50aWVyLmRpc2swLnJh
bms0Lmxvd193YXRlcm1hcmsEODA0Y29uZi50aWVyLmRpc2swLnJhbms0LnBhdGhEL21udC9kYXRh
L2dwdWRiL3BlcnNpc3QvL2Rpc2tjYWNoZVxjb25mLnRpZXIuZGlzazAucmFuazQuc3RvcmVfcGVy
c2lzdGVudF9vYmplY3RzCkZBTFNFSGNvbmYudGllci5kaXNrMC5yYW5rNS5oaWdoX3dhdGVybWFy
awQ5MDZjb25mLnRpZXIuZGlzazAucmFuazUubGltaXQYNjAwMDAwMDAwMDAwRmNvbmYudGllci5k
aXNrMC5yYW5rNS5sb3dfd2F0ZXJtYXJrBDgwNGNvbmYudGllci5kaXNrMC5yYW5rNS5wYXRoRC9t
bnQvZGF0YS9ncHVkYi9wZXJzaXN0Ly9kaXNrY2FjaGVcY29uZi50aWVyLmRpc2swLnJhbms1LnN0
b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRUhjb25mLnRpZXIuZGlzazAucmFuazYuaGlnaF93
YXRlcm1hcmsEOTA2Y29uZi50aWVyLmRpc2swLnJhbms2LmxpbWl0GDYwMDAwMDAwMDAwMEZjb25m
LnRpZXIuZGlzazAucmFuazYubG93X3dhdGVybWFyawQ4MDRjb25mLnRpZXIuZGlzazAucmFuazYu
cGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlXGNvbmYudGllci5kaXNrMC5y
YW5rNi5zdG9yZV9wZXJzaXN0ZW50X29iamVjdHMKRkFMU0VIY29uZi50aWVyLmRpc2swLnJhbms3
LmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5kaXNrMC5yYW5rNy5saW1pdBg2MDAwMDAwMDAw
MDBGY29uZi50aWVyLmRpc2swLnJhbms3Lmxvd193YXRlcm1hcmsEODA0Y29uZi50aWVyLmRpc2sw
LnJhbms3LnBhdGhEL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvL2Rpc2tjYWNoZVxjb25mLnRpZXIu
ZGlzazAucmFuazcuc3RvcmVfcGVyc2lzdGVudF9vYmplY3RzCkZBTFNFSGNvbmYudGllci5kaXNr
MC5yYW5rOC5oaWdoX3dhdGVybWFyawQ5MDZjb25mLnRpZXIuZGlzazAucmFuazgubGltaXQYNjAw
MDAwMDAwMDAwRmNvbmYudGllci5kaXNrMC5yYW5rOC5sb3dfd2F0ZXJtYXJrBDgwNGNvbmYudGll
ci5kaXNrMC5yYW5rOC5wYXRoRC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Ly9kaXNrY2FjaGVcY29u
Zi50aWVyLmRpc2swLnJhbms4LnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRVBjb25mLnRp
ZXIuZ2xvYmFsLmNvbmN1cnJlbnRfd2FpdF90aW1lb3V0BjYwMGpjb25mLnRpZXIuZ2xvYmFsLmRl
ZmVyX2NhY2hlX29iamVjdF9ldmljdGlvbnNfdG9fZGlzawhUUlVFUGNvbmYudGllci5wZXJzaXN0
LmRlZmF1bHQuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnBlcnNpc3QuZGVmYXVsdC5saW1p
dBo2NTAwMDAwMDAwMDAwTmNvbmYudGllci5wZXJzaXN0LmRlZmF1bHQubG93X3dhdGVybWFyawQ4
MDxjb25mLnRpZXIucGVyc2lzdC5kZWZhdWx0LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3Qv
OmNvbmYudGllci5wZXJzaXN0LmdyYXBoMC5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpj
b25mLnRpZXIucGVyc2lzdC5ncmFwaDEucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC86Y29u
Zi50aWVyLnBlcnNpc3QuZ3JhcGgyLnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvOmNvbmYu
dGllci5wZXJzaXN0LmdyYXBoMy5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpjb25mLnRp
ZXIucGVyc2lzdC5ncmFwaDQucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC86Y29uZi50aWVy
LnBlcnNpc3QuZ3JhcGg1LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvOmNvbmYudGllci5w
ZXJzaXN0LmdyYXBoNi5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpjb25mLnRpZXIucGVy
c2lzdC5ncmFwaDcucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBlcnNp
c3QucmFuazAuaGlnaF93YXRlcm1hcmsEOTA6Y29uZi50aWVyLnBlcnNpc3QucmFuazAubGltaXQa
NjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVyc2lzdC5yYW5rMC5sb3dfd2F0ZXJtYXJrBDgwOGNv
bmYudGllci5wZXJzaXN0LnJhbmswLnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNvbmYu
dGllci5wZXJzaXN0LnJhbmsxLmhpZ2hfd2F0ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0LnJh
bmsxLmxpbWl0GjY1MDAwMDAwMDAwMDBKY29uZi50aWVyLnBlcnNpc3QucmFuazEubG93X3dhdGVy
bWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5yYW5rMS5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJz
aXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5rMi5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRpZXIu
cGVyc2lzdC5yYW5rMi5saW1pdBo2NTAwMDAwMDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJhbmsy
Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLnBlcnNpc3QucmFuazIucGF0aDAvbW50L2RhdGEv
Z3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBlcnNpc3QucmFuazMuaGlnaF93YXRlcm1hcmsEOTA6
Y29uZi50aWVyLnBlcnNpc3QucmFuazMubGltaXQaNjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVy
c2lzdC5yYW5rMy5sb3dfd2F0ZXJtYXJrBDgwOGNvbmYudGllci5wZXJzaXN0LnJhbmszLnBhdGgw
L21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNvbmYudGllci5wZXJzaXN0LnJhbms0LmhpZ2hfd2F0
ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0LnJhbms0LmxpbWl0GjY1MDAwMDAwMDAwMDBKY29u
Zi50aWVyLnBlcnNpc3QucmFuazQubG93X3dhdGVybWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5y
YW5rNC5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5r
NS5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRpZXIucGVyc2lzdC5yYW5rNS5saW1pdBo2NTAwMDAw
MDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJhbms1Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVy
LnBlcnNpc3QucmFuazUucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBl
cnNpc3QucmFuazYuaGlnaF93YXRlcm1hcmsEOTA6Y29uZi50aWVyLnBlcnNpc3QucmFuazYubGlt
aXQaNjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVyc2lzdC5yYW5rNi5sb3dfd2F0ZXJtYXJrBDgw
OGNvbmYudGllci5wZXJzaXN0LnJhbms2LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNv
bmYudGllci5wZXJzaXN0LnJhbms3LmhpZ2hfd2F0ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0
LnJhbms3LmxpbWl0GjY1MDAwMDAwMDAwMDBKY29uZi50aWVyLnBlcnNpc3QucmFuazcubG93X3dh
dGVybWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5yYW5rNy5wYXRoMC9tbnQvZGF0YS9ncHVkYi9w
ZXJzaXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5rOC5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRp
ZXIucGVyc2lzdC5yYW5rOC5saW1pdBo2NTAwMDAwMDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJh
bms4Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLnBlcnNpc3QucmFuazgucGF0aDAvbW50L2Rh
dGEvZ3B1ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDEucGF0aDAvbW50L2RhdGEv
Z3B1ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDIucGF0aDAvbW50L2RhdGEvZ3B1
ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDMucGF0aDAvbW50L2RhdGEvZ3B1ZGIv
cGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDQucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVy
c2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDUucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lz
dC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDYucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC84
Y29uZi50aWVyLnBlcnNpc3QudGV4dDcucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC84Y29u
Zi50aWVyLnBlcnNpc3QudGV4dDgucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9IY29uZi50
aWVyLnJhbS5kZWZhdWx0LmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5yYW0uZGVmYXVsdC5s
aW1pdAQtMUZjb25mLnRpZXIucmFtLmRlZmF1bHQubG93X3dhdGVybWFyawQ4MERjb25mLnRpZXIu
cmFtLnJhbmswLmhpZ2hfd2F0ZXJtYXJrBDkwMmNvbmYudGllci5yYW0ucmFuazAubGltaXQWNzcy
NjY4MDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMC5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazEuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMS5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMS5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazIuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMi5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMi5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazMuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMy5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMy5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazQuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNC5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNC5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazUuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNS5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNS5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazYuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNi5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNi5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazcuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNy5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNy5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazguaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rOC5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rOC5sb3dfd2F0ZXJtYXJrBDgwXGNvbmYudGllci52
cmFtLmRlZmF1bHQuYWxsX2dwdXMuaGlnaF93YXRlcm1hcmsEOTBKY29uZi50aWVyLnZyYW0uZGVm
YXVsdC5hbGxfZ3B1cy5saW1pdAQtMVpjb25mLnRpZXIudnJhbS5kZWZhdWx0LmFsbF9ncHVzLmxv
d193YXRlcm1hcmsEODBUY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUwLmhpZ2hfd2F0ZXJtYXJr
BDkwQmNvbmYudGllci52cmFtLmRlZmF1bHQuZ3B1MC5saW1pdAQtMVJjb25mLnRpZXIudnJhbS5k
ZWZhdWx0LmdwdTAubG93X3dhdGVybWFyawQ4MFRjb25mLnRpZXIudnJhbS5kZWZhdWx0LmdwdTEu
aGlnaF93YXRlcm1hcmsEOTBCY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUxLmxpbWl0BC0xUmNv
bmYudGllci52cmFtLmRlZmF1bHQuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwVGNvbmYudGllci52cmFt
LmRlZmF1bHQuZ3B1Mi5oaWdoX3dhdGVybWFyawQ5MEJjb25mLnRpZXIudnJhbS5kZWZhdWx0Lmdw
dTIubGltaXQELTFSY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUyLmxvd193YXRlcm1hcmsEODBU
Y29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUzLmhpZ2hfd2F0ZXJtYXJrBDkwQmNvbmYudGllci52
cmFtLmRlZmF1bHQuZ3B1My5saW1pdAQtMVJjb25mLnRpZXIudnJhbS5kZWZhdWx0LmdwdTMubG93
X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMC5HUFUwLmhpZ2hfd2F0ZXJtYXJrBDkw
PmNvbmYudGllci52cmFtLnJhbmswLkdQVTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazAu
R1BVMC5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbmswLmFsbF9ncHVzLmhpZ2hf
d2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbmswLmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYu
dGllci52cmFtLnJhbmswLmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0u
cmFuazAuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMC5ncHUwLmxp
bWl0BC0xTmNvbmYudGllci52cmFtLnJhbmswLmdwdTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRp
ZXIudnJhbS5yYW5rMC5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsw
LmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazAuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgw
UGNvbmYudGllci52cmFtLnJhbmswLmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZy
YW0ucmFuazAuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMC5ncHUyLmxvd193YXRl
cm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazAuZ3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25m
LnRpZXIudnJhbS5yYW5rMC5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmswLmdwdTMu
bG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMS5HUFUwLmhpZ2hfd2F0ZXJtYXJr
BDkwPmNvbmYudGllci52cmFtLnJhbmsxLkdQVTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFu
azEuR1BVMC5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsxLkdQVTEuaGlnaF93
YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazEuR1BVMS5saW1pdAQtMU5jb25mLnRpZXIu
dnJhbS5yYW5rMS5HUFUxLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEuR1BV
Mi5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMS5HUFUyLmxpbWl0BC0xTmNv
bmYudGllci52cmFtLnJhbmsxLkdQVTIubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5y
YW5rMS5HUFUzLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsxLkdQVTMubGlt
aXQELTFOY29uZi50aWVyLnZyYW0ucmFuazEuR1BVMy5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGll
ci52cmFtLnJhbmsxLmFsbF9ncHVzLmhpZ2hfd2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJh
bmsxLmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYudGllci52cmFtLnJhbmsxLmFsbF9ncHVzLmxvd193
YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5j
b25mLnRpZXIudnJhbS5yYW5rMS5ncHUwLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmsxLmdw
dTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMS5ncHUxLmhpZ2hfd2F0ZXJt
YXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsxLmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0u
cmFuazEuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsxLmdwdTIuaGln
aF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazEuZ3B1Mi5saW1pdAQtMU5jb25mLnRp
ZXIudnJhbS5yYW5rMS5ncHUyLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEu
Z3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMS5ncHUzLmxpbWl0BC0x
TmNvbmYudGllci52cmFtLnJhbmsxLmdwdTMubG93X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJh
bS5yYW5rMi5hbGxfZ3B1cy5oaWdoX3dhdGVybWFyawQ5MEZjb25mLnRpZXIudnJhbS5yYW5rMi5h
bGxfZ3B1cy5saW1pdAQtMVZjb25mLnRpZXIudnJhbS5yYW5rMi5hbGxfZ3B1cy5sb3dfd2F0ZXJt
YXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsyLmdwdTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50
aWVyLnZyYW0ucmFuazIuZ3B1MC5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMi5ncHUwLmxv
d193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazIuZ3B1MS5oaWdoX3dhdGVybWFyawQ5
MD5jb25mLnRpZXIudnJhbS5yYW5rMi5ncHUxLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmsy
LmdwdTEubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMi5ncHUyLmhpZ2hfd2F0
ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsyLmdwdTIubGltaXQELTFOY29uZi50aWVyLnZy
YW0ucmFuazIuZ3B1Mi5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsyLmdwdTMu
aGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazIuZ3B1My5saW1pdAQtMU5jb25m
LnRpZXIudnJhbS5yYW5rMi5ncHUzLmxvd193YXRlcm1hcmsEODBYY29uZi50aWVyLnZyYW0ucmFu
azMuYWxsX2dwdXMuaGlnaF93YXRlcm1hcmsEOTBGY29uZi50aWVyLnZyYW0ucmFuazMuYWxsX2dw
dXMubGltaXQELTFWY29uZi50aWVyLnZyYW0ucmFuazMuYWxsX2dwdXMubG93X3dhdGVybWFyawQ4
MFBjb25mLnRpZXIudnJhbS5yYW5rMy5ncHUwLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52
cmFtLnJhbmszLmdwdTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazMuZ3B1MC5sb3dfd2F0
ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmszLmdwdTEuaGlnaF93YXRlcm1hcmsEOTA+Y29u
Zi50aWVyLnZyYW0ucmFuazMuZ3B1MS5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMy5ncHUx
Lmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazMuZ3B1Mi5oaWdoX3dhdGVybWFy
awQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMy5ncHUyLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJh
bmszLmdwdTIubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMy5ncHUzLmhpZ2hf
d2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmszLmdwdTMubGltaXQELTFOY29uZi50aWVy
LnZyYW0ucmFuazMuZ3B1My5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbms0LmFs
bF9ncHVzLmhpZ2hfd2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbms0LmFsbF9ncHVzLmxp
bWl0BC0xVmNvbmYudGllci52cmFtLnJhbms0LmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29u
Zi50aWVyLnZyYW0ucmFuazQuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5y
YW5rNC5ncHUwLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms0LmdwdTAubG93X3dhdGVybWFy
awQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNC5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGll
ci52cmFtLnJhbms0LmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazQuZ3B1MS5sb3df
d2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms0LmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+
Y29uZi50aWVyLnZyYW0ucmFuazQuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNC5n
cHUyLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazQuZ3B1My5oaWdoX3dhdGVy
bWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rNC5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFt
LnJhbms0LmdwdTMubG93X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1
cy5oaWdoX3dhdGVybWFyawQ5MEZjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1cy5saW1pdAQt
MVZjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1cy5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGll
ci52cmFtLnJhbms1LmdwdTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazUu
Z3B1MC5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNS5ncHUwLmxvd193YXRlcm1hcmsEODBQ
Y29uZi50aWVyLnZyYW0ucmFuazUuZ3B1MS5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJh
bS5yYW5rNS5ncHUxLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms1LmdwdTEubG93X3dhdGVy
bWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNS5ncHUyLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYu
dGllci52cmFtLnJhbms1LmdwdTIubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazUuZ3B1Mi5s
b3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms1LmdwdTMuaGlnaF93YXRlcm1hcmsE
OTA+Y29uZi50aWVyLnZyYW0ucmFuazUuZ3B1My5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5r
NS5ncHUzLmxvd193YXRlcm1hcmsEODBYY29uZi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMuaGln
aF93YXRlcm1hcmsEOTBGY29uZi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMubGltaXQELTFWY29u
Zi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJh
bS5yYW5rNi5ncHUwLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms2LmdwdTAu
bGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1MC5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYu
dGllci52cmFtLnJhbms2LmdwdTEuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFu
azYuZ3B1MS5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNi5ncHUxLmxvd193YXRlcm1hcmsE
ODBQY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1Mi5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIu
dnJhbS5yYW5rNi5ncHUyLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms2LmdwdTIubG93X3dh
dGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNi5ncHUzLmhpZ2hfd2F0ZXJtYXJrBDkwPmNv
bmYudGllci52cmFtLnJhbms2LmdwdTMubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1
My5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbms3LmFsbF9ncHVzLmhpZ2hfd2F0
ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbms3LmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYudGll
ci52cmFtLnJhbms3LmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFu
azcuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rNy5ncHUwLmxpbWl0
BC0xTmNvbmYudGllci52cmFtLnJhbms3LmdwdTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIu
dnJhbS5yYW5rNy5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms3Lmdw
dTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazcuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwUGNv
bmYudGllci52cmFtLnJhbms3LmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0u
cmFuazcuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNy5ncHUyLmxvd193YXRlcm1h
cmsEODBQY29uZi50aWVyLnZyYW0ucmFuazcuZ3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRp
ZXIudnJhbS5yYW5rNy5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms3LmdwdTMubG93
X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJhbS5yYW5rOC5hbGxfZ3B1cy5oaWdoX3dhdGVybWFy
awQ5MEZjb25mLnRpZXIudnJhbS5yYW5rOC5hbGxfZ3B1cy5saW1pdAQtMVZjb25mLnRpZXIudnJh
bS5yYW5rOC5hbGxfZ3B1cy5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms4Lmdw
dTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazguZ3B1MC5saW1pdAQtMU5j
b25mLnRpZXIudnJhbS5yYW5rOC5ncHUwLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0u
cmFuazguZ3B1MS5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rOC5ncHUxLmxp
bWl0BC0xTmNvbmYudGllci52cmFtLnJhbms4LmdwdTEubG93X3dhdGVybWFyawQ4MFBjb25mLnRp
ZXIudnJhbS5yYW5rOC5ncHUyLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms4
LmdwdTIubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazguZ3B1Mi5sb3dfd2F0ZXJtYXJrBDgw
UGNvbmYudGllci52cmFtLnJhbms4LmdwdTMuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZy
YW0ucmFuazguZ3B1My5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rOC5ncHUzLmxvd193YXRl
cm1hcmsEODA0Y29uZi50aWVyX3N0cmF0ZWd5LmRlZmF1bHRCVlJBTSAyLCBSQU0gNSwgRElTSzAg
NSwgUEVSU0lTVCA1YGNvbmYudGllcl9zdHJhdGVneS5wcmVkaWNhdGVfZXZhbHVhdGlvbl9pbnRl
cnZhbAQ2MCRjb25mLnRvbXNfcGVyX3JhbmsCMSBjb25mLnRwc19wZXJfdG9tBDQwImNvbmYudHJp
Z2dlcl9wb3J0CDkwMDE+Y29uZi51bmlmaWVkX3NlY3VyaXR5X25hbWVzcGFjZQpGQUxTRTpjb25m
LnVzZV9leHRlcm5hbF90ZXh0X3NlcnZlcghUUlVFHGNvbmYudXNlX2h0dHBzCkZBTFNFLGNvbmYu
dmlkZW9fZGVmYXVsdF90dGwELTEoY29uZi52aWRlb19tYXhfY291bnQELTEyY29uZi52aWRlb190
ZW1wX2RpcmVjdG9yeSwvdG1wL2dwdWRiLXRlbXAtdmlkZW9zImNvbmYud2FsLmNoZWNrc3VtCFRS
VUUwY29uZi53YWwuZmx1c2hfZnJlcXVlbmN5BDYwMmNvbmYud2FsLm1heF9zZWdtZW50X3NpemUS
NTAwMDAwMDAwLGNvbmYud2FsLnNlZ21lbnRfY291bnQELTEoY29uZi53YWwuc3luY19wb2xpY3kK
Zmx1c2g2Y29uZi53b3JrZXJfaHR0cF9zZXJ2ZXJfaXBz6AExNzIuMzEuMzMuMzA7MTcyLjMxLjMz
LjMwOzE3Mi4zMS4zMy4zMTsxNzIuMzEuMzMuMzI7MTcyLjMxLjMzLjMzOzE3Mi4zMS4zMy4zNDsx
NzIuMzEuMzMuMzU7MTcyLjMxLjMzLjM2OzE3Mi4zMS4zMy4zNzpjb25mLndvcmtlcl9odHRwX3Nl
cnZlcl9wb3J0c1g5MTkxOzkxOTI7OTE5Mzs5MTk0OzkxOTU7OTE5Njs5MTk3OzkxOTg7OTE5OThj
b25mLndvcmtlcl9odHRwX3NlcnZlcl91cmxzwANodHRwOi8vMTcyLjMxLjMzLjMwOjkxOTE7aHR0
cDovLzE3Mi4zMS4zMy4zMDo5MTkyO2h0dHA6Ly8xNzIuMzEuMzMuMzE6OTE5MztodHRwOi8vMTcy
LjMxLjMzLjMyOjkxOTQ7aHR0cDovLzE3Mi4zMS4zMy4zMzo5MTk1O2h0dHA6Ly8xNzIuMzEuMzMu
MzQ6OTE5NjtodHRwOi8vMTcyLjMxLjMzLjM1OjkxOTc7aHR0cDovLzE3Mi4zMS4zMy4zNjo5MTk4
O2h0dHA6Ly8xNzIuMzEuMzMuMzc6OTE5OUhjb25mLndvcmtlcl9odHRwX3NlcnZlcl91cmxzX3By
aXZhdGXAA2h0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5MTtodHRwOi8vMTcyLjMxLjMzLjMwOjkxOTI7
aHR0cDovLzE3Mi4zMS4zMy4zMTo5MTkzO2h0dHA6Ly8xNzIuMzEuMzMuMzI6OTE5NDtodHRwOi8v
MTcyLjMxLjMzLjMzOjkxOTU7aHR0cDovLzE3Mi4zMS4zMy4zNDo5MTk2O2h0dHA6Ly8xNzIuMzEu
MzMuMzU6OTE5NztodHRwOi8vMTcyLjMxLjMzLjM2OjkxOTg7aHR0cDovLzE3Mi4zMS4zMy4zNzo5
MTk5KHN5c3RlbS5mb250X2ZhbWlsaWVzpgFEZWphVnUgTWF0aCBUZVggR3lyZSxEZWphVnUgU2Fu
cyBNb25vLERlamFWdSBTYW5zLERlamFWdSBTZXJpZixTYW5zLFNlcmlmLE1vbm9zcGFjZTB2ZXJz
aW9uLmdwdWRiX2J1aWxkX2RhdGUoRmViIDE0IDIwMjQgMjM6NDk6MDFAdmVyc2lvbi5ncHVkYl9j
b21wdXRlX2NhcGFiaWxpdHkWNjA7NzA7ODA7ODY4dmVyc2lvbi5ncHVkYl9jb21wdXRlX2VuZ2lu
ZQhDVURBPnZlcnNpb24uZ3B1ZGJfY29yZV9saWJzX3ZlcnNpb24UMjAyNDAyMTMwMDR2ZXJzaW9u
LmdwdWRiX2NvcmVfdmVyc2lvbiw3LjIuMC4xLjIwMjQwMjE0MjEwOTA2NHZlcnNpb24uZ3B1ZGJf
ZmlsZV92ZXJzaW9uFDIwMjEwMzExMjAqdmVyc2lvbi5ncHVkYl92ZXJzaW9uUDkyMjYwYTMyOWNh
NDVjYjBlMzc3NzZjZjkxNDQ5NzE3OWY2MjExNDM0dmVyc2lvbi5ncHVkYl92ZXJzaW9uX2RhdGUy
MjAyNC0wMi0xNCAyMTowOTowNiAtMDUwMCx2ZXJzaW9uLnB5dGhvbl92ZXJzaW9uDjMuMTAuMTMA
AAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:34 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00063'
status:
code: 200
message: OK
- request:
body: "$demo.test_profiles\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/has/table
response:
body:
string: "\x04OK\0$has_table_response*$demo.test_profiles\x01\0\0"
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:35 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00011'
status:
code: 200
message: OK
- request:
body: "$demo.test_profiles\0\x02,no_error_if_not_exists\btrue\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/clear/table
response:
body:
string: "\x04OK\0(clear_table_response($demo.test_profiles\0\0"
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:35 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00982'
status:
code: 200
message: OK
- request:
body: "$demo.test_profiles\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/has/table
response:
body:
string: "\x04OK\0$has_table_response*$demo.test_profiles\0\0\0"
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:35 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00009'
status:
code: 200
message: OK
- request:
body: !!binary |
qAR7InR5cGUiOiAicmVjb3JkIiwgIm5hbWUiOiAidHlwZV9uYW1lIiwgImZpZWxkcyI6IFt7InR5
cGUiOiAic3RyaW5nIiwgIm5hbWUiOiAidXNlcm5hbWUifSwgeyJ0eXBlIjogInN0cmluZyIsICJu
YW1lIjogIm5hbWUifSwgeyJ0eXBlIjogInN0cmluZyIsICJuYW1lIjogInNleCJ9LCB7InR5cGUi
OiAic3RyaW5nIiwgIm5hbWUiOiAiYWRkcmVzcyJ9LCB7InR5cGUiOiAic3RyaW5nIiwgIm5hbWUi
OiAibWFpbCJ9LCB7InR5cGUiOiAibG9uZyIsICJuYW1lIjogImJpcnRoZGF0ZSJ9XX0ADBB1c2Vy
bmFtZQIMY2hhcjMyAAhuYW1lAgxjaGFyMzIABnNleAIKY2hhcjEADmFkZHJlc3MCDGNoYXI2NAAI
bWFpbAIMY2hhcjMyABJiaXJ0aGRhdGUCEnRpbWVzdGFtcAAAAA==
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/create/type
response:
body:
string: !!binary |
BE9LAChjcmVhdGVfdHlwZV9yZXNwb25zZaQGKDE4MjU3ODY5MTYzMjc0NDI1MTYz8AN7InR5cGUi
OiJyZWNvcmQiLCJuYW1lIjoidHlwZV9uYW1lIiwiZmllbGRzIjpbeyJuYW1lIjoidXNlcm5hbWUi
LCJ0eXBlIjoic3RyaW5nIn0seyJuYW1lIjoibmFtZSIsInR5cGUiOiJzdHJpbmcifSx7Im5hbWUi
OiJzZXgiLCJ0eXBlIjoic3RyaW5nIn0seyJuYW1lIjoiYWRkcmVzcyIsInR5cGUiOiJzdHJpbmci
fSx7Im5hbWUiOiJtYWlsIiwidHlwZSI6InN0cmluZyJ9LHsibmFtZSI6ImJpcnRoZGF0ZSIsInR5
cGUiOiJsb25nIn1dfQAMDmFkZHJlc3MECGRhdGEMY2hhcjY0ABJiaXJ0aGRhdGUECGRhdGESdGlt
ZXN0YW1wAAhtYWlsBAhkYXRhDGNoYXIzMgAIbmFtZQQIZGF0YQxjaGFyMzIABnNleAQIZGF0YQpj
aGFyMQAQdXNlcm5hbWUECGRhdGEMY2hhcjMyAAAAAA==
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:35 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00335'
status:
code: 200
message: OK
- request:
body: "$demo.test_profiles(18257869163274425163\b\x1Ais_replicated\nfalse$no_error_if_exists\nfalse\x1Ais_collection\nfalse6disallow_homogeneous_tables\nfalse\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/create/table
response:
body:
string: !!binary |
BE9LACpjcmVhdGVfdGFibGVfcmVzcG9uc2WmASRkZW1vLnRlc3RfcHJvZmlsZXMoMTgyNTc4Njkx
NjMyNzQ0MjUxNjMAAihxdWFsaWZpZWRfdGFibGVfbmFtZSRkZW1vLnRlc3RfcHJvZmlsZXMAAA==
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:35 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.01371'
status:
code: 200
message: OK
- request:
body: "$demo.test_profiles\x02\x12get_sizes\btrue\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/table
response:
body:
string: !!binary |
BE9LACZzaG93X3RhYmxlX3Jlc3BvbnNlghYkZGVtby50ZXN0X3Byb2ZpbGVzAiRkZW1vLnRlc3Rf
cHJvZmlsZXMAAgAAAigxODI1Nzg2OTE2MzI3NDQyNTE2MwAC8AN7InR5cGUiOiJyZWNvcmQiLCJu
YW1lIjoidHlwZV9uYW1lIiwiZmllbGRzIjpbeyJuYW1lIjoidXNlcm5hbWUiLCJ0eXBlIjoic3Ry
aW5nIn0seyJuYW1lIjoibmFtZSIsInR5cGUiOiJzdHJpbmcifSx7Im5hbWUiOiJzZXgiLCJ0eXBl
Ijoic3RyaW5nIn0seyJuYW1lIjoiYWRkcmVzcyIsInR5cGUiOiJzdHJpbmcifSx7Im5hbWUiOiJt
YWlsIiwidHlwZSI6InN0cmluZyJ9LHsibmFtZSI6ImJpcnRoZGF0ZSIsInR5cGUiOiJsb25nIn1d
fQACAAACDA5hZGRyZXNzBAhkYXRhDGNoYXI2NAASYmlydGhkYXRlBAhkYXRhEnRpbWVzdGFtcAAI
bWFpbAQIZGF0YQxjaGFyMzIACG5hbWUECGRhdGEMY2hhcjMyAAZzZXgECGRhdGEKY2hhcjEAEHVz
ZXJuYW1lBAhkYXRhDGNoYXIzMgAAAAI6ImF0dHJpYnV0ZV9pbmRleGVzACBjb2xsZWN0aW9uX25h
bWVzCGRlbW8wZGF0YXNvdXJjZV9zdWJzY3JpcHRpb25zABhmb3JlaWduX2tleXMAJGdsb2JhbF9h
Y2Nlc3NfbW9kZRRyZWFkX3dyaXRlLGlzX2F1dG9tYXRpY19wYXJ0aXRpb24KZmFsc2UQaXNfZGly
dHkAImlzX3ZpZXdfcGVyc2lzdGVkACJsYXN0X3JlZnJlc2hfdGltZQAob3duZXJfcmVzb3VyY2Vf
Z3JvdXA+a2luZXRpY2FfZGVmYXVsdF9yZXNvdXJjZV9ncm91cCpwYXJ0aXRpb25fZGVmaW5pdGlv
bnMANHBhcnRpdGlvbl9kZWZpbml0aW9uc19qc29uBHt9HHBhcnRpdGlvbl9rZXlzABxwYXJ0aXRp
b25fdHlwZQhOT05FIHByaW1hcnlfa2V5X3R5cGUMbWVtb3J5GHJlY29yZF9ieXRlcwYxNjkccmVm
cmVzaF9tZXRob2QAJnJlbWFpbmluZ190YWJsZV90dGwELTEicmVxdWVzdF9hdnJvX2pzb27iA3si
dGFibGVfbmFtZSI6ImRlbW8udGVzdF9wcm9maWxlcyIsInR5cGVfaWQiOiIxODI1Nzg2OTE2MzI3
NDQyNTE2MyIsIm9wdGlvbnMiOnsiZGlzYWxsb3dfaG9tb2dlbmVvdXNfdGFibGVzIjoiZmFsc2Ui
LCJmb3JlaWduX2tleXMiOiIiLCJpc19jb2xsZWN0aW9uIjoiZmFsc2UiLCJpc19yZXBsaWNhdGVk
IjoiZmFsc2UiLCJub19lcnJvcl9pZl9leGlzdHMiOiJmYWxzZSIsInByaW1hcnlfa2V5X3R5cGUi
OiJtZW1vcnkifX0icmVxdWVzdF9hdnJvX3R5cGUQaXNfdGFibGUWc2NoZW1hX25hbWUIZGVtbyZz
dHJhdGVneV9kZWZpbml0aW9uUiggKCBWUkFNIDIsIFJBTSA1LCBESVNLMCA1LCBQRVJTSVNUIDUg
KSApGnRhYmxlX21vbml0b3IEe30SdGFibGVfdHRsBC0xFnRvdGFsX2J5dGVzAjA4dXNlcl9jaHVu
a19jb2x1bW5fbWF4X21lbW9yeRI1MTIwMDAwMDAqdXNlcl9jaHVua19tYXhfbWVtb3J5FDgxOTIw
MDAwMDAedXNlcl9jaHVua19zaXplDjgwMDAwMDAedmlld190YWJsZV9uYW1lAAAAAgAAAgAAAgAA
AAAAAAAAAAAAAAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:35 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00283'
status:
code: 200
message: OK
- request:
body: !!binary |
JGRlbW8udGVzdF9wcm9maWxlc8gB0gESZWR1YXJkbzY5FEhhbGV5IEJlY2sCRmg1OTgzNiBDYXJs
YSBDYXVzZXdheSBTdWl0ZSA5MzkKUG9ydCBFdWdlbmUsIElOIDMyNDg3LG1lbHRvbmRlbmlzZUB5
YWhvby5jb22A8MKF7jLSARBsYmFycmVyYR5Kb3NodWEgU3RlcGhlbnMCTWYzMTA4IENocmlzdGlu
YSBGb3JnZXMKUG9ydCBUaW1vdGh5Y2hlc3RlciwgS1kgNzI5ODAmZXJpY2E4MEBob3RtYWlsLmNv
bf+vur7vU6gBDmJidXJ0b24YUGF1bGEgS2Fpc2VyAkY+VW5pdCA3NDA1IEJveCAzMDUyCkRQTyBB
RSAwOTg1OCx0aW1vdGh5cG90dHNAZ21haWwuY29t/9/b3NtCygESbWVsaXNzYTQ5FldlbmR5IFJl
ZXNlAkZqNjQwOCBDaHJpc3RvcGhlciBIaWxsIEFwdC4gNDU5Ck5ldyBCZW5qYW1pbiwgTkogMTUw
OTYgZGFkYW1zQGdtYWlsLmNvbYCQ/bWOItIBGm1lbGlzc2FjYXJ0ZXIWTWFudWVsIFJpb3MCTV4y
MjQxIEJlbGwgR2FyZGVucyBTdWl0ZSA3MjMKU2NvdHRzaWRlLCBDQSAzODQ2Myx3aWxsaWFtYXlh
bGFAZ21haWwuY29t/9//ptpHuAEOamFtZXMyNh5QYXRyaWNpYSBQb3R0ZXICRlA3OTc3IEpvbmF0
aGFuIE1lYWRvdwpKZXJyeXNpZGUsIE9IIDU1MjA1JGpwYXRyaWNrQGdtYWlsLmNvbYCgj76jRtoB
IHZhbmVzc2F2YWxlbnRpbmUeQW50aG9ueSBTaW1wc29uAk1QNDA1IE1hdGhldyBJc2xhbmQKUG9y
dCBSZWJlY2NhLCBBWiAwMzEwNDRrcmlzdGluZXBhcmtlckBob3RtYWlsLmNvbf+flLmJKuIBHmFu
ZGVyc29uYnJpZGdldBxKb25hdGhhbiBQYXluZQJNZjc4MzkzIFdpbGxpYW1zIEhlaWdodHMgQXB0
LiA3NzYKUG9ydCBMaXNhLCBHVSAxMjExMSpqYXNvbmhhcnZleUBnbWFpbC5jb23/76L1hVPEARZt
b29yZWFuZHJldxpXZW5keSBSYW1pcmV6AkZUODA4OSBHb256YWxleiBGaWVsZHMKSm9yZGFudmls
bGUsIEtTIDIyODI0KG1hdGhldzA1QGhvdG1haWwuY29tgKCXlPcz4AEYZmxveWRjeW50aGlhFEp1
ZHkgT3J0aXoCRnA1ODk4NCBXcmlnaHQgVGhyb3VnaHdheSBTdWl0ZSA3MjUKVGltb3RoeW1vdXRo
LCBPSyAwOTEzMSxwYXR0b25lcmljQGhvdG1haWwuY29t/6+gjvAB0gESc2hhbm5vbjc0GldpbGxp
YW0gRGF2aXMCTWAxMDQgRG95bGUgSnVuY3Rpb24gQXB0LiAyMDIKRWFzdCBTdGFjeSwgTUkgOTM0
NzMuZ3JhbnR0aG9tcHNvbkBnbWFpbC5jb22AoP+KthO+ARJsYW5lamFjb2IaQ2hlbHNlYSBXZWVr
cwJGUjMyOTEgV2lsbGlhbXMgUGxhY2UKTmV3IExpbmRzYXksIE9LIDQ0ODA1KGhoZXJuYW5kZXpA
eWFob28uY29t/5/v+M4EtgEabWVsaXNzYWJ1dGxlchZBbGV4YSBLZWxseQJGSDE5MDQgQnVya2Ug
Um9hZHMKUG9ydCBBbm5lLCBERSA4MTI1MiZkb3VnbGFzMzhAeWFob28uY29tgOCXgvVdygEMdHJv
eTkzHEV0aGFuIExhd3JlbmNlAk1qODUzNDUgRWRnYXIgRHJpdmVzIFN1aXRlIDQxMApTb3V0aCBT
aGFuZXRvbiwgTVQgMDM3NjggcHBlcmV6QGdtYWlsLmNvbYCQ3pScC8gBGmNocmlzdG9waGVyMjQW
S2VsbHkgV2F0dHMCRlIzNDkgQ2FzdHJvIEJyb29rClBvcnQgS2VsbHlidXJ5LCBORCAwMTY5NC5u
ZXd0b25hZHJpYW5hQHlhaG9vLmNvbf+/yOH0D+IBDnF3cmlnaHQcS2VsbHkgRnJhbmtsaW4CRmQ5
OTUgR2FyY2lhIExhbmQgQXB0LiAyMjAKTm9ydGggVHJhY2V5dG9uLCBTRCA2NTk1NTxrYXRoZXJp
bmVhbGV4YW5kZXJAaG90bWFpbC5jb22AsJ+VhxrSARpuaWNvbGViYWxkd2luGkFuZ2VsYSBIb3J0
b24CRlAxMTgyNyBKZXNzaWNhIEZvcnQKTGFrZSBNb3JnYW4sIE1TIDE4OTA1NndpbGxpYW1zcmVi
ZWNjYUBob3RtYWlsLmNvbf+Poen5IdgBDmJyaWFuMTIiU3RlZmFuaWUgV2lsbGlhbXMCRmQ2ODc3
MiBCcml0dGFueSBJc2xlIFN1aXRlIDMzMwpIb3J0b25iZXJnLCBPUiA5NTg3MSxncmFoYW1qYXNt
aW5AZ21haWwuY29t/5+RoKhotAEQemhhcmRpbmccQnJhbmRvbiBNb3Jlbm8CTUoxODc2NCBPbHNv
biBHYXJkZW5zClJpY2V0b24sIEFTIDIyNjg5JnpjYXN0aWxsb0BnbWFpbC5jb22AsJfj5hfMAQxz
bWNrZWUYRXJpY2EgUm9tZXJvAkZsNDA4MDEgU255ZGVyIEdhcmRlbiBTdWl0ZSA2MzIKTmV3IFNh
bXVlbGxhbmQsIE1QIDU3MjA4JHRhbWFyYTc5QHlhaG9vLmNvbf/vz/etKNoBFG1mcmVkZXJpY2se
S2V2aW4gU3RldmVuc29uAk1kNzgzODUgTWljaGVsbGUgQ3Jvc3Nyb2FkCkVhc3QgSmFtZXNidXJn
aCwgRk0gMjcyODcsYW5kcmV3amVuc2VuQHlhaG9vLmNvbYCQzrehXpQBFGtpbWJlcmx5NjYWTWFy
aWEgV29vZHMCRixVU05WIEJlbGwKRlBPIEFFIDc3NTcxJmplc3VzNDZAaG90bWFpbC5jb23/3/vs
wyK6AQx6bnVuZXoaVGltb3RoeSBLaXJieQJNWDc0NTAgV2FnbmVyIFZpYSBTdWl0ZSA0NDkKTGVl
dmlsbGUsIE5DIDQ4NzEwJmRvdWdsYXM0MUB5YWhvby5jb23/n7vhIrABCGhsZWUQTGVlIFJpY2UC
TVAzMjMgU3BlbmNlciBSaXZlcgpOb3J0aCBBc2hsZXksIE9IIDM5Mzc1MHNoZWxieWdyZWVuZUBo
b3RtYWlsLmNvbYCA9dixJ6gBEmJyb3duYW5uYRpKZW5uaWZlciBSb3dlAkY+VW5pdCA3NjgwIEJv
eCA3NzEzCkRQTyBBRSA4NzQ4MiZyZWJlY2NhOTlAeWFob28uY29tgPClxoA+0AEQa3Jvc2FyaW8g
TmljaG9sYXMgQ2Fycm9sbAJNYDE5NCBFZHdhcmRzIEhvbGxvdwpFYXN0IFN0ZXBoZW5jaGVzdGVy
LCBQUiA0MzI3NyhtYW51ZWw2NUBob3RtYWlsLmNvbYCg/JWIPcYBGGxhdXJhbWF5bmFyZBZFbWls
eSBBbGxlbgJGYDIwNSBGbG9yZXMgTWVhZG93IFN1aXRlIDkwNwpMYWtlIEtldmluLCBISSAxMjQ0
NyB2YnVybnNAZ21haWwuY29tgODIvrNY2gEUZXJpY2Rlbm5pcx5IZWF0aGVyIEphY2tzb24CRmQz
NDUwIE1pZGRsZXRvbiBXYWxrIFN1aXRlIDg1MApMYWtlIEVyaWNhLCBNVCA0MDg0Nyx3aGl0ZW5h
bmN5QGhvdG1haWwuY29tgJDMhvANugEQcXNhbmNoZXoaSm9obiBXaWxsaWFtcwJNVjgzNTIgQWd1
aWxhciBTcXVhcmUKSG93YXJkYm9yb3VnaCwgQ1QgMTYyODAiaXRvcnJlc0BnbWFpbC5jb22A8KzU
vhXIAQxoc21pdGgYQWxsZW4gV2Fsa2VyAk1qODIxMDMgVHJhY3kgR2xlbnMgU3VpdGUgNDI5Ckxh
a2UgS2F0aWVzaGlyZSwgR1UgOTI0MDQiZWhhcnJpc0BnbWFpbC5jb23/75XYzBrCAQ5lZGFuaWVs
GkRvbmFsZCBIZXN0ZXICTVY4NTM0IE1lbGVuZGV6IEdyZWVucwpCZW5uZXR0bW91dGgsIE1IIDY5
OTM5LGpvc2h1YW9icmllbkBnbWFpbC5jb23//8310WC8ARpoaWdnaW5zcm9iZXJ0Gk1hcnkgUmlj
aGFyZHMCRko3NDIgTWlyYW5kYSBMb2NrcwpFYXN0IExpc2EsIFNEIDM5MjgwJnhoYW5zZW5AaG90
bWFpbC5jb22AkMvIzRa+ARZncmFudGhvcnRvbhpNZWxpc3NhIERpeG9uAkZQOTk0MCBCdWNoYW5h
biBUcmFpbApQZXRlcnNzdGFkLCBPUiA5OTg4MyZ2bWl0Y2hlbGxAZ21haWwuY29tgODI1cZdzAEQ
c2hlaWxhMjIiVHJldm9yIFZhbGVuenVlbGECTWA4MDYgTWFzb24gQ2lyY2xlcyBTdWl0ZSAwMjIK
U2FtdWVsdG93biwgQVMgNTYyOTAiaG9sbHk3NEBnbWFpbC5jb22AsP+bsj+sAQxnaW5hMzQgVGlt
b3RoeSBNYXJzaGFsbAJNPjUxNyBDb3ggUnVlCk1leWVyYnVyeSwgSUwgMDc5MDAqbWVsdmluc21p
dGhAeWFob28uY29tgODtm9sR6gEUam9uZXN3YW5kYSZBbGV4aXMgTWFydGluZXogRERTAk12NzE4
MjEgVG9ycmVzIEhhcmJvcnMgU3VpdGUgOTI2Ck5vcnRoIERhdmlkY2hlc3RlciwgTUQgOTkxNjUk
bWZyb3N0QGhvdG1haWwuY29t/9/B8RXMAQx2bWNjb3kcTWljaGFlbCBLZWxsZXkCTVo4NTA2IE1h
cmN1cyBVbmlvbnMKRWFzdCBBbmRyZXdoYXZlbiwgRkwgMTk4NDcyYmxha2VtYXJnYXJldEBob3Rt
YWlsLmNvbf+v69mnKtYBGGhlbnJ5Y29sbGlucxxEYXJsZW5lIE11cnJheQJGZDM5MDMgUmhvZGVz
IFR1cm5waWtlIFN1aXRlIDI4OApCdXJuc2xhbmQsIFdJIDkyMTIzJmplYW5iYXRlc0BnbWFpbC5j
b23/v9iK7yPQARRqZW5uaWZlcjQ4Fkphc29uIFBlcmV6Ak1YNTgxOCBEb3VnaGVydHkgRmllbGRz
ClZhbmVzc2Ftb3V0aCwgTkUgMjU5MDQ2cGhpbGxpcHN0aW1vdGh5QGhvdG1haWwuY29tgOD6uJRK
0AEUZ2FyeXRvcnJlcxxKZXN1cyBSaW9zIEpyLgJNZjMyNjgwIEZlcmd1c29uIFNxdWFyZXMgQXB0
LiAzNTQKSm9obmJ1cmdoLCBDQSA0NjQ2OCJjaW5keTQ4QHlhaG9vLmNvbYCQv+aJDsgBCmtyZWVk
Ik1pY2hhZWwgUm9kcmlndWV6Ak1gOTU5NTUgRXJpYyBSb2FkIEFwdC4gMjA1ClN0ZXZlbmJvcm91
Z2gsIEFaIDc5MDEzJGtyb2FjaEBob3RtYWlsLmNvbYDgv//DMNABGnJpY2hhcmRtYXJ0aW4WRG9u
YWxkIEh1bnQCTVgzMzE0MyBCYWtlciBTaG9yZQpFYXN0IFJvYmVydHZpbGxlLCBDTyA1MjA1NTBo
aW5lc3ZhbmVzc2FAaG90bWFpbC5jb22AwLb+wVXiARplcmlrYXJleW5vbGRzHENhcnJpZSBTaGVs
dG9uAkZoNDgzMDkgTWlzdHkgUGFyayBTdWl0ZSAwMzkKTmV3IFJvYmVydGJ1cmdoLCBOTSA1MjY2
OSx0ZXJyaXdhbGxhY2VAeWFob28uY29tgJCj/NEV0AEadmljdG9yaWFtb29yZR5DaHJpc3R5IEpv
aG5zb24CRloxNjQ0MSBBbmdlbGEgSnVuY3Rpb24KTm9ydGggQ3J5c3RhbCwgT1IgMjU3MzAmbGVl
cmFjaGVsQHlhaG9vLmNvbf+vkaDrHa4BEnN1c2Fucm9zZRhNYXJpYSBQYXJrZXICRk45OTMgUm9i
aW5zb24gU2hvcmVzCkxha2UgTG9yaSwgTkogOTUyNzIeYXNoYXdAZ21haWwuY29tgNCOyZwP1gEQ
c2FyYWR1a2UcVGhvbWFzIFJ1c3NlbGwCTWg1MDA4MCBEYXZpZCBEcml2ZSBBcHQuIDgxOApQb3J0
IFJpY2tleWJ1cnksIEFTIDQwNjk2KmdyaWNoYXJkc29uQHlhaG9vLmNvbYCQhduOPtIBFGJyZW50
c21pdGgaTWljaGFlbCBMb3BlegJNZDY0MyBFcmljYSBTdHJhdmVudWUgU3VpdGUgNzU5Ck5ldyBC
cm9va2UsIElBIDkwNzA3KGtlbGx5ZGF2aWRAZ21haWwuY29tgNChtvEn2AEOdHJhY3k1MxpDaGVs
c2VhIFNpbmdoAkZkNjIyNDcgTWNpbnR5cmUgQ2VudGVyIEFwdC4gNDI5CkZvbGV5cG9ydCwgTk0g
MjMyNzI0dmVsYXpxdWV6Y2FpdGxpbkB5YWhvby5jb23/j9rKuDiyAQ5yYWxwaDI1GExhdXJhIE5l
bHNvbgJGUjE4MjggQ2FzdGlsbG8gUG9pbnRzCkxha2UgTmFuY3ksIE5ZIDk1ODAwInZ3aWxzb25A
eWFob28uY29tgODv/d8zmAESdG9kZGFsbGVuGkpvaG4gTWFyc2hhbGwCTTBVU1MgUnVzc2VsbApG
UE8gQUEgMTgxNjMkZWdvb2R3aW5AeWFob28uY29t/8+izbNsyAEQbmF0aGFuNjMYU3RhY2V5IEhh
cmR5AkZkNDE2NCBNb3JhbiBQYXRoIFN1aXRlIDcwOQpXZXN0IEd3ZW5kb2x5biwgVE4gMDc0ODYk
YWx5c3NhMzZAeWFob28uY29t/5+u4sIk8AEccGFtZWxhcmljaGFyZHMwRHIuIE5hdGhhbmllbCBD
YXJ0ZXIgRFZNAk1iNjcwIFJvbWFuIEdyZWVucyBTdWl0ZSA4NDIKU2ltbW9uc2hhdmVuLCBDQSA4
MzM4OSpza2lubmVyam9obkBnbWFpbC5jb23/75mS4z/SARpzdGFjeWNhbGRlcm9uIExhcnJ5IEhh
cnJpbmd0b24CTVI0NDQ5OCBIaWxsIEdyZWVuClNvdXRoIFBhdHJpY2lhLCBXQSA2NDY2Mi5jYXRo
ZXJpbmU4OEBob3RtYWlsLmNvbf/fzNfDWegBHGNhbWVyb25lbGxpb3R0HFRpbW90aHkgV2Fsa2Vy
Ak1wOTE2NzMgRWRkaWUgU2hvcmVzIFN1aXRlIDczNwpTb3V0aCBNaWNoYWVsc3RhZCwgTkMgMzIz
NTgodnNhbmNoZXpAaG90bWFpbC5jb23/393V82msAQ5kb25uYTY1GERhbmllbCBZYXRlcwJNPlVu
aXQgMjgwOSBCb3ggNDE2NwpEUE8gQVAgMTI3NTIwY2hhZG1jbGF1Z2hsaW5AZ21haWwuY29t//+W
sd5cxgEaamVubmlmZXJhbGxlbhpEb25hbGQgTWlsbGVyAk1INzc0IExvcGV6IFdlbGwKTG9yaWNo
ZXN0ZXIsIEFSIDc5NTk4MmRhdmlkZmF1bGtuZXJAaG90bWFpbC5jb22AgLje4g/UAQ5ob2xseTk2
HlN0ZXZlbiBDYW1wYmVsbAJNYDg4MjQgRG91Z2xhcyBDbHViIFN1aXRlIDg1MwpBbHlzc2FmdXJ0
LCBNSSAyNTEyMjBodW50ZXJzdGV2ZW5AaG90bWFpbC5jb23/z86r+RbKARRrZXZpbmdyZWVuGlNo
YW5lIENoYXBtYW4CTVY4MzMgU3RlZWxlIEV4dGVuc2lvbnMKQ2FicmVyYWJ1cnksIFJJIDY2MDQ0
LmplcmVteWpvaG5zb25AeWFob28uY29tgOCvm4Ms2gEYZ2FyeW1pdGNoZWxsGlJhbHBoIEphY2tz
b24CTWI1MzQ4IFdyaWdodCBXZWxscyBBcHQuIDMxNwpNaWNoZWxsZWZ1cnQsIE5FIDIyNTg0LmJy
aWFuZGVsZW9uQGhvdG1haWwuY29tgKDfzKcp0AEYcGFycmlzaGtheWxhFkpvaG4gRmllbGRzAk1o
MDY0IERhbmllbCBWaXN0YSBBcHQuIDY4NApXZXN0IFJpY2hhcmRzaWRlLCBEQyA2MzM0MSJrZWxs
eTUyQGdtYWlsLmNvbYDwpdDzXOQBHmNhc3RpbGxvY2FtZXJvbhpCcm9va2UgSGFybW9uAkZyMzgz
OTkgQ29jaHJhbiBDb3VydHMgQXB0LiA2ODkKV2VzdCBNaWNoZWxsZXZpZXcsIEFaIDgzMzQzInlj
aGF2ZXpAZ21haWwuY29tgPC078sSwAEOaG9icmllbhZQZXRlciBNZXllcgJNYjEwOCBNZWxpc3Nh
IEJ5cGFzcyBBcHQuIDY2NApQaGlsaXB2aWxsZSwgUkkgNzg1MDUiYW1iZXIyMkBnbWFpbC5jb23/
7/PYvzK8AQxyc21pdGgYRXJpYyBIYXdraW5zAk1eMDM5MTIgSm9uZXMgUGF0aCBTdWl0ZSA5NjQK
RWFzdCBEYXZpZCwgR1UgNTAwMDUiYWFyb243OEBnbWFpbC5jb23/n5G3u23SARZhbGV4YW5kZXI0
MBhUaW5hIFJhbWlyZXoCRmQxMjIgU3VtbWVycyBSYW5jaCBTdWl0ZSAzODkKTm9ydGggUm9iZXJ0
LCBORSA4NDgzNihrZWxseXNhcmFoQGdtYWlsLmNvbf+vopSoD7IBDmxhdXJhNjMaTWFydGluIEty
YW1lcgJNUDkxODIgU3VzYW4gU3RhdGlvbgpNZWxpc3Nhc3RhZCwgS1kgNzY4NzEicXRheWxvckBn
bWFpbC5jb23/j/qpzUbEAQxjYXJsMTkYQW1hbmRhIFBvdHRzAkZkOTg5OTQgQ2xhcmtlIENpcmNs
ZSBBcHQuIDc1NQpNb3JhbGVzcG9ydCwgQ1QgMjcxMTckbmNvY2hyYW5AeWFob28uY29t/9+Q07Bi
tAEOdGFtbXk1OBpKb25hdGhhbiBNYXlvAk1SNTc0NjMgU2ltcHNvbiBXYWxrClJhbWlyZXp2aWxs
ZSwgV1kgMzMwNTUibGF1cmEzMkB5YWhvby5jb22A0IT3gzP2AR5jb2xsaW5zbWljaGVsbGUgSGVh
dGhlciBDYWxsYWhhbgJGajI3MiBGb2xleSBWYWxsZXkgU3VpdGUgMzQyClBvcnQgQ2hhcmxlc3Np
ZGUsIE1PIDExNDQyNnN0ZXBoYW5pZWFybm9sZEBob3RtYWlsLmNvbf+PhaP1FLwBEGZveHRvbnlh
FEpvaG4gUmV5ZXMCTVIyNTA4NSBKYXNtaW5lIExvY2tzCldlc3QgQnJpYW5hLCBBTCAzNTk5MS5t
YXR0aGV3Y29vcGVyQGdtYWlsLmNvbYDAi6zfB74BHHJob25kYXJpY2hhcmRzGEFuZHJldyBNb29y
ZQJNUjI0MSBXaWxsaWFtcyBPcmNoYXJkClN0b2tlc3N0YWQsIElBIDE1NTIzIHdyYW1vc0BnbWFp
bC5jb22A4J/E4g3kARx3aWxsaWFtbmljaG9scxZMYXVyYSBTdG9uZQJGbDY3MjIgQ2hyaXN0b3Bo
ZXIgU3F1YXJlClBvcnQgQ2hyaXN0b3BoZXJ2aWV3LCBQQSA1OTg3Mi5tb3JyaXNtZWxhbmllQGdt
YWlsLmNvbYCg//OiDswBEGFuZHJldzI0FlJvZG5leSBUb2RkAk1wNTk0NjUgQnJlbmRhIFNwdXJz
IEFwdC4gMjE3Cldlc3QgQ2hyaXN0aW5hYnVyeSwgU0QgMzgwMjceYW15MDlAZ21haWwuY29tgNDA
wNA5zgEQa3lsZWJlbGwWR2VvcmdlIFlhbmcCTWA0NjA4MSBIYW5uYWggVHJhaWwgU3VpdGUgNDEx
Ckxha2UgQ29yeSwgQVIgNjIwNTcwbW9saW5hdGF5bG9yQGhvdG1haWwuY29t/6+p7uVN0AEOd2Fu
ZGE5OR5KZXNzaWNhIEhlcnJlcmECRmQ5Njc5IFRob21hcyBWaWxsYWdlIEFwdC4gNjI0Cldlc3Qg
VGF5bG9yLCBLWSA1Njc5OShyb2JlcnQyMUBob3RtYWlsLmNvbf+f093DOsABDnltb2xpbmEcQW5n
ZWxhIFN1bW1lcnMCRl42MjUwIEdyYW50IEdyZWVuIEFwdC4gOTU1Ck5pY29sZW1vdXRoLCBNUCAx
MjU1OSBlcmluMzhAZ21haWwuY29t/8/C545r2AESYWRyaWFuYTAyGERhdmlkIE1vcmdhbgJNajk4
MzE0IFNtaXRoIEZvcmQgU3VpdGUgNTA2ClNvdXRoIFdhcnJlbnNpZGUsIEFMIDE0MTgxLGJyb3du
Y2xheXRvbkBnbWFpbC5jb22AoOqhnAzUARJ3aWxsaWFtNTceRWxpemFiZXRoIFdlbGxzAkZgMDQ4
OSBTdGFjeSBTaG9hbHMgQXB0LiAwMTEKQXJ0aHVybW91dGgsIE1UIDUxNjU5LHJhbWlyZXpiaWxs
eUBnbWFpbC5jb22A4M2s+VW4AQxkbW9yYW4eQXNobGV5IEpvaG5zdG9uAkZUMDUxNTEgVmVyb25p
Y2EgTWlzc2lvbgpBbWJlcnRvd24sIE9IIDAyOTExInl3aWxzb25AZ21haWwuY29t/7+nxL0KxAEc
amFja3Nvbmtlbm5ldGgcTWljaGVhbCBNdXJwaHkCTVIzNjYxIENhcmxzb24gSXNsYW5kClNvdXRo
IERhdmlkLCBGTSAwOTYzNyJhcml2ZXJhQGdtYWlsLmNvbf+fifunTMgBEmJyaWRnZXQ1OBhEYXZp
ZCBNaWxsZXICTWAzNjY4IEhlbGVuIFBsYWlucyBBcHQuIDY5MgpOZXcgQnJpZGdldCwgTkggNjA0
NzUmZnJhbmtob29kQHlhaG9vLmNvbYCQsMexWN4BGGFwcmlsamltZW5lehhHcmVnb3J5IEhvbHQC
TWQ0NTI3IEplZmZyZXkgQ2lyY2xlcwpMYWtlIEVsaXphYmV0aHNoaXJlLCBBSyAxNzgyODJnYXJj
aWFkb3VnbGFzQGhvdG1haWwuY29t/4+l/9xbxAEgbWFyZ2FyZXRtYXJ0aW5lehxDbGlmZm9yZCBC
YWtlcgJNTDI5MTEzIFRob21hcyBSb3V0ZQpNYXlvaGF2ZW4sIE1EIDYwMjE3JGpwZXJraW5zQGdt
YWlsLmNvbYDwwNGPG+IBEGxlZHdhcmRzJk1ycy4gSnVsaWUgQnJhZHNoYXcCRmg1Njg3IFdpbnRl
cnMgSG9sbG93IEFwdC4gMzQ0Cldlc3QgQmVuamFtaW4sIFJJIDcxMDY4LG9kb25uZWxsdG9ueUB5
YWhvby5jb23//6LetQPIAQ5sYXVyYTI4GERvbm5hIE1hZGRlbgJGYDE5MjAgTmljaG9scyBEYWxl
IFN1aXRlIDI2NApCcm93bmJ1cmdoLCBNVCA3OTM1OCp0aW1vdGh5aGlsbEBnbWFpbC5jb23//4fn
jGTgARZodWZmbWF1cmVlbiBBbnRob255IE1hcnRpbmV6Ak1gNDcyNCBNZWx2aW4gTG9vcCBTdWl0
ZSAxNzAKRmlubGV5dmlsbGUsIE5KIDI5NjExMmNocmlzdG9waGVyOTdAaG90bWFpbC5jb23/v+3C
tVnMARh3YWx0ZXJzZGF2aWQkU2FtYW50aGEgTGV3aXMgRERTAkZOOTQ1MiBPc2Jvcm5lIExpZ2h0
cwpFdmVseW50b24sIE1BIDc0MjMzKnJlYmVrYWgzMkBob3RtYWlsLmNvbYCAkq+yIdoBGGFsbGVu
amFzbWluZR5Kb3NodWEgV2lsbGlhbXMCTV4wMTEzNyBMaXNhIElzbGFuZHMgU3VpdGUgMDE3CkJp
bGxiZXJnLCBPUiAxNzI4MC5zdXNhbm1hcnRpbmV6QGdtYWlsLmNvbf+/tYXZbcIBHGtyaXN0aW5i
YXJuZXR0HkRlYm9yYWggV2FsbGFjZQJGSjc3NzM3IEhpY2tzIExvYWYKQ2FycmlldG93biwgREUg
MTQ2MjYmcGF0cmljazgyQGdtYWlsLmNvbYCQ/fDUErgBFmhhcnJpc3NhcmFoHEFudGhvbnkgSGVu
c29uAk1ONDMzNTQgQXNobGV5IExhbmQKV2VzdCBKb3NodWEsIE5ZIDg3NjExInh3YXRzb25AZ21h
aWwuY29t//+ghiTWAR5zYW11ZWxyb2RyaWd1ZXoYUm9iZXJ0IEFkYW1zAk1mMTY0IEhheWVzIEV4
dGVuc2lvbnMgQXB0LiAyNTEKVG9ycmVzbW91dGgsIERFIDc3NDUwImRjaGF2ZXpAZ21haWwuY29t
gNCPyctOsAEMbGhlbnJ5GEVyaW4gQmFyYWphcwJGUDA4OTggVGF5bG9yIFJhZGlhbApHYXJkbmVy
c3RhZCwgT1IgNDg5NDIkaWZyZWVtYW5AeWFob28uY29tgJD7w7xSxgEYY2xhcmttYXR0aGV3Gkp1
bGllIENvbGxpbnMCRlQ4OTEgRGVhbiBDbHViCldlc3QgR2VvcmdlYm9yb3VnaCwgUFIgODkxMDEo
d2VzbGV5NzlAaG90bWFpbC5jb22AkKKnnBm6ARRnb21lemVkd2luHk5pY2hvbGFzIFRob21hcwJN
UDk4NiBLYXRocnluIEN1cnZlClNvdXRoIExhdXJlbiwgS1MgMDgwNTYgeWxld2lzQHlhaG9vLmNv
bYCgrsuvH6oBDGpzaG9ydBpCZWNreSBKb2huc29uAkZKNDk5MCBDb2xpbiBTcHVyCkpvaG5zb25i
ZXJnLCBJTiAwNTU2OCJhbWFsb25lQGdtYWlsLmNvbf/PoIy1bsgBFmRhdmlkZ3JpbWVzFEphbmV0
IExhcmECRl42NTU5NiBHYXJjaWEgRmllbGRzClBvcnQgRGF2aWRjaGVzdGVyLCBOViAwNjYzOCht
ZXJlZGl0aDEwQHlhaG9vLmNvbYDQ7umHAdYBGGJhbGxhcmRzdXNhbhpTY290dCBGcmF6aWVyAk1g
Njg1IEplbm55IFNob3JlIEFwdC4gMTczClN0YWNleWNoZXN0ZXIsIEtTIDA0NDcyLGxhdXJhZ2Fy
ZG5lckB5YWhvby5jb22A8JOKrja2ARxjaGVsc2VhYWd1aXJyZRRHYXJ5IFd5YXR0Ak1KOTkxIEJl
bmRlciBDb3ZlcwpFYXN0IE1pc3R5LCBNRCA0MjE5MSR5cmljaGFyZEBnbWFpbC5jb22AwI6e4Bbg
ASJlbGl6YWJldGhyb2JpbnNvbhhEYXZpZCBMYXJzb24CTWg3MTU1MCBKZW5uaWZlciBMaWdodHMg
U3VpdGUgMzY3CkdhcmNpYWZ1cnQsIE1BIDA3Mjg0JnNhcmFoNTVAaG90bWFpbC5jb22A0LrFuxbW
ARhqYWNxdWVsaW5lMjcSTWFyeSBTb3RvAkZkMDA2OSBIZXJuYW5kZXogQnJvb2sgQXB0LiA1MDcK
SGFyb2xkcG9ydCwgTkQgMDk0NjAwYmFsZHdpbmtlbGx5QGhvdG1haWwuY29t///nlJg8xAEKcnJv
c2UYQ29keSBTcGVuY2VyAk1kMDU3MTMgTWlsbGVyIFVuaW9uIEFwdC4gOTQ3ClphY2hhcnltb3V0
aCwgTUEgMDIwNjkmZHJvYmluc29uQHlhaG9vLmNvbf/voKqUNgAADGJpbmFyeQA=
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/insert/records
response:
body:
string: !!binary |
BE9LAC5pbnNlcnRfcmVjb3Jkc19yZXNwb25zZQoAyAEAAAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:35 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00419'
status:
code: 200
message: OK
- request:
body: "$demo.test_profiles\x02\x12get_sizes\btrue\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/table
response:
body:
string: !!binary |
BE9LACZzaG93X3RhYmxlX3Jlc3BvbnNlihYkZGVtby50ZXN0X3Byb2ZpbGVzAiRkZW1vLnRlc3Rf
cHJvZmlsZXMAAgAAAigxODI1Nzg2OTE2MzI3NDQyNTE2MwAC8AN7InR5cGUiOiJyZWNvcmQiLCJu
YW1lIjoidHlwZV9uYW1lIiwiZmllbGRzIjpbeyJuYW1lIjoidXNlcm5hbWUiLCJ0eXBlIjoic3Ry
aW5nIn0seyJuYW1lIjoibmFtZSIsInR5cGUiOiJzdHJpbmcifSx7Im5hbWUiOiJzZXgiLCJ0eXBl
Ijoic3RyaW5nIn0seyJuYW1lIjoiYWRkcmVzcyIsInR5cGUiOiJzdHJpbmcifSx7Im5hbWUiOiJt
YWlsIiwidHlwZSI6InN0cmluZyJ9LHsibmFtZSI6ImJpcnRoZGF0ZSIsInR5cGUiOiJsb25nIn1d
fQACAAACDA5hZGRyZXNzBAhkYXRhDGNoYXI2NAASYmlydGhkYXRlBAhkYXRhEnRpbWVzdGFtcAAI
bWFpbAQIZGF0YQxjaGFyMzIACG5hbWUECGRhdGEMY2hhcjMyAAZzZXgECGRhdGEKY2hhcjEAEHVz
ZXJuYW1lBAhkYXRhDGNoYXIzMgAAAAI6ImF0dHJpYnV0ZV9pbmRleGVzACBjb2xsZWN0aW9uX25h
bWVzCGRlbW8wZGF0YXNvdXJjZV9zdWJzY3JpcHRpb25zABhmb3JlaWduX2tleXMAJGdsb2JhbF9h
Y2Nlc3NfbW9kZRRyZWFkX3dyaXRlLGlzX2F1dG9tYXRpY19wYXJ0aXRpb24KZmFsc2UQaXNfZGly
dHkAImlzX3ZpZXdfcGVyc2lzdGVkACJsYXN0X3JlZnJlc2hfdGltZQAob3duZXJfcmVzb3VyY2Vf
Z3JvdXA+a2luZXRpY2FfZGVmYXVsdF9yZXNvdXJjZV9ncm91cCpwYXJ0aXRpb25fZGVmaW5pdGlv
bnMANHBhcnRpdGlvbl9kZWZpbml0aW9uc19qc29uBHt9HHBhcnRpdGlvbl9rZXlzABxwYXJ0aXRp
b25fdHlwZQhOT05FIHByaW1hcnlfa2V5X3R5cGUMbWVtb3J5GHJlY29yZF9ieXRlcwYxNjkccmVm
cmVzaF9tZXRob2QAJnJlbWFpbmluZ190YWJsZV90dGwELTEicmVxdWVzdF9hdnJvX2pzb27iA3si
dGFibGVfbmFtZSI6ImRlbW8udGVzdF9wcm9maWxlcyIsInR5cGVfaWQiOiIxODI1Nzg2OTE2MzI3
NDQyNTE2MyIsIm9wdGlvbnMiOnsiZGlzYWxsb3dfaG9tb2dlbmVvdXNfdGFibGVzIjoiZmFsc2Ui
LCJmb3JlaWduX2tleXMiOiIiLCJpc19jb2xsZWN0aW9uIjoiZmFsc2UiLCJpc19yZXBsaWNhdGVk
IjoiZmFsc2UiLCJub19lcnJvcl9pZl9leGlzdHMiOiJmYWxzZSIsInByaW1hcnlfa2V5X3R5cGUi
OiJtZW1vcnkifX0icmVxdWVzdF9hdnJvX3R5cGUQaXNfdGFibGUWc2NoZW1hX25hbWUIZGVtbyZz
dHJhdGVneV9kZWZpbml0aW9uUiggKCBWUkFNIDIsIFJBTSA1LCBESVNLMCA1LCBQRVJTSVNUIDUg
KSApGnRhYmxlX21vbml0b3IEe30SdGFibGVfdHRsBC0xFnRvdGFsX2J5dGVzAjA4dXNlcl9jaHVu
a19jb2x1bW5fbWF4X21lbW9yeRI1MTIwMDAwMDAqdXNlcl9jaHVua19tYXhfbWVtb3J5FDgxOTIw
MDAwMDAedXNlcl9jaHVua19zaXplDjgwMDAwMDAedmlld190YWJsZV9uYW1lAAAAAsgBAALIAQAC
AAAAAAAAAAAAyAHIAQAA
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:36 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00293'
status:
code: 200
message: OK
- request:
body: !!binary |
6AUKICAgICAgICBDUkVBVEUgT1IgUkVQTEFDRSBDT05URVhUIGRlbW8udGVzdF9sbG1fY3R4CiAg
ICAgICAgKAogICAgICAgICAgICBUQUJMRSA9IGRlbW8udGVzdF9wcm9maWxlcwogICAgICAgICAg
ICBDT01NRU5UID0gJ0NvbnRhaW5zIHVzZXIgcHJvZmlsZXMuJwogICAgICAgICksCiAgICAgICAg
KAogICAgICAgICAgICBTQU1QTEVTID0gKAogICAgICAgICAgICAnSG93IG1hbnkgbWFsZSB1c2Vy
cyBhcmUgdGhlcmU/JyA9IAogICAgICAgICAgICAnc2VsZWN0IGNvdW50KDEpIGFzIG51bV91c2Vy
cwogICAgICAgICAgICBmcm9tIGRlbW8udGVzdF9wcm9maWxlcwogICAgICAgICAgICB3aGVyZSBz
ZXggPSAnJ00nJzsnKQogICAgICAgICkKICAgICAgICAAnZwBDGJpbmFyeQAAAA==
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/execute/sql
response:
body:
string: !!binary |
BE9LAChleGVjdXRlX3NxbF9yZXNwb25zZeoFAvYDeyJuYW1lIjoiZ2VuZXJpY19yZXNwb25zZSIs
InR5cGUiOiJyZWNvcmQiLCJmaWVsZHMiOlt7Im5hbWUiOiJjb2x1bW5fMSIsInR5cGUiOnsidHlw
ZSI6ImFycmF5IiwiaXRlbXMiOiJzdHJpbmcifX0seyJuYW1lIjoiY29sdW1uX2hlYWRlcnMiLCJ0
eXBlIjp7InR5cGUiOiJhcnJheSIsIml0ZW1zIjoic3RyaW5nIn19LHsibmFtZSI6ImNvbHVtbl9k
YXRhdHlwZXMiLCJ0eXBlIjp7InR5cGUiOiJhcnJheSIsIml0ZW1zIjoic3RyaW5nIn19XX0kAAIK
ZHVtbXkAAgxzdHJpbmcAAAEAAAggWC1LaW5ldGljYS1Hcm91cAZEREwKY291bnQCMBpsYXN0X2Vu
ZHBvaW50MC9jcmVhdGUvY29udGV4dC9pbnRlcm5hbC50b3RhbF9udW1iZXJfb2ZfcmVjb3JkcwIw
AAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:36 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-Kinetica-Group:
- DDL
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.01522'
status:
code: 200
message: OK
version: 1
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models | lc_public_repos/langchain/libs/community/tests/integration_tests/chat_models/cassettes/TestChatKinetica.test_full_chain.yaml | interactions:
- request:
body: "\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/system/status
response:
body:
string: !!binary |
BE9LADZzaG93X3N5c3RlbV9zdGF0dXNfcmVzcG9uc2X8ugEUCmdyYXBovAl7ImNvdW50Ijo4LCJz
dGF0dXMiOlt7InZlcnNpb24iOjM5LCJzZXJ2ZXJfaWQiOjAsImhvc3RfaWQiOiIxNzIuMzEuMzMu
MzAiLCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjoxLCJob3N0
X2lkIjoiMTcyLjMxLjMzLjMxIiwic3RhdHVzIjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzksInNl
cnZlcl9pZCI6MiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zMiIsInN0YXR1cyI6InJ1bm5pbmcifSx7
InZlcnNpb24iOjM5LCJzZXJ2ZXJfaWQiOjMsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzMiLCJzdGF0
dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjo0LCJob3N0X2lkIjoiMTcy
LjMxLjMzLjM0Iiwic3RhdHVzIjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzksInNlcnZlcl9pZCI6
NSwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zNSIsInN0YXR1cyI6InJ1bm5pbmcifSx7InZlcnNpb24i
OjM5LCJzZXJ2ZXJfaWQiOjYsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzYiLCJzdGF0dXMiOiJydW5u
aW5nIn0seyJ2ZXJzaW9uIjozOSwic2VydmVyX2lkIjo3LCJob3N0X2lkIjoiMTcyLjMxLjMzLjM3
Iiwic3RhdHVzIjoicnVubmluZyJ9XX0eaGFfY2x1c3Rlcl9pbmZvzAN7ImhhX3JhbmtzX2luZm8i
Olt7InByaXZhdGVfdXJsIjoiaHR0cDovLzE3Mi4zMS4zMy4zMDo5MTkxIiwicHVibGljX3VybCI6
Imh0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5MSIsImhhX3VybF9vdmVycmlkZSI6Imh0dHA6Ly8xNzIu
MzEuMzMuMzA6OTE5MSIsImFsdGVybmF0ZV91cmxzIjpbImh0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5
MSJdfV0sInJlYWR5Ijp0cnVlLCJuYW1lIjoicHJvZHVjdGlvbi1uZXcifQpob3N0c/hLeyJjb3Vu
dCI6OCwic3RhdHVzIjpbeyJ2ZXJzaW9uIjoxMzMsImhvc3RfbnVtYmVyIjowLCJpZCI6IjE3Mi4z
MS4zMy4zMCIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzAtdjEwMCIsInN0YXR1cyI6InJ1bm5pbmci
LCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRzX3N0YXR1cyI6InJ1bm5pbmciLCJtbF9z
dGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJfc3RhdHVzIjoicnVubmluZyIsInJldmVh
bF9zdGF0dXMiOiJydW5uaW5nIiwidG90YWxfbWVtb3J5Ijo4MTAyMDEyNjAwMzIsInN0YXJ0X3Rp
bWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3Mi4zMS4zMy4zMCJdLCJncHVfaWRzIjpb
IkdQVS1hMDdjMGU0OC05NDhhLTQwNWMtNDNiNy03Mzc0ZDJkZGMwOGMiLCJHUFUtNTNlMWI3YjQt
NjhiOC1hMjhjLTEwZjMtZGI2YTA1OTdmYmI0IiwiR1BVLTQxZmY5MWZiLWVjYjktMGE1Yi1kNDdj
LWQ1YmI3ZWYxMDM4YSIsIkdQVS1hOTQwYjEyNi1iMWE0LTlmMDctNDRlMS02MzZjMzc5ODllY2Yi
XSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9zdF9yb2xlIjoibGVhZGVyIiwiaG9zdF90
ZXJtIjowLCJob3N0X2VsZWN0aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9u
IjoxMTMsImhvc3RfbnVtYmVyIjoxLCJpZCI6IjE3Mi4zMS4zMy4zMSIsImhvc3RuYW1lIjoiMzAw
LTMwMy11MzEtdjEwMCIsInN0YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxl
ZCIsInN0YXRzX3N0YXR1cyI6InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5
X3BsYW5uZXJfc3RhdHVzIjoic3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90
YWxfbWVtb3J5Ijo4MTAyMDEyMzU0NTYsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtf
aXBzIjpbIjE3Mi4zMS4zMy4zMSJdLCJncHVfaWRzIjpbIkdQVS0xMzJmMWRkYi1hNWY0LTIwZDMt
MTEyMi02ZDM1OTlhZmNmMWQiLCJHUFUtM2JjN2JmY2QtOTVhOS1hNjI2LThlOGYtMzdiOTcxOWFh
OWRkIiwiR1BVLWZlMTBhZDNkLTg4M2QtYTU5MC1kNDA1LWUwYTU2OTNiMGFmMCIsIkdQVS02Yjgy
OTY5OS0wYjRjLTEyZjAtMGMyOC04Y2Y0NmMyNGMxODUiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZh
bHNlIiwiaG9zdF9yb2xlIjoiZm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25f
c3RhdHVzIjoibGVhZGVyX2VsZWN0ZWQifSx7InZlcnNpb24iOjExMywiaG9zdF9udW1iZXIiOjIs
ImlkIjoiMTcyLjMxLjMzLjMyIiwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzMi12MTAwIiwic3RhdHVz
IjoicnVubmluZyIsImh0dHBkX3N0YXR1cyI6ImRpc2FibGVkIiwic3RhdHNfc3RhdHVzIjoicnVu
bmluZyIsIm1sX3N0YXR1cyI6ImRpc2FibGVkIiwicXVlcnlfcGxhbm5lcl9zdGF0dXMiOiJzdG9w
cGVkIiwicmV2ZWFsX3N0YXR1cyI6InN0b3BwZWQiLCJ0b3RhbF9tZW1vcnkiOjgxMDIwMTIzMTM2
MCwic3RhcnRfdGltZSI6MTcwODEwMzQ1MiwibmV0d29ya19pcHMiOlsiMTcyLjMxLjMzLjMyIl0s
ImdwdV9pZHMiOlsiR1BVLTA5MGM0NDMwLWRiYmUtMWUxYS03ZjdmLWExODI3ODNhZDIzMSIsIkdQ
VS1iZTE0YjVjZS1iNDExLTQ4Y2EtYTlmZi01YTA2YzdhNmYzOTgiLCJHUFUtNGIxYTU2ODgtMGU4
Yy1jYzk0LTgzM2ItYzJmMzllOTk1M2I4IiwiR1BVLTEyZWJhNDYzLTgzMmUtMTA4Yi1lY2IyLTVj
OWFmOGRhNjE2NCJdLCJhY2NlcHRzX2ZhaWxvdmVyIjoiZmFsc2UiLCJob3N0X3JvbGUiOiJmb2xs
b3dlciIsImhvc3RfdGVybSI6MCwiaG9zdF9lbGVjdGlvbl9zdGF0dXMiOiJsZWFkZXJfZWxlY3Rl
ZCJ9LHsidmVyc2lvbiI6MTEzLCJob3N0X251bWJlciI6MywiaWQiOiIxNzIuMzEuMzMuMzMiLCJo
b3N0bmFtZSI6IjMwMC0zMDMtdTMzLXYxMDAiLCJzdGF0dXMiOiJydW5uaW5nIiwiaHR0cGRfc3Rh
dHVzIjoiZGlzYWJsZWQiLCJzdGF0c19zdGF0dXMiOiJydW5uaW5nIiwibWxfc3RhdHVzIjoiZGlz
YWJsZWQiLCJxdWVyeV9wbGFubmVyX3N0YXR1cyI6InN0b3BwZWQiLCJyZXZlYWxfc3RhdHVzIjoi
c3RvcHBlZCIsInRvdGFsX21lbW9yeSI6ODEwMjAxMjM5NTUyLCJzdGFydF90aW1lIjoxNzA4MTAz
NDUyLCJuZXR3b3JrX2lwcyI6WyIxNzIuMzEuMzMuMzMiXSwiZ3B1X2lkcyI6WyJHUFUtMjYzMzFh
MDctMTc1Ni1mMDY2LTFlNWEtMzc1M2Y1ZTViYzc4IiwiR1BVLWE0MWFhMTg3LTQ1NmQtNjBiMy04
ZmM5LWI4YjMzZWFlMjFiYyIsIkdQVS01NWRlZjYxOS0wMTE2LWViZjctMzMwMy03ZDkzMmRmYzcw
ZmYiLCJHUFUtNGM5YWYzODgtYjlmYi03MWQ5LWZiNDUtODMwYTM4MTIwMGQzIl0sImFjY2VwdHNf
ZmFpbG92ZXIiOiJmYWxzZSIsImhvc3Rfcm9sZSI6ImZvbGxvd2VyIiwiaG9zdF90ZXJtIjowLCJo
b3N0X2VsZWN0aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9uIjoxMTMsImhv
c3RfbnVtYmVyIjo0LCJpZCI6IjE3Mi4zMS4zMy4zNCIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzQt
djEwMCIsInN0YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRz
X3N0YXR1cyI6InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJf
c3RhdHVzIjoic3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90YWxfbWVtb3J5
Ijo4MTAyMDEyMTA4ODAsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3
Mi4zMS4zMy4zNCJdLCJncHVfaWRzIjpbIkdQVS02OGUwOWNmYy1mOWE0LTJhMTQtNTdhNC05NDgz
YjkxYzJkOWEiLCJHUFUtM2RmMjE2ZTgtZmU3NC0wNDdhLTk1YWMtNzJlMmNiZWNiNTIyIiwiR1BV
LTE0ZDQ0Yjk4LWIwNDItY2I4MS0xZGQzLTIwZDRmNjljODljYSIsIkdQVS0zNDA2NzMwYi1iZWFk
LWM1MGEtNDZlYi1lMGEyYzJiZjZlNzYiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9z
dF9yb2xlIjoiZm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25fc3RhdHVzIjoi
bGVhZGVyX2VsZWN0ZWQifSx7InZlcnNpb24iOjExMywiaG9zdF9udW1iZXIiOjUsImlkIjoiMTcy
LjMxLjMzLjM1IiwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzNS12MTAwIiwic3RhdHVzIjoicnVubmlu
ZyIsImh0dHBkX3N0YXR1cyI6ImRpc2FibGVkIiwic3RhdHNfc3RhdHVzIjoicnVubmluZyIsIm1s
X3N0YXR1cyI6ImRpc2FibGVkIiwicXVlcnlfcGxhbm5lcl9zdGF0dXMiOiJzdG9wcGVkIiwicmV2
ZWFsX3N0YXR1cyI6InN0b3BwZWQiLCJ0b3RhbF9tZW1vcnkiOjgxMDIwMTI0Nzc0NCwic3RhcnRf
dGltZSI6MTcwODEwMzQ1MiwibmV0d29ya19pcHMiOlsiMTcyLjMxLjMzLjM1Il0sImdwdV9pZHMi
OlsiR1BVLWY3MzAyYWJjLTllYWEtMTRjOS1mNDI2LTE2M2RmM2RhOGMyNiIsIkdQVS03N2RkN2Q0
OC1mOTgwLWZkMDYtNzIyYy0xYzViOTMyMTgyMDMiLCJHUFUtNTUyYjIwYTUtNTdlNi00OTg2LWJl
MmItMmIzNzhmZDRiY2FhIiwiR1BVLTYzMDUzMTYyLTMwN2YtNTVjNS1hOTc0LTU4ZGZlODQzNDJi
MiJdLCJhY2NlcHRzX2ZhaWxvdmVyIjoiZmFsc2UiLCJob3N0X3JvbGUiOiJmb2xsb3dlciIsImhv
c3RfdGVybSI6MCwiaG9zdF9lbGVjdGlvbl9zdGF0dXMiOiJsZWFkZXJfZWxlY3RlZCJ9LHsidmVy
c2lvbiI6MTEzLCJob3N0X251bWJlciI6NiwiaWQiOiIxNzIuMzEuMzMuMzYiLCJob3N0bmFtZSI6
IjMwMC0zMDMtdTM2LXYxMDAiLCJzdGF0dXMiOiJydW5uaW5nIiwiaHR0cGRfc3RhdHVzIjoiZGlz
YWJsZWQiLCJzdGF0c19zdGF0dXMiOiJydW5uaW5nIiwibWxfc3RhdHVzIjoiZGlzYWJsZWQiLCJx
dWVyeV9wbGFubmVyX3N0YXR1cyI6InN0b3BwZWQiLCJyZXZlYWxfc3RhdHVzIjoic3RvcHBlZCIs
InRvdGFsX21lbW9yeSI6ODEwMjAxMjIzMTY4LCJzdGFydF90aW1lIjoxNzA4MTAzNDUyLCJuZXR3
b3JrX2lwcyI6WyIxNzIuMzEuMzMuMzYiXSwiZ3B1X2lkcyI6WyJHUFUtOGU5ZDRhZGItYzYxMS04
MmYwLTNmZTQtZjFmMmMzZTZhNDRmIiwiR1BVLTRjNmFiYWM5LTc0ZWMtZjc4Yy1mYWE5LTQ3NDZk
MDA1N2FiOCIsIkdQVS05NjI1YTc4Yy1lMmFlLTdmMDktOWNjZi1lZTA0OTk1MjYzMTAiLCJHUFUt
YWI3NTRhM2MtNjA4Ni1iYjUxLWU5NGEtM2NmNGExNDkwNWJhIl0sImFjY2VwdHNfZmFpbG92ZXIi
OiJmYWxzZSIsImhvc3Rfcm9sZSI6ImZvbGxvd2VyIiwiaG9zdF90ZXJtIjowLCJob3N0X2VsZWN0
aW9uX3N0YXR1cyI6ImxlYWRlcl9lbGVjdGVkIn0seyJ2ZXJzaW9uIjoxMTMsImhvc3RfbnVtYmVy
Ijo3LCJpZCI6IjE3Mi4zMS4zMy4zNyIsImhvc3RuYW1lIjoiMzAwLTMwMy11MzctdjEwMCIsInN0
YXR1cyI6InJ1bm5pbmciLCJodHRwZF9zdGF0dXMiOiJkaXNhYmxlZCIsInN0YXRzX3N0YXR1cyI6
InJ1bm5pbmciLCJtbF9zdGF0dXMiOiJkaXNhYmxlZCIsInF1ZXJ5X3BsYW5uZXJfc3RhdHVzIjoi
c3RvcHBlZCIsInJldmVhbF9zdGF0dXMiOiJzdG9wcGVkIiwidG90YWxfbWVtb3J5Ijo4MTAyMDEy
MzU0NTYsInN0YXJ0X3RpbWUiOjE3MDgxMDM0NTIsIm5ldHdvcmtfaXBzIjpbIjE3Mi4zMS4zMy4z
NyJdLCJncHVfaWRzIjpbIkdQVS1lYTE4ZDM4OC1lMmIzLTQyODMtYTZiNS1hMzIzYTQ4NzI1YTki
LCJHUFUtZGNkODRlZmItOTRjYS1iNDk3LThjMTUtN2EzMjY5NDBjMWViIiwiR1BVLTkxNjAxYTVl
LTk3M2YtZDFlNC02ZTFkLWY2NTUyOTRkMzQ2MCIsIkdQVS0wN2ZhNGRiNi05ZTU1LWI2MWYtOTA5
YS04NWM0ZDFiZWIwODgiXSwiYWNjZXB0c19mYWlsb3ZlciI6ImZhbHNlIiwiaG9zdF9yb2xlIjoi
Zm9sbG93ZXIiLCJob3N0X3Rlcm0iOjAsImhvc3RfZWxlY3Rpb25fc3RhdHVzIjoibGVhZGVyX2Vs
ZWN0ZWQifV19Fmh0dHBfc2VydmVyoAN7ImNvbm5lY3Rpb25zIjp7ImN1cnJlbnQiOjEsIm1heF9j
b25jdXJyZW50IjoxMTYsInF1ZXVlZCI6MCwibWF4X3F1ZXVlZF9hbGxvd2VkIjo2NTUzNiwidG90
YWwiOjMyNTkwMiwicmVmdXNlZCI6MCwidGhyZWFkcyI6Mn0sInRocmVhZHMiOnsidXNlZCI6Miwi
Y2FwYWNpdHkiOjUxMiwiYWxsb2NhdGVkIjo4LCJhdmFpbGFibGUiOjUxMCwic3RhY2tfc2l6ZSI6
MH19FG1pZ3JhdGlvbnMueyJjb3VudCI6MCwic3RhdHVzIjpbXX0KcmFua3PgUnsiY291bnQiOjks
InN0YXR1cyI6W3sidmVyc2lvbiI6MTE4LCJyYW5rIjowLCJyYW5rX2lkIjoiMCA6IDE3Mi4zMS4z
My4zMCA6IDMyNzc0MDYiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0YXR1cyI6InJ1bm5pbmci
LCJyZW1vdmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6IjcuMi4wLjEuMjAyNDAyMTQy
MTA5MDYiLCJwaWQiOjMyNzc0MDYsInN0YXJ0X3RpbWUiOjE3MDg0NDM2NTYsInN0YXJ0X3RpbWVf
c3RyIjoiVHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291bnQiOjEsImFjY2VwdGlu
Z19qb2JzIjp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6ZmFsc2UsImhvc3RuYW1l
IjoiMzAwLTMwMy11MzAtdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzAiLCJncHVfaWRzIjpb
IkdQVS1hMDdjMGU0OC05NDhhLTQwNWMtNDNiNy03Mzc0ZDJkZGMwOGMiXSwiZ3B1X2luZGV4Ijow
LCJncHVfaW5kaWNlcyI6WzBdfSx7InZlcnNpb24iOjEwMiwicmFuayI6MSwicmFua19pZCI6IjEg
OiAxNzIuMzEuMzMuMzAgOiAzMjc4NTYwIiwicmFua19tb2RlIjoicnVuIiwicmFua19zdGF0dXMi
OiJydW5uaW5nIiwicmVtb3ZhbF9zdGF0dXMiOiJub25lIiwiYXBwX3ZlcnNpb24iOiI3LjIuMC4x
LjIwMjQwMjE0MjEwOTA2IiwicGlkIjozMjc4NTYwLCJzdGFydF90aW1lIjoxNzA4NDQzNjU2LCJz
dGFydF90aW1lX3N0ciI6IlR1ZSBGZWIgMjAgMTU6NDA6NTYgMjAyNCIsInN0YXJ0X2NvdW50Ijox
LCJhY2NlcHRpbmdfam9icyI6dHJ1ZSwibmV3X3JhbmsiOmZhbHNlLCJyZWFkX29ubHkiOmZhbHNl
LCJob3N0bmFtZSI6IjMwMC0zMDMtdTMwLXYxMDAiLCJob3N0X2lkIjoiMTcyLjMxLjMzLjMwIiwi
Z3B1X2lkcyI6WyJHUFUtYTA3YzBlNDgtOTQ4YS00MDVjLTQzYjctNzM3NGQyZGRjMDhjIiwiR1BV
LTUzZTFiN2I0LTY4YjgtYTI4Yy0xMGYzLWRiNmEwNTk3ZmJiNCIsIkdQVS00MWZmOTFmYi1lY2I5
LTBhNWItZDQ3Yy1kNWJiN2VmMTAzOGEiLCJHUFUtYTk0MGIxMjYtYjFhNC05ZjA3LTQ0ZTEtNjM2
YzM3OTg5ZWNmIl0sImdwdV9pbmRleCI6MCwiZ3B1X2luZGljZXMiOlswLDEsMiwzXX0seyJ2ZXJz
aW9uIjoxMDIsInJhbmsiOjIsInJhbmtfaWQiOiIyIDogMTcyLjMxLjMzLjMxIDogMTU0NDUxOSIs
InJhbmtfbW9kZSI6InJ1biIsInJhbmtfc3RhdHVzIjoicnVubmluZyIsInJlbW92YWxfc3RhdHVz
Ijoibm9uZSIsImFwcF92ZXJzaW9uIjoiNy4yLjAuMS4yMDI0MDIxNDIxMDkwNiIsInBpZCI6MTU0
NDUxOSwic3RhcnRfdGltZSI6MTcwODQ0MzY1Niwic3RhcnRfdGltZV9zdHIiOiJUdWUgRmViIDIw
IDE1OjQwOjU2IDIwMjQiLCJzdGFydF9jb3VudCI6MSwiYWNjZXB0aW5nX2pvYnMiOnRydWUsIm5l
d19yYW5rIjpmYWxzZSwicmVhZF9vbmx5IjpmYWxzZSwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzMS12
MTAwIiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zMSIsImdwdV9pZHMiOlsiR1BVLTEzMmYxZGRiLWE1
ZjQtMjBkMy0xMTIyLTZkMzU5OWFmY2YxZCIsIkdQVS0zYmM3YmZjZC05NWE5LWE2MjYtOGU4Zi0z
N2I5NzE5YWE5ZGQiLCJHUFUtZmUxMGFkM2QtODgzZC1hNTkwLWQ0MDUtZTBhNTY5M2IwYWYwIiwi
R1BVLTZiODI5Njk5LTBiNGMtMTJmMC0wYzI4LThjZjQ2YzI0YzE4NSJdLCJncHVfaW5kZXgiOjAs
ImdwdV9pbmRpY2VzIjpbMCwxLDIsM119LHsidmVyc2lvbiI6MTAwLCJyYW5rIjozLCJyYW5rX2lk
IjoiMyA6IDE3Mi4zMS4zMy4zMiA6IDE0Mjk2ODUiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0
YXR1cyI6InJ1bm5pbmciLCJyZW1vdmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6Ijcu
Mi4wLjEuMjAyNDAyMTQyMTA5MDYiLCJwaWQiOjE0Mjk2ODUsInN0YXJ0X3RpbWUiOjE3MDg0NDM2
NTYsInN0YXJ0X3RpbWVfc3RyIjoiVHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291
bnQiOjEsImFjY2VwdGluZ19qb2JzIjp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6
ZmFsc2UsImhvc3RuYW1lIjoiMzAwLTMwMy11MzItdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMu
MzIiLCJncHVfaWRzIjpbIkdQVS0wOTBjNDQzMC1kYmJlLTFlMWEtN2Y3Zi1hMTgyNzgzYWQyMzEi
LCJHUFUtYmUxNGI1Y2UtYjQxMS00OGNhLWE5ZmYtNWEwNmM3YTZmMzk4IiwiR1BVLTRiMWE1Njg4
LTBlOGMtY2M5NC04MzNiLWMyZjM5ZTk5NTNiOCIsIkdQVS0xMmViYTQ2My04MzJlLTEwOGItZWNi
Mi01YzlhZjhkYTYxNjQiXSwiZ3B1X2luZGV4IjowLCJncHVfaW5kaWNlcyI6WzAsMSwyLDNdfSx7
InZlcnNpb24iOjExNiwicmFuayI6NCwicmFua19pZCI6IjQgOiAxNzIuMzEuMzMuMzMgOiAxNjA4
NzYxIiwicmFua19tb2RlIjoicnVuIiwicmFua19zdGF0dXMiOiJydW5uaW5nIiwicmVtb3ZhbF9z
dGF0dXMiOiJub25lIiwiYXBwX3ZlcnNpb24iOiI3LjIuMC4xLjIwMjQwMjE0MjEwOTA2IiwicGlk
IjoxNjA4NzYxLCJzdGFydF90aW1lIjoxNzA4NDQzNjU1LCJzdGFydF90aW1lX3N0ciI6IlR1ZSBG
ZWIgMjAgMTU6NDA6NTUgMjAyNCIsInN0YXJ0X2NvdW50IjoxLCJhY2NlcHRpbmdfam9icyI6dHJ1
ZSwibmV3X3JhbmsiOmZhbHNlLCJyZWFkX29ubHkiOmZhbHNlLCJob3N0bmFtZSI6IjMwMC0zMDMt
dTMzLXYxMDAiLCJob3N0X2lkIjoiMTcyLjMxLjMzLjMzIiwiZ3B1X2lkcyI6WyJHUFUtMjYzMzFh
MDctMTc1Ni1mMDY2LTFlNWEtMzc1M2Y1ZTViYzc4IiwiR1BVLWE0MWFhMTg3LTQ1NmQtNjBiMy04
ZmM5LWI4YjMzZWFlMjFiYyIsIkdQVS01NWRlZjYxOS0wMTE2LWViZjctMzMwMy03ZDkzMmRmYzcw
ZmYiLCJHUFUtNGM5YWYzODgtYjlmYi03MWQ5LWZiNDUtODMwYTM4MTIwMGQzIl0sImdwdV9pbmRl
eCI6MCwiZ3B1X2luZGljZXMiOlswLDEsMiwzXX0seyJ2ZXJzaW9uIjoxMDIsInJhbmsiOjUsInJh
bmtfaWQiOiI1IDogMTcyLjMxLjMzLjM0IDogMTY2MDEwMiIsInJhbmtfbW9kZSI6InJ1biIsInJh
bmtfc3RhdHVzIjoicnVubmluZyIsInJlbW92YWxfc3RhdHVzIjoibm9uZSIsImFwcF92ZXJzaW9u
IjoiNy4yLjAuMS4yMDI0MDIxNDIxMDkwNiIsInBpZCI6MTY2MDEwMiwic3RhcnRfdGltZSI6MTcw
ODQ0MzY1Niwic3RhcnRfdGltZV9zdHIiOiJUdWUgRmViIDIwIDE1OjQwOjU2IDIwMjQiLCJzdGFy
dF9jb3VudCI6MSwiYWNjZXB0aW5nX2pvYnMiOnRydWUsIm5ld19yYW5rIjpmYWxzZSwicmVhZF9v
bmx5IjpmYWxzZSwiaG9zdG5hbWUiOiIzMDAtMzAzLXUzNC12MTAwIiwiaG9zdF9pZCI6IjE3Mi4z
MS4zMy4zNCIsImdwdV9pZHMiOlsiR1BVLTY4ZTA5Y2ZjLWY5YTQtMmExNC01N2E0LTk0ODNiOTFj
MmQ5YSIsIkdQVS0zZGYyMTZlOC1mZTc0LTA0N2EtOTVhYy03MmUyY2JlY2I1MjIiLCJHUFUtMTRk
NDRiOTgtYjA0Mi1jYjgxLTFkZDMtMjBkNGY2OWM4OWNhIiwiR1BVLTM0MDY3MzBiLWJlYWQtYzUw
YS00NmViLWUwYTJjMmJmNmU3NiJdLCJncHVfaW5kZXgiOjAsImdwdV9pbmRpY2VzIjpbMCwxLDIs
M119LHsidmVyc2lvbiI6MTAyLCJyYW5rIjo2LCJyYW5rX2lkIjoiNiA6IDE3Mi4zMS4zMy4zNSA6
IDEzNzk3MTUiLCJyYW5rX21vZGUiOiJydW4iLCJyYW5rX3N0YXR1cyI6InJ1bm5pbmciLCJyZW1v
dmFsX3N0YXR1cyI6Im5vbmUiLCJhcHBfdmVyc2lvbiI6IjcuMi4wLjEuMjAyNDAyMTQyMTA5MDYi
LCJwaWQiOjEzNzk3MTUsInN0YXJ0X3RpbWUiOjE3MDg0NDM2NTYsInN0YXJ0X3RpbWVfc3RyIjoi
VHVlIEZlYiAyMCAxNTo0MDo1NiAyMDI0Iiwic3RhcnRfY291bnQiOjEsImFjY2VwdGluZ19qb2Jz
Ijp0cnVlLCJuZXdfcmFuayI6ZmFsc2UsInJlYWRfb25seSI6ZmFsc2UsImhvc3RuYW1lIjoiMzAw
LTMwMy11MzUtdjEwMCIsImhvc3RfaWQiOiIxNzIuMzEuMzMuMzUiLCJncHVfaWRzIjpbIkdQVS1m
NzMwMmFiYy05ZWFhLTE0YzktZjQyNi0xNjNkZjNkYThjMjYiLCJHUFUtNzdkZDdkNDgtZjk4MC1m
ZDA2LTcyMmMtMWM1YjkzMjE4MjAzIiwiR1BVLTU1MmIyMGE1LTU3ZTYtNDk4Ni1iZTJiLTJiMzc4
ZmQ0YmNhYSIsIkdQVS02MzA1MzE2Mi0zMDdmLTU1YzUtYTk3NC01OGRmZTg0MzQyYjIiXSwiZ3B1
X2luZGV4IjowLCJncHVfaW5kaWNlcyI6WzAsMSwyLDNdfSx7InZlcnNpb24iOjEwMiwicmFuayI6
NywicmFua19pZCI6IjcgOiAxNzIuMzEuMzMuMzYgOiAxMzc5NTM5IiwicmFua19tb2RlIjoicnVu
IiwicmFua19zdGF0dXMiOiJydW5uaW5nIiwicmVtb3ZhbF9zdGF0dXMiOiJub25lIiwiYXBwX3Zl
cnNpb24iOiI3LjIuMC4xLjIwMjQwMjE0MjEwOTA2IiwicGlkIjoxMzc5NTM5LCJzdGFydF90aW1l
IjoxNzA4NDQzNjU2LCJzdGFydF90aW1lX3N0ciI6IlR1ZSBGZWIgMjAgMTU6NDA6NTYgMjAyNCIs
InN0YXJ0X2NvdW50IjoxLCJhY2NlcHRpbmdfam9icyI6dHJ1ZSwibmV3X3JhbmsiOmZhbHNlLCJy
ZWFkX29ubHkiOmZhbHNlLCJob3N0bmFtZSI6IjMwMC0zMDMtdTM2LXYxMDAiLCJob3N0X2lkIjoi
MTcyLjMxLjMzLjM2IiwiZ3B1X2lkcyI6WyJHUFUtOGU5ZDRhZGItYzYxMS04MmYwLTNmZTQtZjFm
MmMzZTZhNDRmIiwiR1BVLTRjNmFiYWM5LTc0ZWMtZjc4Yy1mYWE5LTQ3NDZkMDA1N2FiOCIsIkdQ
VS05NjI1YTc4Yy1lMmFlLTdmMDktOWNjZi1lZTA0OTk1MjYzMTAiLCJHUFUtYWI3NTRhM2MtNjA4
Ni1iYjUxLWU5NGEtM2NmNGExNDkwNWJhIl0sImdwdV9pbmRleCI6MCwiZ3B1X2luZGljZXMiOlsw
LDEsMiwzXX0seyJ2ZXJzaW9uIjoxMDIsInJhbmsiOjgsInJhbmtfaWQiOiI4IDogMTcyLjMxLjMz
LjM3IDogMTM5Nzg5MyIsInJhbmtfbW9kZSI6InJ1biIsInJhbmtfc3RhdHVzIjoicnVubmluZyIs
InJlbW92YWxfc3RhdHVzIjoibm9uZSIsImFwcF92ZXJzaW9uIjoiNy4yLjAuMS4yMDI0MDIxNDIx
MDkwNiIsInBpZCI6MTM5Nzg5Mywic3RhcnRfdGltZSI6MTcwODQ0MzY1Niwic3RhcnRfdGltZV9z
dHIiOiJUdWUgRmViIDIwIDE1OjQwOjU2IDIwMjQiLCJzdGFydF9jb3VudCI6MSwiYWNjZXB0aW5n
X2pvYnMiOnRydWUsIm5ld19yYW5rIjpmYWxzZSwicmVhZF9vbmx5IjpmYWxzZSwiaG9zdG5hbWUi
OiIzMDAtMzAzLXUzNy12MTAwIiwiaG9zdF9pZCI6IjE3Mi4zMS4zMy4zNyIsImdwdV9pZHMiOlsi
R1BVLWVhMThkMzg4LWUyYjMtNDI4My1hNmI1LWEzMjNhNDg3MjVhOSIsIkdQVS1kY2Q4NGVmYi05
NGNhLWI0OTctOGMxNS03YTMyNjk0MGMxZWIiLCJHUFUtOTE2MDFhNWUtOTczZi1kMWU0LTZlMWQt
ZjY1NTI5NGQzNDYwIiwiR1BVLTA3ZmE0ZGI2LTllNTUtYjYxZi05MDlhLTg1YzRkMWJlYjA4OCJd
LCJncHVfaW5kZXgiOjAsImdwdV9pbmRpY2VzIjpbMCwxLDIsM119XX0Oc3ltYm9scxZ7ImNvdW50
IjoxfQxzeXN0ZW2qA3siaWQiOiJLaW5ldGljYSAzMDAtMzAzLXUzMC12MTAwIiwic3RhcnRfdGlt
ZSI6MTcwODEwMzQ1Miwic3RhdHVzIjoicnVubmluZyIsImNsdXN0ZXJfbGVhZGVyIjoiMTcyLjMx
LjMzLjMwIiwidmVyc2lvbiI6MjUsImNsdXN0ZXJfb3BlcmF0aW9uX3J1bm5pbmciOiJmYWxzZSIs
ImNsdXN0ZXJfb3BlcmF0aW9uX3N0YXR1cyI6IiIsIm9mZmxpbmVfc3RhdHVzIjoiZmFsc2UifQh0
ZXh03AV7ImNvdW50Ijo4LCJzdGF0dXMiOlt7InZlcnNpb24iOjMzLCJyYW5rIjoxLCJzdGF0dXMi
OiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywicmFuayI6Miwic3RhdHVzIjoicnVubmluZyJ9LHsi
dmVyc2lvbiI6MzMsInJhbmsiOjMsInN0YXR1cyI6InJ1bm5pbmcifSx7InZlcnNpb24iOjMzLCJy
YW5rIjo0LCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywicmFuayI6NSwic3RhdHVz
IjoicnVubmluZyJ9LHsidmVyc2lvbiI6MzMsInJhbmsiOjYsInN0YXR1cyI6InJ1bm5pbmcifSx7
InZlcnNpb24iOjMzLCJyYW5rIjo3LCJzdGF0dXMiOiJydW5uaW5nIn0seyJ2ZXJzaW9uIjozMywi
cmFuayI6OCwic3RhdHVzIjoicnVubmluZyJ9XX0QdHJpZ2dlcnNeeyJ0b3RhbF9jb3VudCI6MCwi
cmFuZ2VfY291bnQiOjAsIm5haV9jb3VudCI6MH0AAAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:38 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00065'
status:
code: 200
message: OK
- request:
body: "\0"
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/show/system/properties
response:
body:
string: !!binary |
BE9LAD5zaG93X3N5c3RlbV9wcm9wZXJ0aWVzX3Jlc3BvbnNlvPUCpAk8Y29uZi5haS5hcGkuY29u
bmVjdGlvbl90aW1lb3V0BDkwHmNvbmYuYWkuYXBpLmtleQAoY29uZi5haS5hcGkucHJvdmlkZXIW
a2luZXRpY2FsbG0eY29uZi5haS5hcGkudXJsSGh0dHA6Ly8xNzIuMzEuMzEuMTM6ODA1MC9zcWwv
c3VnZ2VzdDBjb25mLmFsZXJ0X2Rpc2tfYWJzb2x1dGUANGNvbmYuYWxlcnRfZGlza19wZXJjZW50
YWdlGDEsIDUsIDEwLCAyMBxjb25mLmFsZXJ0X2V4ZQAsY29uZi5hbGVydF9ob3N0X3N0YXR1cwhU
UlVFOmNvbmYuYWxlcnRfaG9zdF9zdGF0dXNfZmlsdGVyIGZhdGFsX2luaXRfZXJyb3I4Y29uZi5h
bGVydF9tYXhfc3RvcmVkX2FsZXJ0cwYxMDA0Y29uZi5hbGVydF9tZW1vcnlfYWJzb2x1dGUAOGNv
bmYuYWxlcnRfbWVtb3J5X3BlcmNlbnRhZ2UYMSwgNSwgMTAsIDIwNGNvbmYuYWxlcnRfcmFua19j
dWRhX2Vycm9yCFRSVUVEY29uZi5hbGVydF9yYW5rX2ZhbGxiYWNrX2FsbG9jYXRvcghUUlVFLGNv
bmYuYWxlcnRfcmFua19zdGF0dXMIVFJVRTpjb25mLmFsZXJ0X3Jhbmtfc3RhdHVzX2ZpbHRlclhm
YXRhbF9pbml0X2Vycm9yLCBub3RfcmVzcG9uZGluZywgdGVybWluYXRlZB5jb25mLmF1ZGl0X2Jv
ZHkKRkFMU0UeY29uZi5hdWRpdF9kYXRhCkZBTFNFJGNvbmYuYXVkaXRfaGVhZGVycwpGQUxTRT5j
b25mLmF1dG9fY3JlYXRlX2V4dGVybmFsX3VzZXJzCkZBTFNFTGNvbmYuYnVpbGRfbWF0ZXJpYWxp
emVkX3ZpZXdzX29uX3N0YXJ0Em9uX2RlbWFuZDhjb25mLmJ1aWxkX3BrX2luZGV4X29uX3N0YXJ0
Em9uX2RlbWFuZDhjb25mLmNodW5rX2NvbHVtbl9tYXhfbWVtb3J5EjUxMjAwMDAwMCpjb25mLmNo
dW5rX21heF9tZW1vcnkUODE5MjAwMDAwMB5jb25mLmNodW5rX3NpemUOODAwMDAwMCJjb25mLmNs
dXN0ZXJfbmFtZRxwcm9kdWN0aW9uLW5ld0Bjb25mLmNvbmN1cnJlbnRfa2VybmVsX2V4ZWN1dGlv
bghUUlVFOmNvbmYuZGVmYXVsdF9wcmltYXJ5X2tleV90eXBlDG1lbW9yeSBjb25mLmRlZmF1bHRf
dHRsBDIwLGNvbmYuZGlzYWJsZV9jbGVhcl9hbGwIVFJVRT5jb25mLmVncmVzc19wYXJxdWV0X2Nv
bXByZXNzaW9uDHNuYXBweSRjb25mLmVuYWJsZV9hbGVydHMIVFJVRSJjb25mLmVuYWJsZV9hdWRp
dApGQUxTRTJjb25mLmVuYWJsZV9hdXRob3JpemF0aW9uCFRSVUVGY29uZi5lbmFibGVfZXh0ZXJu
YWxfYXV0aGVudGljYXRpb24KRkFMU0UwY29uZi5lbmFibGVfZ3JhcGhfc2VydmVyCFRSVUUcY29u
Zi5lbmFibGVfaGEKRkFMU0UuY29uZi5lbmFibGVfaHR0cGRfcHJveHkKRkFMU0UcY29uZi5lbmFi
bGVfbWwKRkFMU0U2Y29uZi5lbmFibGVfb3BlbmdsX3JlbmRlcmVyCFRSVUVAY29uZi5lbmFibGVf
b3ZlcmxhcHBlZF9lcXVpX2pvaW4IVFJVRTRjb25mLmVuYWJsZV9wb3N0Z3Jlc19wcm94eQhUUlVF
PmNvbmYuZW5hYmxlX3ByZWRpY2F0ZV9lcXVpX2pvaW4IVFJVRSJjb25mLmVuYWJsZV9wcm9jcwhU
UlVFJGNvbmYuZW5hYmxlX3JldmVhbAhUUlVFMGNvbmYuZW5hYmxlX3N0YXRzX3NlcnZlcghUUlVF
LmNvbmYuZW5hYmxlX3RleHRfc2VhcmNoCFRSVUU8Y29uZi5lbmFibGVfdmVjdG9ydGlsZV9zZXJ2
aWNlCFRSVUUsY29uZi5lbmFibGVfdnJhbV9jYWNoZQhUUlVFPmNvbmYuZW5hYmxlX3dvcmtlcl9o
dHRwX3NlcnZlcnMIVFJVRTJjb25mLmV2ZW50X3NlcnZlcl9hZGRyZXNzGDE3Mi4zMS4zMy4zMDRj
b25mLmV2ZW50X3NlcnZlcl9pbnRlcm5hbApGQUxTRTpjb25mLmV4dGVybmFsX2ZpbGVzX2RpcmVj
dG9yeSAvbmZzL2RhdGEvcHVibGljFGNvbmYuZ21faXAYMTcyLjMxLjMzLjMwGmNvbmYuZ21fcG9y
dDEINTU1MiBjb25mLmdtX3B1Yl9wb3J0CDU1NTMoY29uZi5ncmFwaC5oZWFkX3BvcnQIODEwMC5j
b25mLmdyYXBoLnNlcnZlcjAuaG9zdApob3N0MC5jb25mLmdyYXBoLnNlcnZlcjAucG9ydAg4MTAx
OGNvbmYuZ3JhcGguc2VydmVyMC5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjEuaG9zdApo
b3N0MS5jb25mLmdyYXBoLnNlcnZlcjEucG9ydAg4MTAyOGNvbmYuZ3JhcGguc2VydmVyMS5yYW1f
bGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjIuaG9zdApob3N0Mi5jb25mLmdyYXBoLnNlcnZlcjIu
cG9ydAg4MTAzOGNvbmYuZ3JhcGguc2VydmVyMi5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZl
cjMuaG9zdApob3N0My5jb25mLmdyYXBoLnNlcnZlcjMucG9ydAg4MTA0OGNvbmYuZ3JhcGguc2Vy
dmVyMy5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjQuaG9zdApob3N0NC5jb25mLmdyYXBo
LnNlcnZlcjQucG9ydAg4MTA1OGNvbmYuZ3JhcGguc2VydmVyNC5yYW1fbGltaXQCMC5jb25mLmdy
YXBoLnNlcnZlcjUuaG9zdApob3N0NS5jb25mLmdyYXBoLnNlcnZlcjUucG9ydAg4MTA2OGNvbmYu
Z3JhcGguc2VydmVyNS5yYW1fbGltaXQCMC5jb25mLmdyYXBoLnNlcnZlcjYuaG9zdApob3N0Ni5j
b25mLmdyYXBoLnNlcnZlcjYucG9ydAg4MTA3OGNvbmYuZ3JhcGguc2VydmVyNi5yYW1fbGltaXQC
MC5jb25mLmdyYXBoLnNlcnZlcjcuaG9zdApob3N0Ny5jb25mLmdyYXBoLnNlcnZlcjcucG9ydAg4
MTA4OGNvbmYuZ3JhcGguc2VydmVyNy5yYW1fbGltaXQCMBxjb25mLmhhX3F1ZXVlcwAuY29uZi5o
YV9yaW5nX2hlYWRfbm9kZXMAKGNvbmYuaGVhZF9pcF9hZGRyZXNzGDE3Mi4zMS4zMy4zMBxjb25m
LmhlYWRfcG9ydAg5MTkxImNvbmYuaG1faHR0cF9wb3J0CDkzMDA2Y29uZi5ob3N0MF9hY2NlcHRz
X2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDBfYWRkcmVzcxgxNzIuMzEuMzMuMzAeY29uZi5ob3N0
MF9ncHVzDjAsMSwyLDNEY29uZi5ob3N0MF9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8v
MTcyLjMxLjMzLjMwOjkzMDAsY29uZi5ob3N0MF9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMz
LjMwMmNvbmYuaG9zdDBfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMwLGNvbmYuaG9zdDBfcHVi
bGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMChjb25mLmhvc3QwX3JhbV9saW1pdBg2NTY3Njc3
OTk5OTk2Y29uZi5ob3N0MV9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDFfYWRkcmVz
cxgxNzIuMzEuMzMuMzEeY29uZi5ob3N0MV9ncHVzDjAsMSwyLDNEY29uZi5ob3N0MV9ob3N0X21h
bmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjMxOjkzMDAsY29uZi5ob3N0MV9wcml2
YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjMxMmNvbmYuaG9zdDFfcHVibGljX2FkZHJlc3MYMTcy
LjMxLjMzLjMxLGNvbmYuaG9zdDFfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMShjb25m
Lmhvc3QxX3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0Ml9hY2NlcHRzX2ZhaWxvdmVy
CkZBTFNFJGNvbmYuaG9zdDJfYWRkcmVzcxgxNzIuMzEuMzMuMzIeY29uZi5ob3N0Ml9ncHVzDjAs
MSwyLDNEY29uZi5ob3N0Ml9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMz
LjMyOjkzMDAsY29uZi5ob3N0Ml9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjMyMmNvbmYu
aG9zdDJfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMyLGNvbmYuaG9zdDJfcHVibGljX3VybHMm
aHR0cDovLzE3Mi4zMS4zMy4zMihjb25mLmhvc3QyX3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29u
Zi5ob3N0M19hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDNfYWRkcmVzcxgxNzIuMzEu
MzMuMzMeY29uZi5ob3N0M19ncHVzDjAsMSwyLDNEY29uZi5ob3N0M19ob3N0X21hbmFnZXJfcHVi
bGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjMzOjkzMDAsY29uZi5ob3N0M19wcml2YXRlX3VybCZo
dHRwOi8vMTcyLjMxLjMzLjMzMmNvbmYuaG9zdDNfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjMz
LGNvbmYuaG9zdDNfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zMyhjb25mLmhvc3QzX3Jh
bV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0NF9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNv
bmYuaG9zdDRfYWRkcmVzcxgxNzIuMzEuMzMuMzQeY29uZi5ob3N0NF9ncHVzDjAsMSwyLDNEY29u
Zi5ob3N0NF9ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjM0OjkzMDAs
Y29uZi5ob3N0NF9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM0MmNvbmYuaG9zdDRfcHVi
bGljX2FkZHJlc3MYMTcyLjMxLjMzLjM0LGNvbmYuaG9zdDRfcHVibGljX3VybHMmaHR0cDovLzE3
Mi4zMS4zMy4zNChjb25mLmhvc3Q0X3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0NV9h
Y2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDVfYWRkcmVzcxgxNzIuMzEuMzMuMzUeY29u
Zi5ob3N0NV9ncHVzDjAsMSwyLDNEY29uZi5ob3N0NV9ob3N0X21hbmFnZXJfcHVibGljX3VybDBo
dHRwOi8vMTcyLjMxLjMzLjM1OjkzMDAsY29uZi5ob3N0NV9wcml2YXRlX3VybCZodHRwOi8vMTcy
LjMxLjMzLjM1MmNvbmYuaG9zdDVfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjM1LGNvbmYuaG9z
dDVfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zNShjb25mLmhvc3Q1X3JhbV9saW1pdBg2
NTY3Njc3OTk5OTk2Y29uZi5ob3N0Nl9hY2NlcHRzX2ZhaWxvdmVyCkZBTFNFJGNvbmYuaG9zdDZf
YWRkcmVzcxgxNzIuMzEuMzMuMzYeY29uZi5ob3N0Nl9ncHVzDjAsMSwyLDNEY29uZi5ob3N0Nl9o
b3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcyLjMxLjMzLjM2OjkzMDAsY29uZi5ob3N0
Nl9wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM2MmNvbmYuaG9zdDZfcHVibGljX2FkZHJl
c3MYMTcyLjMxLjMzLjM2LGNvbmYuaG9zdDZfcHVibGljX3VybHMmaHR0cDovLzE3Mi4zMS4zMy4z
Nihjb25mLmhvc3Q2X3JhbV9saW1pdBg2NTY3Njc3OTk5OTk2Y29uZi5ob3N0N19hY2NlcHRzX2Zh
aWxvdmVyCkZBTFNFJGNvbmYuaG9zdDdfYWRkcmVzcxgxNzIuMzEuMzMuMzceY29uZi5ob3N0N19n
cHVzDjAsMSwyLDNEY29uZi5ob3N0N19ob3N0X21hbmFnZXJfcHVibGljX3VybDBodHRwOi8vMTcy
LjMxLjMzLjM3OjkzMDAsY29uZi5ob3N0N19wcml2YXRlX3VybCZodHRwOi8vMTcyLjMxLjMzLjM3
MmNvbmYuaG9zdDdfcHVibGljX2FkZHJlc3MYMTcyLjMxLjMzLjM3LGNvbmYuaG9zdDdfcHVibGlj
X3VybHMmaHR0cDovLzE3Mi4zMS4zMy4zNyhjb25mLmhvc3Q3X3JhbV9saW1pdBg2NTY3Njc3OTk5
OTkqY29uZi5odHRwZF9wcm94eV9wb3J0CDgwODI0Y29uZi5odHRwZF9wcm94eV91c2VfaHR0cHMK
RkFMU0U4Y29uZi5pbml0X3dpdGhfbm93X2F0X3dvcmtlcgpGQUxTRSpjb25mLmthZmthLmJhdGNo
X3NpemUIMTAwMC5jb25mLmthZmthLnBvbGxfdGltZW91dAIwKGNvbmYua2Fma2Eud2FpdF90aW1l
BDMwLmNvbmYua2VybmVsX29tcF90aHJlYWRzAjQ0Y29uZi5sb2FkX3ZlY3RvcnNfb25fc3RhcnQS
b25fZGVtYW5kHmNvbmYubG9ja19hdWRpdApGQUxTRTZjb25mLm1heF9hdXRvX3ZpZXdfdXBkYXRv
cnMCMzZjb25mLm1heF9jb25jdXJyZW50X2tlcm5lbHMCMDJjb25mLm1heF9nZXRfcmVjb3Jkc19z
aXplCjIwMDAwKmNvbmYubWF4X2hlYXRtYXBfc2l6ZQgzMDcyKmNvbmYubWF4X2h0dHBfdGhyZWFk
cwY1MTI6Y29uZi5tZXRhZGF0YV9zdG9yZV9zeW5jX21vZGUMbm9ybWFsKmNvbmYubWluX2h0dHBf
dGhyZWFkcwI4MGNvbmYubWluX3Bhc3N3b3JkX2xlbmd0aAIwIGNvbmYubWxfYXBpX3BvcnQIOTE4
N1xjb25mLm5wMS5idWlsZF9tYXRlcmlhbGl6ZWRfdmlld3Nfb25fbWlncmF0aW9uDGFsd2F5c0hj
b25mLm5wMS5idWlsZF9wa19pbmRleF9vbl9taWdyYXRpb24MYWx3YXlzRGNvbmYubnAxLmNyaXRp
Y2FsX3Jlc3RhcnRfYXR0ZW1wdHMCMTpjb25mLm5wMS5lbmFibGVfaGVhZF9mYWlsb3ZlcgpGQUxT
RT5jb25mLm5wMS5lbmFibGVfd29ya2VyX2ZhaWxvdmVyCkZBTFNFSmNvbmYubnAxLmZhaWxvdmVy
X2Rpc3RyaWJ1dGlvbl9wb2xpY3kIZmlsbERjb25mLm5wMS5sb2FkX3ZlY3RvcnNfb25fbWlncmF0
aW9uDGFsd2F5c0xjb25mLm5wMS5ub25fY3JpdGljYWxfcmVzdGFydF9hdHRlbXB0cwIzPGNvbmYu
bnAxLnJhbmtfcmVzdGFydF9hdHRlbXB0cwIxMmNvbmYubnAxLnJlc3RhcnRfaW50ZXJ2YWwENjA2
Y29uZi5ucDEuc3RvcmFnZV9hcGlfc2NyaXB0AChjb25mLm51bWJlcl9vZl9ob3N0cwI4KGNvbmYu
bnVtYmVyX29mX3JhbmtzAjk8Y29uZi5vcGVuZ2xfYW50aWFsaWFzaW5nX2xldmVsAjAsY29uZi5w
ZXJzaXN0X2RpcmVjdG9yeTAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8sY29uZi5wZXJzaXN0X3N5
bmNfdGltZQI1NmNvbmYucG9pbnRfcmVuZGVyX3RocmVzaG9sZAwxMDAwMDBWY29uZi5wb3N0Z3Jl
c19wcm94eS5pZGxlX2Nvbm5lY3Rpb25fdGltZW91dAYzMDBUY29uZi5wb3N0Z3Jlc19wcm94eS5t
YXhfcXVldWVkX2Nvbm5lY3Rpb25zAjE+Y29uZi5wb3N0Z3Jlc19wcm94eS5tYXhfdGhyZWFkcwQ2
ND5jb25mLnBvc3RncmVzX3Byb3h5Lm1pbl90aHJlYWRzAjIwY29uZi5wb3N0Z3Jlc19wcm94eS5w
b3J0CDU0MzIuY29uZi5wb3N0Z3Jlc19wcm94eS5zc2wIVFJVRRxjb25mLnJhbmswX2dwdQIwKmNv
bmYucmFuazBfaXBfYWRkcmVzcxgxNzIuMzEuMzMuMzAqY29uZi5yYW5rMV9pcF9hZGRyZXNzGDE3
Mi4zMS4zMy4zMCpjb25mLnJhbmsyX2lwX2FkZHJlc3MYMTcyLjMxLjMzLjMxKmNvbmYucmFuazNf
aXBfYWRkcmVzcxgxNzIuMzEuMzMuMzIqY29uZi5yYW5rNF9pcF9hZGRyZXNzGDE3Mi4zMS4zMy4z
Mypjb25mLnJhbms1X2lwX2FkZHJlc3MYMTcyLjMxLjMzLjM0KmNvbmYucmFuazZfaXBfYWRkcmVz
cxgxNzIuMzEuMzMuMzUqY29uZi5yYW5rN19pcF9hZGRyZXNzGDE3Mi4zMS4zMy4zNipjb25mLnJh
bms4X2lwX2FkZHJlc3MYMTcyLjMxLjMzLjM3KGNvbmYucmVxdWVzdF90aW1lb3V0CDI0MDA2Y29u
Zi5yZXF1aXJlX2F1dGhlbnRpY2F0aW9uCFRSVUVeY29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0
Lm1heF9jcHVfY29uY3VycmVuY3kELTFaY29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0Lm1heF90
aWVyX3ByaW9yaXR5BDEwSmNvbmYucmVzb3VyY2VfZ3JvdXAuZGVmYXVsdC5yYW1fbGltaXQELTFa
Y29uZi5yZXNvdXJjZV9ncm91cC5kZWZhdWx0LnNjaGVkdWxlX3ByaW9yaXR5BDUwTGNvbmYucmVz
b3VyY2VfZ3JvdXAuZGVmYXVsdC52cmFtX2xpbWl0BC0xHGNvbmYucmluZ19uYW1lDmRlZmF1bHRW
Y29uZi5zZWN1cml0eS5leHRlcm5hbC5yYW5nZXIuY2FjaGVfbWludXRlcwQ2MFRjb25mLnNlY3Vy
aXR5LmV4dGVybmFsLnJhbmdlci5zZXJ2aWNlX25hbWUQa2luZXRpY2FCY29uZi5zZWN1cml0eS5l
eHRlcm5hbC5yYW5nZXIudXJsAGBjb25mLnNlY3VyaXR5LmV4dGVybmFsLnJhbmdlcl9hdXRob3Jp
emVyLmFkZHJlc3MyaXBjOi8vL3RtcC9ncHVkYi1yYW5nZXItMHRjb25mLnNlY3VyaXR5LmV4dGVy
bmFsLnJhbmdlcl9hdXRob3JpemVyLnJlbW90ZV9kZWJ1Z19wb3J0AjBgY29uZi5zZWN1cml0eS5l
eHRlcm5hbC5yYW5nZXJfYXV0aG9yaXplci50aW1lb3V0BjEyMCpjb25mLnNldF9tb25pdG9yX3Bv
cnQIOTAwMjZjb25mLnNldF9tb25pdG9yX3Byb3h5X3BvcnQIOTAwMzZjb25mLnNldF9tb25pdG9y
X3F1ZXVlX3NpemUIMTAwMChjb25mLnNoYWRvd19hZ2dfc2l6ZRI1MDAwMDAwMDAwY29uZi5zaGFk
b3dfY3ViZV9lbmFibGVkCFRSVUUuY29uZi5zaGFkb3dfZmlsdGVyX3NpemUSNTAwMDAwMDAwJmNv
bmYuc21fb21wX3RocmVhZHMCMiRjb25mLnNtc19kaXJlY3RvcnkwL21udC9kYXRhL2dwdWRiL3Bl
cnNpc3QvLmNvbmYuc21zX21heF9vcGVuX2ZpbGVzBjEyOEBjb25mLnNxbC5jb3N0X2Jhc2VkX29w
dGltaXphdGlvbgpGQUxTRTRjb25mLnNxbC5kaXN0cmlidXRlZF9qb2lucwhUUlVFPmNvbmYuc3Fs
LmRpc3RyaWJ1dGVkX29wZXJhdGlvbnMIVFJVRS5jb25mLnNxbC5lbmFibGVfcGxhbm5lcghUUlVF
NmNvbmYuc3FsLmZvcmNlX2JpbmFyeV9qb2lucwpGQUxTRTpjb25mLnNxbC5mb3JjZV9iaW5hcnlf
c2V0X29wcwpGQUxTRTZjb25mLnNxbC5tYXhfcGFyYWxsZWxfc3RlcHMCNEBjb25mLnNxbC5tYXhf
dmlld19uZXN0aW5nX2xldmVscwQxNjJjb25mLnNxbC5wYWdpbmdfdGFibGVfdHRsBDIwNmNvbmYu
c3FsLnBhcmFsbGVsX2V4ZWN1dGlvbghUUlVFMGNvbmYuc3FsLnBsYW5fY2FjaGVfc2l6ZQg0MDAw
MGNvbmYuc3FsLnBsYW5uZXIuYWRkcmVzcz5pcGM6Ly8vdG1wL2dwdWRiLXF1ZXJ5LWVuZ2luZS0w
NmNvbmYuc3FsLnBsYW5uZXIubWF4X21lbW9yeQg0MDk2NGNvbmYuc3FsLnBsYW5uZXIubWF4X3N0
YWNrAjZEY29uZi5zcWwucGxhbm5lci5yZW1vdGVfZGVidWdfcG9ydAIwMGNvbmYuc3FsLnBsYW5u
ZXIudGltZW91dAYxMjA0Y29uZi5zcWwucmVzdWx0X2NhY2hlX3NpemUINDAwMDRjb25mLnNxbC5y
ZXN1bHRzLmNhY2hlX3R0bAQ2MDBjb25mLnNxbC5yZXN1bHRzLmNhY2hpbmcIVFJVRUBjb25mLnNx
bC5ydWxlX2Jhc2VkX29wdGltaXphdGlvbghUUlVFPGNvbmYuc3VidGFza19jb25jdXJyZW5jeV9s
aW1pdAI0PmNvbmYuc3ltYm9sb2d5X3JlbmRlcl90aHJlc2hvbGQKMTAwMDBQY29uZi5zeXN0ZW1f
bWV0YWRhdGEuc3RhdHNfYWdncl9yb3djb3VudAoxMDAwMEhjb25mLnN5c3RlbV9tZXRhZGF0YS5z
dGF0c19hZ2dyX3RpbWUCMVJjb25mLnN5c3RlbV9tZXRhZGF0YS5zdGF0c19yZXRlbnRpb25fZGF5
cwQyMSZjb25mLnRhc2tjYWxjX2dwdS4xElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS4yElsw
LDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS4zElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS40
ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS41ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dw
dS42ElswLDEsMiwzXSZjb25mLnRhc2tjYWxjX2dwdS43ElswLDEsMiwzXSZjb25mLnRhc2tjYWxj
X2dwdS44ElswLDEsMiwzXSBjb25mLnRjc19wZXJfdG9tBDQwJmNvbmYudGVtcF9kaXJlY3RvcnkI
L3RtcDJjb25mLnRleHRfaW5kZXhfZGlyZWN0b3J5MC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0LzJj
b25mLnRleHRfaW5kaWNlc19wZXJfdG9tAjJMY29uZi50aWVyLmRpc2swLmRlZmF1bHQuaGlnaF93
YXRlcm1hcmsEOTA6Y29uZi50aWVyLmRpc2swLmRlZmF1bHQubGltaXQYNjAwMDAwMDAwMDAwSmNv
bmYudGllci5kaXNrMC5kZWZhdWx0Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLmRpc2swLmRl
ZmF1bHQucGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlYGNvbmYudGllci5k
aXNrMC5kZWZhdWx0LnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRUhjb25mLnRpZXIuZGlz
azAucmFuazAuaGlnaF93YXRlcm1hcmsEOTA2Y29uZi50aWVyLmRpc2swLnJhbmswLmxpbWl0GDYw
MDAwMDAwMDAwMEZjb25mLnRpZXIuZGlzazAucmFuazAubG93X3dhdGVybWFyawQ4MDRjb25mLnRp
ZXIuZGlzazAucmFuazAucGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlXGNv
bmYudGllci5kaXNrMC5yYW5rMC5zdG9yZV9wZXJzaXN0ZW50X29iamVjdHMKRkFMU0VIY29uZi50
aWVyLmRpc2swLnJhbmsxLmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5kaXNrMC5yYW5rMS5s
aW1pdBg2MDAwMDAwMDAwMDBGY29uZi50aWVyLmRpc2swLnJhbmsxLmxvd193YXRlcm1hcmsEODA0
Y29uZi50aWVyLmRpc2swLnJhbmsxLnBhdGhEL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvL2Rpc2tj
YWNoZVxjb25mLnRpZXIuZGlzazAucmFuazEuc3RvcmVfcGVyc2lzdGVudF9vYmplY3RzCkZBTFNF
SGNvbmYudGllci5kaXNrMC5yYW5rMi5oaWdoX3dhdGVybWFyawQ5MDZjb25mLnRpZXIuZGlzazAu
cmFuazIubGltaXQYNjAwMDAwMDAwMDAwRmNvbmYudGllci5kaXNrMC5yYW5rMi5sb3dfd2F0ZXJt
YXJrBDgwNGNvbmYudGllci5kaXNrMC5yYW5rMi5wYXRoRC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0
Ly9kaXNrY2FjaGVcY29uZi50aWVyLmRpc2swLnJhbmsyLnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0
cwpGQUxTRUhjb25mLnRpZXIuZGlzazAucmFuazMuaGlnaF93YXRlcm1hcmsEOTA2Y29uZi50aWVy
LmRpc2swLnJhbmszLmxpbWl0GDYwMDAwMDAwMDAwMEZjb25mLnRpZXIuZGlzazAucmFuazMubG93
X3dhdGVybWFyawQ4MDRjb25mLnRpZXIuZGlzazAucmFuazMucGF0aEQvbW50L2RhdGEvZ3B1ZGIv
cGVyc2lzdC8vZGlza2NhY2hlXGNvbmYudGllci5kaXNrMC5yYW5rMy5zdG9yZV9wZXJzaXN0ZW50
X29iamVjdHMKRkFMU0VIY29uZi50aWVyLmRpc2swLnJhbms0LmhpZ2hfd2F0ZXJtYXJrBDkwNmNv
bmYudGllci5kaXNrMC5yYW5rNC5saW1pdBg2MDAwMDAwMDAwMDBGY29uZi50aWVyLmRpc2swLnJh
bms0Lmxvd193YXRlcm1hcmsEODA0Y29uZi50aWVyLmRpc2swLnJhbms0LnBhdGhEL21udC9kYXRh
L2dwdWRiL3BlcnNpc3QvL2Rpc2tjYWNoZVxjb25mLnRpZXIuZGlzazAucmFuazQuc3RvcmVfcGVy
c2lzdGVudF9vYmplY3RzCkZBTFNFSGNvbmYudGllci5kaXNrMC5yYW5rNS5oaWdoX3dhdGVybWFy
awQ5MDZjb25mLnRpZXIuZGlzazAucmFuazUubGltaXQYNjAwMDAwMDAwMDAwRmNvbmYudGllci5k
aXNrMC5yYW5rNS5sb3dfd2F0ZXJtYXJrBDgwNGNvbmYudGllci5kaXNrMC5yYW5rNS5wYXRoRC9t
bnQvZGF0YS9ncHVkYi9wZXJzaXN0Ly9kaXNrY2FjaGVcY29uZi50aWVyLmRpc2swLnJhbms1LnN0
b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRUhjb25mLnRpZXIuZGlzazAucmFuazYuaGlnaF93
YXRlcm1hcmsEOTA2Y29uZi50aWVyLmRpc2swLnJhbms2LmxpbWl0GDYwMDAwMDAwMDAwMEZjb25m
LnRpZXIuZGlzazAucmFuazYubG93X3dhdGVybWFyawQ4MDRjb25mLnRpZXIuZGlzazAucmFuazYu
cGF0aEQvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC8vZGlza2NhY2hlXGNvbmYudGllci5kaXNrMC5y
YW5rNi5zdG9yZV9wZXJzaXN0ZW50X29iamVjdHMKRkFMU0VIY29uZi50aWVyLmRpc2swLnJhbms3
LmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5kaXNrMC5yYW5rNy5saW1pdBg2MDAwMDAwMDAw
MDBGY29uZi50aWVyLmRpc2swLnJhbms3Lmxvd193YXRlcm1hcmsEODA0Y29uZi50aWVyLmRpc2sw
LnJhbms3LnBhdGhEL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvL2Rpc2tjYWNoZVxjb25mLnRpZXIu
ZGlzazAucmFuazcuc3RvcmVfcGVyc2lzdGVudF9vYmplY3RzCkZBTFNFSGNvbmYudGllci5kaXNr
MC5yYW5rOC5oaWdoX3dhdGVybWFyawQ5MDZjb25mLnRpZXIuZGlzazAucmFuazgubGltaXQYNjAw
MDAwMDAwMDAwRmNvbmYudGllci5kaXNrMC5yYW5rOC5sb3dfd2F0ZXJtYXJrBDgwNGNvbmYudGll
ci5kaXNrMC5yYW5rOC5wYXRoRC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Ly9kaXNrY2FjaGVcY29u
Zi50aWVyLmRpc2swLnJhbms4LnN0b3JlX3BlcnNpc3RlbnRfb2JqZWN0cwpGQUxTRVBjb25mLnRp
ZXIuZ2xvYmFsLmNvbmN1cnJlbnRfd2FpdF90aW1lb3V0BjYwMGpjb25mLnRpZXIuZ2xvYmFsLmRl
ZmVyX2NhY2hlX29iamVjdF9ldmljdGlvbnNfdG9fZGlzawhUUlVFUGNvbmYudGllci5wZXJzaXN0
LmRlZmF1bHQuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnBlcnNpc3QuZGVmYXVsdC5saW1p
dBo2NTAwMDAwMDAwMDAwTmNvbmYudGllci5wZXJzaXN0LmRlZmF1bHQubG93X3dhdGVybWFyawQ4
MDxjb25mLnRpZXIucGVyc2lzdC5kZWZhdWx0LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3Qv
OmNvbmYudGllci5wZXJzaXN0LmdyYXBoMC5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpj
b25mLnRpZXIucGVyc2lzdC5ncmFwaDEucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC86Y29u
Zi50aWVyLnBlcnNpc3QuZ3JhcGgyLnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvOmNvbmYu
dGllci5wZXJzaXN0LmdyYXBoMy5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpjb25mLnRp
ZXIucGVyc2lzdC5ncmFwaDQucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC86Y29uZi50aWVy
LnBlcnNpc3QuZ3JhcGg1LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvOmNvbmYudGllci5w
ZXJzaXN0LmdyYXBoNi5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0Lzpjb25mLnRpZXIucGVy
c2lzdC5ncmFwaDcucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBlcnNp
c3QucmFuazAuaGlnaF93YXRlcm1hcmsEOTA6Y29uZi50aWVyLnBlcnNpc3QucmFuazAubGltaXQa
NjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVyc2lzdC5yYW5rMC5sb3dfd2F0ZXJtYXJrBDgwOGNv
bmYudGllci5wZXJzaXN0LnJhbmswLnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNvbmYu
dGllci5wZXJzaXN0LnJhbmsxLmhpZ2hfd2F0ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0LnJh
bmsxLmxpbWl0GjY1MDAwMDAwMDAwMDBKY29uZi50aWVyLnBlcnNpc3QucmFuazEubG93X3dhdGVy
bWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5yYW5rMS5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJz
aXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5rMi5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRpZXIu
cGVyc2lzdC5yYW5rMi5saW1pdBo2NTAwMDAwMDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJhbmsy
Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLnBlcnNpc3QucmFuazIucGF0aDAvbW50L2RhdGEv
Z3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBlcnNpc3QucmFuazMuaGlnaF93YXRlcm1hcmsEOTA6
Y29uZi50aWVyLnBlcnNpc3QucmFuazMubGltaXQaNjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVy
c2lzdC5yYW5rMy5sb3dfd2F0ZXJtYXJrBDgwOGNvbmYudGllci5wZXJzaXN0LnJhbmszLnBhdGgw
L21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNvbmYudGllci5wZXJzaXN0LnJhbms0LmhpZ2hfd2F0
ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0LnJhbms0LmxpbWl0GjY1MDAwMDAwMDAwMDBKY29u
Zi50aWVyLnBlcnNpc3QucmFuazQubG93X3dhdGVybWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5y
YW5rNC5wYXRoMC9tbnQvZGF0YS9ncHVkYi9wZXJzaXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5r
NS5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRpZXIucGVyc2lzdC5yYW5rNS5saW1pdBo2NTAwMDAw
MDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJhbms1Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVy
LnBlcnNpc3QucmFuazUucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9MY29uZi50aWVyLnBl
cnNpc3QucmFuazYuaGlnaF93YXRlcm1hcmsEOTA6Y29uZi50aWVyLnBlcnNpc3QucmFuazYubGlt
aXQaNjUwMDAwMDAwMDAwMEpjb25mLnRpZXIucGVyc2lzdC5yYW5rNi5sb3dfd2F0ZXJtYXJrBDgw
OGNvbmYudGllci5wZXJzaXN0LnJhbms2LnBhdGgwL21udC9kYXRhL2dwdWRiL3BlcnNpc3QvTGNv
bmYudGllci5wZXJzaXN0LnJhbms3LmhpZ2hfd2F0ZXJtYXJrBDkwOmNvbmYudGllci5wZXJzaXN0
LnJhbms3LmxpbWl0GjY1MDAwMDAwMDAwMDBKY29uZi50aWVyLnBlcnNpc3QucmFuazcubG93X3dh
dGVybWFyawQ4MDhjb25mLnRpZXIucGVyc2lzdC5yYW5rNy5wYXRoMC9tbnQvZGF0YS9ncHVkYi9w
ZXJzaXN0L0xjb25mLnRpZXIucGVyc2lzdC5yYW5rOC5oaWdoX3dhdGVybWFyawQ5MDpjb25mLnRp
ZXIucGVyc2lzdC5yYW5rOC5saW1pdBo2NTAwMDAwMDAwMDAwSmNvbmYudGllci5wZXJzaXN0LnJh
bms4Lmxvd193YXRlcm1hcmsEODA4Y29uZi50aWVyLnBlcnNpc3QucmFuazgucGF0aDAvbW50L2Rh
dGEvZ3B1ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDEucGF0aDAvbW50L2RhdGEv
Z3B1ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDIucGF0aDAvbW50L2RhdGEvZ3B1
ZGIvcGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDMucGF0aDAvbW50L2RhdGEvZ3B1ZGIv
cGVyc2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDQucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVy
c2lzdC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDUucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lz
dC84Y29uZi50aWVyLnBlcnNpc3QudGV4dDYucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC84
Y29uZi50aWVyLnBlcnNpc3QudGV4dDcucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC84Y29u
Zi50aWVyLnBlcnNpc3QudGV4dDgucGF0aDAvbW50L2RhdGEvZ3B1ZGIvcGVyc2lzdC9IY29uZi50
aWVyLnJhbS5kZWZhdWx0LmhpZ2hfd2F0ZXJtYXJrBDkwNmNvbmYudGllci5yYW0uZGVmYXVsdC5s
aW1pdAQtMUZjb25mLnRpZXIucmFtLmRlZmF1bHQubG93X3dhdGVybWFyawQ4MERjb25mLnRpZXIu
cmFtLnJhbmswLmhpZ2hfd2F0ZXJtYXJrBDkwMmNvbmYudGllci5yYW0ucmFuazAubGltaXQWNzcy
NjY4MDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMC5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazEuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMS5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMS5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazIuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMi5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMi5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazMuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rMy5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rMy5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazQuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNC5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNC5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazUuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNS5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNS5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazYuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNi5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNi5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazcuaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rNy5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rNy5sb3dfd2F0ZXJtYXJrBDgwRGNvbmYudGllci5y
YW0ucmFuazguaGlnaF93YXRlcm1hcmsEOTAyY29uZi50aWVyLnJhbS5yYW5rOC5saW1pdBg2MDAw
MDAwMDAwMDBCY29uZi50aWVyLnJhbS5yYW5rOC5sb3dfd2F0ZXJtYXJrBDgwXGNvbmYudGllci52
cmFtLmRlZmF1bHQuYWxsX2dwdXMuaGlnaF93YXRlcm1hcmsEOTBKY29uZi50aWVyLnZyYW0uZGVm
YXVsdC5hbGxfZ3B1cy5saW1pdAQtMVpjb25mLnRpZXIudnJhbS5kZWZhdWx0LmFsbF9ncHVzLmxv
d193YXRlcm1hcmsEODBUY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUwLmhpZ2hfd2F0ZXJtYXJr
BDkwQmNvbmYudGllci52cmFtLmRlZmF1bHQuZ3B1MC5saW1pdAQtMVJjb25mLnRpZXIudnJhbS5k
ZWZhdWx0LmdwdTAubG93X3dhdGVybWFyawQ4MFRjb25mLnRpZXIudnJhbS5kZWZhdWx0LmdwdTEu
aGlnaF93YXRlcm1hcmsEOTBCY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUxLmxpbWl0BC0xUmNv
bmYudGllci52cmFtLmRlZmF1bHQuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwVGNvbmYudGllci52cmFt
LmRlZmF1bHQuZ3B1Mi5oaWdoX3dhdGVybWFyawQ5MEJjb25mLnRpZXIudnJhbS5kZWZhdWx0Lmdw
dTIubGltaXQELTFSY29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUyLmxvd193YXRlcm1hcmsEODBU
Y29uZi50aWVyLnZyYW0uZGVmYXVsdC5ncHUzLmhpZ2hfd2F0ZXJtYXJrBDkwQmNvbmYudGllci52
cmFtLmRlZmF1bHQuZ3B1My5saW1pdAQtMVJjb25mLnRpZXIudnJhbS5kZWZhdWx0LmdwdTMubG93
X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMC5HUFUwLmhpZ2hfd2F0ZXJtYXJrBDkw
PmNvbmYudGllci52cmFtLnJhbmswLkdQVTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazAu
R1BVMC5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbmswLmFsbF9ncHVzLmhpZ2hf
d2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbmswLmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYu
dGllci52cmFtLnJhbmswLmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0u
cmFuazAuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMC5ncHUwLmxp
bWl0BC0xTmNvbmYudGllci52cmFtLnJhbmswLmdwdTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRp
ZXIudnJhbS5yYW5rMC5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsw
LmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazAuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgw
UGNvbmYudGllci52cmFtLnJhbmswLmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZy
YW0ucmFuazAuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMC5ncHUyLmxvd193YXRl
cm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazAuZ3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25m
LnRpZXIudnJhbS5yYW5rMC5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmswLmdwdTMu
bG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMS5HUFUwLmhpZ2hfd2F0ZXJtYXJr
BDkwPmNvbmYudGllci52cmFtLnJhbmsxLkdQVTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFu
azEuR1BVMC5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsxLkdQVTEuaGlnaF93
YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazEuR1BVMS5saW1pdAQtMU5jb25mLnRpZXIu
dnJhbS5yYW5rMS5HUFUxLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEuR1BV
Mi5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMS5HUFUyLmxpbWl0BC0xTmNv
bmYudGllci52cmFtLnJhbmsxLkdQVTIubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5y
YW5rMS5HUFUzLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsxLkdQVTMubGlt
aXQELTFOY29uZi50aWVyLnZyYW0ucmFuazEuR1BVMy5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGll
ci52cmFtLnJhbmsxLmFsbF9ncHVzLmhpZ2hfd2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJh
bmsxLmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYudGllci52cmFtLnJhbmsxLmFsbF9ncHVzLmxvd193
YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5j
b25mLnRpZXIudnJhbS5yYW5rMS5ncHUwLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmsxLmdw
dTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMS5ncHUxLmhpZ2hfd2F0ZXJt
YXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsxLmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0u
cmFuazEuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsxLmdwdTIuaGln
aF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazEuZ3B1Mi5saW1pdAQtMU5jb25mLnRp
ZXIudnJhbS5yYW5rMS5ncHUyLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazEu
Z3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMS5ncHUzLmxpbWl0BC0x
TmNvbmYudGllci52cmFtLnJhbmsxLmdwdTMubG93X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJh
bS5yYW5rMi5hbGxfZ3B1cy5oaWdoX3dhdGVybWFyawQ5MEZjb25mLnRpZXIudnJhbS5yYW5rMi5h
bGxfZ3B1cy5saW1pdAQtMVZjb25mLnRpZXIudnJhbS5yYW5rMi5hbGxfZ3B1cy5sb3dfd2F0ZXJt
YXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsyLmdwdTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50
aWVyLnZyYW0ucmFuazIuZ3B1MC5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMi5ncHUwLmxv
d193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazIuZ3B1MS5oaWdoX3dhdGVybWFyawQ5
MD5jb25mLnRpZXIudnJhbS5yYW5rMi5ncHUxLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbmsy
LmdwdTEubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMi5ncHUyLmhpZ2hfd2F0
ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmsyLmdwdTIubGltaXQELTFOY29uZi50aWVyLnZy
YW0ucmFuazIuZ3B1Mi5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmsyLmdwdTMu
aGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazIuZ3B1My5saW1pdAQtMU5jb25m
LnRpZXIudnJhbS5yYW5rMi5ncHUzLmxvd193YXRlcm1hcmsEODBYY29uZi50aWVyLnZyYW0ucmFu
azMuYWxsX2dwdXMuaGlnaF93YXRlcm1hcmsEOTBGY29uZi50aWVyLnZyYW0ucmFuazMuYWxsX2dw
dXMubGltaXQELTFWY29uZi50aWVyLnZyYW0ucmFuazMuYWxsX2dwdXMubG93X3dhdGVybWFyawQ4
MFBjb25mLnRpZXIudnJhbS5yYW5rMy5ncHUwLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52
cmFtLnJhbmszLmdwdTAubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazMuZ3B1MC5sb3dfd2F0
ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbmszLmdwdTEuaGlnaF93YXRlcm1hcmsEOTA+Y29u
Zi50aWVyLnZyYW0ucmFuazMuZ3B1MS5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rMy5ncHUx
Lmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazMuZ3B1Mi5oaWdoX3dhdGVybWFy
awQ5MD5jb25mLnRpZXIudnJhbS5yYW5rMy5ncHUyLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJh
bmszLmdwdTIubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rMy5ncHUzLmhpZ2hf
d2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbmszLmdwdTMubGltaXQELTFOY29uZi50aWVy
LnZyYW0ucmFuazMuZ3B1My5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbms0LmFs
bF9ncHVzLmhpZ2hfd2F0ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbms0LmFsbF9ncHVzLmxp
bWl0BC0xVmNvbmYudGllci52cmFtLnJhbms0LmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29u
Zi50aWVyLnZyYW0ucmFuazQuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5y
YW5rNC5ncHUwLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms0LmdwdTAubG93X3dhdGVybWFy
awQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNC5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGll
ci52cmFtLnJhbms0LmdwdTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazQuZ3B1MS5sb3df
d2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms0LmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+
Y29uZi50aWVyLnZyYW0ucmFuazQuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNC5n
cHUyLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFuazQuZ3B1My5oaWdoX3dhdGVy
bWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rNC5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFt
LnJhbms0LmdwdTMubG93X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1
cy5oaWdoX3dhdGVybWFyawQ5MEZjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1cy5saW1pdAQt
MVZjb25mLnRpZXIudnJhbS5yYW5rNS5hbGxfZ3B1cy5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGll
ci52cmFtLnJhbms1LmdwdTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazUu
Z3B1MC5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNS5ncHUwLmxvd193YXRlcm1hcmsEODBQ
Y29uZi50aWVyLnZyYW0ucmFuazUuZ3B1MS5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJh
bS5yYW5rNS5ncHUxLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms1LmdwdTEubG93X3dhdGVy
bWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNS5ncHUyLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYu
dGllci52cmFtLnJhbms1LmdwdTIubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazUuZ3B1Mi5s
b3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms1LmdwdTMuaGlnaF93YXRlcm1hcmsE
OTA+Y29uZi50aWVyLnZyYW0ucmFuazUuZ3B1My5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5r
NS5ncHUzLmxvd193YXRlcm1hcmsEODBYY29uZi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMuaGln
aF93YXRlcm1hcmsEOTBGY29uZi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMubGltaXQELTFWY29u
Zi50aWVyLnZyYW0ucmFuazYuYWxsX2dwdXMubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIudnJh
bS5yYW5rNi5ncHUwLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms2LmdwdTAu
bGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1MC5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYu
dGllci52cmFtLnJhbms2LmdwdTEuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFu
azYuZ3B1MS5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNi5ncHUxLmxvd193YXRlcm1hcmsE
ODBQY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1Mi5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIu
dnJhbS5yYW5rNi5ncHUyLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms2LmdwdTIubG93X3dh
dGVybWFyawQ4MFBjb25mLnRpZXIudnJhbS5yYW5rNi5ncHUzLmhpZ2hfd2F0ZXJtYXJrBDkwPmNv
bmYudGllci52cmFtLnJhbms2LmdwdTMubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazYuZ3B1
My5sb3dfd2F0ZXJtYXJrBDgwWGNvbmYudGllci52cmFtLnJhbms3LmFsbF9ncHVzLmhpZ2hfd2F0
ZXJtYXJrBDkwRmNvbmYudGllci52cmFtLnJhbms3LmFsbF9ncHVzLmxpbWl0BC0xVmNvbmYudGll
ci52cmFtLnJhbms3LmFsbF9ncHVzLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0ucmFu
azcuZ3B1MC5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rNy5ncHUwLmxpbWl0
BC0xTmNvbmYudGllci52cmFtLnJhbms3LmdwdTAubG93X3dhdGVybWFyawQ4MFBjb25mLnRpZXIu
dnJhbS5yYW5rNy5ncHUxLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms3Lmdw
dTEubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazcuZ3B1MS5sb3dfd2F0ZXJtYXJrBDgwUGNv
bmYudGllci52cmFtLnJhbms3LmdwdTIuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0u
cmFuazcuZ3B1Mi5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rNy5ncHUyLmxvd193YXRlcm1h
cmsEODBQY29uZi50aWVyLnZyYW0ucmFuazcuZ3B1My5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRp
ZXIudnJhbS5yYW5rNy5ncHUzLmxpbWl0BC0xTmNvbmYudGllci52cmFtLnJhbms3LmdwdTMubG93
X3dhdGVybWFyawQ4MFhjb25mLnRpZXIudnJhbS5yYW5rOC5hbGxfZ3B1cy5oaWdoX3dhdGVybWFy
awQ5MEZjb25mLnRpZXIudnJhbS5yYW5rOC5hbGxfZ3B1cy5saW1pdAQtMVZjb25mLnRpZXIudnJh
bS5yYW5rOC5hbGxfZ3B1cy5sb3dfd2F0ZXJtYXJrBDgwUGNvbmYudGllci52cmFtLnJhbms4Lmdw
dTAuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZyYW0ucmFuazguZ3B1MC5saW1pdAQtMU5j
b25mLnRpZXIudnJhbS5yYW5rOC5ncHUwLmxvd193YXRlcm1hcmsEODBQY29uZi50aWVyLnZyYW0u
cmFuazguZ3B1MS5oaWdoX3dhdGVybWFyawQ5MD5jb25mLnRpZXIudnJhbS5yYW5rOC5ncHUxLmxp
bWl0BC0xTmNvbmYudGllci52cmFtLnJhbms4LmdwdTEubG93X3dhdGVybWFyawQ4MFBjb25mLnRp
ZXIudnJhbS5yYW5rOC5ncHUyLmhpZ2hfd2F0ZXJtYXJrBDkwPmNvbmYudGllci52cmFtLnJhbms4
LmdwdTIubGltaXQELTFOY29uZi50aWVyLnZyYW0ucmFuazguZ3B1Mi5sb3dfd2F0ZXJtYXJrBDgw
UGNvbmYudGllci52cmFtLnJhbms4LmdwdTMuaGlnaF93YXRlcm1hcmsEOTA+Y29uZi50aWVyLnZy
YW0ucmFuazguZ3B1My5saW1pdAQtMU5jb25mLnRpZXIudnJhbS5yYW5rOC5ncHUzLmxvd193YXRl
cm1hcmsEODA0Y29uZi50aWVyX3N0cmF0ZWd5LmRlZmF1bHRCVlJBTSAyLCBSQU0gNSwgRElTSzAg
NSwgUEVSU0lTVCA1YGNvbmYudGllcl9zdHJhdGVneS5wcmVkaWNhdGVfZXZhbHVhdGlvbl9pbnRl
cnZhbAQ2MCRjb25mLnRvbXNfcGVyX3JhbmsCMSBjb25mLnRwc19wZXJfdG9tBDQwImNvbmYudHJp
Z2dlcl9wb3J0CDkwMDE+Y29uZi51bmlmaWVkX3NlY3VyaXR5X25hbWVzcGFjZQpGQUxTRTpjb25m
LnVzZV9leHRlcm5hbF90ZXh0X3NlcnZlcghUUlVFHGNvbmYudXNlX2h0dHBzCkZBTFNFLGNvbmYu
dmlkZW9fZGVmYXVsdF90dGwELTEoY29uZi52aWRlb19tYXhfY291bnQELTEyY29uZi52aWRlb190
ZW1wX2RpcmVjdG9yeSwvdG1wL2dwdWRiLXRlbXAtdmlkZW9zImNvbmYud2FsLmNoZWNrc3VtCFRS
VUUwY29uZi53YWwuZmx1c2hfZnJlcXVlbmN5BDYwMmNvbmYud2FsLm1heF9zZWdtZW50X3NpemUS
NTAwMDAwMDAwLGNvbmYud2FsLnNlZ21lbnRfY291bnQELTEoY29uZi53YWwuc3luY19wb2xpY3kK
Zmx1c2g2Y29uZi53b3JrZXJfaHR0cF9zZXJ2ZXJfaXBz6AExNzIuMzEuMzMuMzA7MTcyLjMxLjMz
LjMwOzE3Mi4zMS4zMy4zMTsxNzIuMzEuMzMuMzI7MTcyLjMxLjMzLjMzOzE3Mi4zMS4zMy4zNDsx
NzIuMzEuMzMuMzU7MTcyLjMxLjMzLjM2OzE3Mi4zMS4zMy4zNzpjb25mLndvcmtlcl9odHRwX3Nl
cnZlcl9wb3J0c1g5MTkxOzkxOTI7OTE5Mzs5MTk0OzkxOTU7OTE5Njs5MTk3OzkxOTg7OTE5OThj
b25mLndvcmtlcl9odHRwX3NlcnZlcl91cmxzwANodHRwOi8vMTcyLjMxLjMzLjMwOjkxOTE7aHR0
cDovLzE3Mi4zMS4zMy4zMDo5MTkyO2h0dHA6Ly8xNzIuMzEuMzMuMzE6OTE5MztodHRwOi8vMTcy
LjMxLjMzLjMyOjkxOTQ7aHR0cDovLzE3Mi4zMS4zMy4zMzo5MTk1O2h0dHA6Ly8xNzIuMzEuMzMu
MzQ6OTE5NjtodHRwOi8vMTcyLjMxLjMzLjM1OjkxOTc7aHR0cDovLzE3Mi4zMS4zMy4zNjo5MTk4
O2h0dHA6Ly8xNzIuMzEuMzMuMzc6OTE5OUhjb25mLndvcmtlcl9odHRwX3NlcnZlcl91cmxzX3By
aXZhdGXAA2h0dHA6Ly8xNzIuMzEuMzMuMzA6OTE5MTtodHRwOi8vMTcyLjMxLjMzLjMwOjkxOTI7
aHR0cDovLzE3Mi4zMS4zMy4zMTo5MTkzO2h0dHA6Ly8xNzIuMzEuMzMuMzI6OTE5NDtodHRwOi8v
MTcyLjMxLjMzLjMzOjkxOTU7aHR0cDovLzE3Mi4zMS4zMy4zNDo5MTk2O2h0dHA6Ly8xNzIuMzEu
MzMuMzU6OTE5NztodHRwOi8vMTcyLjMxLjMzLjM2OjkxOTg7aHR0cDovLzE3Mi4zMS4zMy4zNzo5
MTk5KHN5c3RlbS5mb250X2ZhbWlsaWVzpgFEZWphVnUgTWF0aCBUZVggR3lyZSxEZWphVnUgU2Fu
cyBNb25vLERlamFWdSBTYW5zLERlamFWdSBTZXJpZixTYW5zLFNlcmlmLE1vbm9zcGFjZTB2ZXJz
aW9uLmdwdWRiX2J1aWxkX2RhdGUoRmViIDE0IDIwMjQgMjM6NDk6MDFAdmVyc2lvbi5ncHVkYl9j
b21wdXRlX2NhcGFiaWxpdHkWNjA7NzA7ODA7ODY4dmVyc2lvbi5ncHVkYl9jb21wdXRlX2VuZ2lu
ZQhDVURBPnZlcnNpb24uZ3B1ZGJfY29yZV9saWJzX3ZlcnNpb24UMjAyNDAyMTMwMDR2ZXJzaW9u
LmdwdWRiX2NvcmVfdmVyc2lvbiw3LjIuMC4xLjIwMjQwMjE0MjEwOTA2NHZlcnNpb24uZ3B1ZGJf
ZmlsZV92ZXJzaW9uFDIwMjEwMzExMjAqdmVyc2lvbi5ncHVkYl92ZXJzaW9uUDkyMjYwYTMyOWNh
NDVjYjBlMzc3NzZjZjkxNDQ5NzE3OWY2MjExNDM0dmVyc2lvbi5ncHVkYl92ZXJzaW9uX2RhdGUy
MjAyNC0wMi0xNCAyMTowOTowNiAtMDUwMCx2ZXJzaW9uLnB5dGhvbl92ZXJzaW9uDjMuMTAuMTMA
AAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:38 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00095'
status:
code: 200
message: OK
- request:
body: !!binary |
hAFHRU5FUkFURSBQUk9NUFQgV0lUSCBPUFRJT05TIChDT05URVhUX05BTUVTID0gJ2RlbW8udGVz
dF9sbG1fY3R4JykAAgxiaW5hcnkAAAA=
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/execute/sql
response:
body:
string: !!binary |
BE9LAChleGVjdXRlX3NxbF9yZXNwb25zZe4MAPYDeyJuYW1lIjoiZ2VuZXJpY19yZXNwb25zZSIs
InR5cGUiOiJyZWNvcmQiLCJmaWVsZHMiOlt7Im5hbWUiOiJjb2x1bW5fMSIsInR5cGUiOnsidHlw
ZSI6ImFycmF5IiwiaXRlbXMiOiJzdHJpbmcifX0seyJuYW1lIjoiY29sdW1uX2hlYWRlcnMiLCJ0
eXBlIjp7InR5cGUiOiJhcnJheSIsIml0ZW1zIjoic3RyaW5nIn19LHsibmFtZSI6ImNvbHVtbl9k
YXRhdHlwZXMiLCJ0eXBlIjp7InR5cGUiOiJhcnJheSIsIml0ZW1zIjoic3RyaW5nIn19XX2qBwL+
BnsicGF5bG9hZCI6eyJjb250ZXh0IjpbeyJ0YWJsZSI6ImRlbW8udGVzdF9wcm9maWxlcyIsImNv
bHVtbnMiOlsidXNlcm5hbWUgVkFSQ0hBUiAoMzIpIE5PVCBOVUxMIiwibmFtZSBWQVJDSEFSICgz
MikgTk9UIE5VTEwiLCJzZXggVkFSQ0hBUiAoMSkgTk9UIE5VTEwiLCJhZGRyZXNzIFZBUkNIQVIg
KDY0KSBOT1QgTlVMTCIsIm1haWwgVkFSQ0hBUiAoMzIpIE5PVCBOVUxMIiwiYmlydGhkYXRlIFRJ
TUVTVEFNUCBOT1QgTlVMTCJdLCJkZXNjcmlwdGlvbiI6IkNvbnRhaW5zIHVzZXIgcHJvZmlsZXMu
IiwicnVsZXMiOltdfSx7InNhbXBsZXMiOnsiSG93IG1hbnkgbWFsZSB1c2VycyBhcmUgdGhlcmU/
Ijoic2VsZWN0IGNvdW50KDEpIGFzIG51bV91c2Vyc1xuICAgICAgICAgICAgZnJvbSBkZW1vLnRl
c3RfcHJvZmlsZXNcbiAgICAgICAgICAgIHdoZXJlIHNleCA9ICcnTScnOyJ9fV19fQACDFByb21w
dAACDHN0cmluZwAAAgAACCBYLUtpbmV0aWNhLUdyb3VwBkRETApjb3VudAIwGmxhc3RfZW5kcG9p
bnQsL2dlbmVyYXRlL3NxbC9pbnRlcm5hbC50b3RhbF9udW1iZXJfb2ZfcmVjb3JkcwIwAAA=
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:38 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-Kinetica-Group:
- DDL
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.00685'
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "system", "content": "CREATE TABLE demo.test_profiles
AS\n(\n username VARCHAR (32) NOT NULL,\n name VARCHAR (32) NOT NULL,\n sex
VARCHAR (1) NOT NULL,\n address VARCHAR (64) NOT NULL,\n mail VARCHAR (32)
NOT NULL,\n birthdate TIMESTAMP NOT NULL\n);\nCOMMENT ON TABLE demo.test_profiles
IS ''Contains user profiles.'';"}, {"role": "user", "content": "How many male
users are there?"}, {"role": "assistant", "content": "select count(1) as num_users\n from
demo.test_profiles\n where sex = ''M'';"}, {"role": "user", "content":
"What are the female users ordered by username?"}]}'
headers:
Accept:
- text/plain
Content-type:
- application/json
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/chat/completions
response:
body:
string: '{"status":"OK","data":{"status":"OK","data":{"id":"chatCompl-1708561599","object":"chat.completion","created":1708561600,"model":"sqlassist-1.1","choices":[{"index":0,"message":{"role":"assistant","content":"SELECT
username, name\n FROM demo.test_profiles\n WHERE sex
= ''F''\n ORDER BY username;"},"finish_reason":"stop"}],"usage":{"prompt_tokens":140,"completion_tokens":22,"total_tokens":162},"prompt":"What
are the female users ordered by username?"}}}'
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/json
Date:
- Thu, 22 Feb 2024 00:26:39 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.89590'
status:
code: 200
message: OK
- request:
body: !!binary |
6AFTRUxFQ1QgdXNlcm5hbWUsIG5hbWUKICAgICAgICAgICAgRlJPTSBkZW1vLnRlc3RfcHJvZmls
ZXMKICAgICAgICAgICAgV0hFUkUgc2V4ID0gJ0YnCiAgICAgICAgICAgIE9SREVSIEJZIHVzZXJu
YW1lOwCQTgxiaW5hcnkAAAIYcGFnaW5nX3RhYmxlSGJiYWMyMGQ4XzI2ZThfNDI1Zl9iY2ZhX2Ez
MDczNzZmMzhmMAA=
headers:
Accept:
- application/octet-stream
Content-type:
- application/octet-stream
authorization:
- DUMMY
method: POST
uri: http://172.31.33.30:9191/execute/sql
response:
body:
string: !!binary |
BE9LAChleGVjdXRlX3NxbF9yZXNwb25zZZ4YAPAEeyJuYW1lIjoiZ2VuZXJpY19yZXNwb25zZSIs
InR5cGUiOiJyZWNvcmQiLCJmaWVsZHMiOlt7Im5hbWUiOiJjb2x1bW5fMSIsInR5cGUiOnsidHlw
ZSI6ImFycmF5IiwiaXRlbXMiOiJzdHJpbmcifX0seyJuYW1lIjoiY29sdW1uXzIiLCJ0eXBlIjp7
InR5cGUiOiJhcnJheSIsIml0ZW1zIjoic3RyaW5nIn19LHsibmFtZSI6ImNvbHVtbl9oZWFkZXJz
IiwidHlwZSI6eyJ0eXBlIjoiYXJyYXkiLCJpdGVtcyI6InN0cmluZyJ9fSx7Im5hbWUiOiJjb2x1
bW5fZGF0YXR5cGVzIiwidHlwZSI6eyJ0eXBlIjoiYXJyYXkiLCJpdGVtcyI6InN0cmluZyJ9fV19
2hFYFmFsZXhhbmRlcjQwDmJidXJ0b24OYnJpYW4xMhJicm93bmFubmEMY2FybDE5HmNhc3RpbGxv
Y2FtZXJvbhpjaHJpc3RvcGhlcjI0GGNsYXJrbWF0dGhldx5jb2xsaW5zbWljaGVsbGUWZGF2aWRn
cmltZXMMZG1vcmFuEmVkdWFyZG82ORRlcmljZGVubmlzGmVyaWthcmV5bm9sZHMYZmxveWRjeW50
aGlhFmdyYW50aG9ydG9uGGhlbnJ5Y29sbGlucxpoaWdnaW5zcm9iZXJ0GGphY3F1ZWxpbmUyNw5q
YW1lczI2DGpzaG9ydBRraW1iZXJseTY2HGtyaXN0aW5iYXJuZXR0EmxhbmVqYWNvYg5sYXVyYTI4
GGxhdXJhbWF5bmFyZBBsZWR3YXJkcwxsaGVucnkSbWVsaXNzYTQ5Gm1lbGlzc2FidXRsZXIWbW9v
cmVhbmRyZXcQbmF0aGFuNjMabmljb2xlYmFsZHdpbg5xd3JpZ2h0DnJhbHBoMjUMc21ja2VlEnN1
c2Fucm9zZQ50cmFjeTUzGnZpY3RvcmlhbW9vcmUYd2FsdGVyc2RhdmlkDndhbmRhOTkSd2lsbGlh
bTU3HHdpbGxpYW1uaWNob2xzDnltb2xpbmEAWBhUaW5hIFJhbWlyZXoYUGF1bGEgS2Fpc2VyIlN0
ZWZhbmllIFdpbGxpYW1zGkplbm5pZmVyIFJvd2UYQW1hbmRhIFBvdHRzGkJyb29rZSBIYXJtb24W
S2VsbHkgV2F0dHMaSnVsaWUgQ29sbGlucyBIZWF0aGVyIENhbGxhaGFuFEphbmV0IExhcmEeQXNo
bGV5IEpvaG5zdG9uFEhhbGV5IEJlY2seSGVhdGhlciBKYWNrc29uHENhcnJpZSBTaGVsdG9uFEp1
ZHkgT3J0aXoaTWVsaXNzYSBEaXhvbhxEYXJsZW5lIE11cnJheRpNYXJ5IFJpY2hhcmRzEk1hcnkg
U290bx5QYXRyaWNpYSBQb3R0ZXIaQmVja3kgSm9obnNvbhZNYXJpYSBXb29kcx5EZWJvcmFoIFdh
bGxhY2UaQ2hlbHNlYSBXZWVrcxhEb25uYSBNYWRkZW4WRW1pbHkgQWxsZW4mTXJzLiBKdWxpZSBC
cmFkc2hhdxhFcmluIEJhcmFqYXMWV2VuZHkgUmVlc2UWQWxleGEgS2VsbHkaV2VuZHkgUmFtaXJl
ehhTdGFjZXkgSGFyZHkaQW5nZWxhIEhvcnRvbhxLZWxseSBGcmFua2xpbhhMYXVyYSBOZWxzb24Y
RXJpY2EgUm9tZXJvGE1hcmlhIFBhcmtlchpDaGVsc2VhIFNpbmdoHkNocmlzdHkgSm9obnNvbiRT
YW1hbnRoYSBMZXdpcyBERFMeSmVzc2ljYSBIZXJyZXJhHkVsaXphYmV0aCBXZWxscxZMYXVyYSBT
dG9uZRxBbmdlbGEgU3VtbWVycwAEEHVzZXJuYW1lCG5hbWUABAxjaGFyMzIMY2hhcjMyAABYAAAI
IFgtS2luZXRpY2EtR3JvdXAKUVVFUlkKY291bnQENDQabGFzdF9lbmRwb2ludCovZ2V0L3JlY29y
ZHMvYnljb2x1bW4udG90YWxfbnVtYmVyX29mX3JlY29yZHMENDQAAA==
headers:
Access-Control-Allow-Origin:
- '*'
Access-Control-Expose-Headers:
- x-request-time-secs
Connection:
- Close
Content-Type:
- application/octet-stream
Date:
- Thu, 22 Feb 2024 00:26:40 GMT
Strict-Transport-Security:
- max-age=31536000; includeSubDomains
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-Kinetica-Group:
- QUERY
X-XSS-Protection:
- 1; mode=block
x-request-time-secs:
- '0.04596'
status:
code: 200
message: OK
version: 1
|
0 | lc_public_repos/langchain/libs/community/tests/integration_tests/smith | lc_public_repos/langchain/libs/community/tests/integration_tests/smith/evaluation/test_runner_utils.py | from typing import Iterator, List, Optional
from uuid import uuid4
import pytest
from langchain.chains.llm import LLMChain
from langchain.evaluation import EvaluatorType
from langchain.smith import RunEvalConfig, run_on_dataset
from langchain.smith.evaluation import InputFormatError
from langchain.smith.evaluation.runner_utils import arun_on_dataset
from langchain_core.messages import BaseMessage, HumanMessage
from langchain_core.prompts.chat import ChatPromptTemplate
from langsmith import Client as Client
from langsmith.evaluation import run_evaluator
from langsmith.schemas import DataType, Example, Run
from langchain_community.chat_models import ChatOpenAI
from langchain_community.llms.openai import OpenAI
def _check_all_feedback_passed(_project_name: str, client: Client) -> None:
    """Smoke-check that runs completed and every feedback entry has a truthy score.

    LangSmith queue delays can leave either the run list or the feedback list
    empty shortly after a test run; in that case we return early instead of
    failing spuriously.
    """
    runs = list(client.list_runs(project_name=_project_name, execution_order=1))
    if not runs:
        # Queue delays. We are mainly just smoke checking rn.
        return
    feedback = list(client.list_feedback(run_ids=[run.id for run in runs]))
    if not feedback:
        return
    # Use a generator (no intermediate list) and rely on all()'s truthiness
    # test directly -- wrapping each score in bool() was redundant.
    assert all(f.score for f in feedback)
@run_evaluator
def not_empty(run: Run, example: Optional[Example] = None) -> dict:
    """Custom evaluator: score is truthy iff the run produced a non-empty first output."""
    return {
        # Falsy when there are no outputs; otherwise the first output value.
        "score": run.outputs and next(iter(run.outputs.values())),
        "key": "not_empty",
    }
@pytest.fixture
def eval_project_name() -> str:
    """Return a per-test project name made unique by a UUID tail."""
    suffix = str(uuid4())[-8:]
    return f"lcp integration tests - {suffix}"
@pytest.fixture(scope="module")
def client() -> Client:
    """Single LangSmith client shared across the whole test module."""
    return Client()
@pytest.fixture(
    scope="module",
)
def kv_dataset_name() -> Iterator[str]:
    """Upload a multi-input/multi-output KV dataset and yield its unique name."""
    import pandas as pd

    client = Client()
    questions = [
        "What's the capital of California?",
        "What's the capital of Nevada?",
        "What's the capital of Oregon?",
        "What's the capital of Washington?",
    ]
    frame = pd.DataFrame(
        {
            "some_input": questions,
            "other_input": ["a", "b", "c", "d"],
            "some_output": ["Sacramento", "Carson City", "Salem", "Olympia"],
            "other_output": ["e", "f", "g", "h"],
        }
    )
    suffix = str(uuid4())[-8:]
    _dataset_name = f"lcp kv dataset integration tests - {suffix}"
    client.upload_dataframe(
        frame,
        name=_dataset_name,
        input_keys=["some_input", "other_input"],
        output_keys=["some_output", "other_output"],
        description="Integration test dataset",
    )
    yield _dataset_name
def test_chat_model(
    kv_dataset_name: str, eval_project_name: str, client: Client
) -> None:
    """Chat model: fails without reference_key, fails on raw KV input, passes mapped."""
    llm = ChatOpenAI(temperature=0)
    config = RunEvalConfig(
        evaluators=[EvaluatorType.QA], custom_evaluators=[not_empty]
    )
    # QA evaluation needs an explicit reference key on a multi-output dataset.
    with pytest.raises(ValueError, match="Must specify reference_key"):
        run_on_dataset(
            client=client,
            dataset_name=kv_dataset_name,
            llm_or_chain_factory=llm,
            evaluation=config,
        )
    config = RunEvalConfig(
        evaluators=[EvaluatorType.QA],
        reference_key="some_output",
    )
    # Multi-key inputs cannot be fed to a language model directly.
    with pytest.raises(
        InputFormatError, match="Example inputs do not match language model"
    ):
        run_on_dataset(
            client=client,
            dataset_name=kv_dataset_name,
            llm_or_chain_factory=llm,
            evaluation=config,
        )

    def input_mapper(d: dict) -> List[BaseMessage]:
        return [HumanMessage(content=d["some_input"])]

    # Prepending an input mapper makes the run succeed.
    run_on_dataset(
        client=client,
        dataset_name=kv_dataset_name,
        llm_or_chain_factory=input_mapper | llm,
        evaluation=config,
        project_name=eval_project_name,
        tags=["shouldpass"],
    )
    _check_all_feedback_passed(eval_project_name, client)
def test_llm(kv_dataset_name: str, eval_project_name: str, client: Client) -> None:
    """Completion model: needs reference_key, then a string input mapper to pass."""
    llm = OpenAI(temperature=0)
    config = RunEvalConfig(evaluators=[EvaluatorType.QA])
    with pytest.raises(ValueError, match="Must specify reference_key"):
        run_on_dataset(
            client=client,
            dataset_name=kv_dataset_name,
            llm_or_chain_factory=llm,
            evaluation=config,
        )
    config = RunEvalConfig(
        evaluators=[EvaluatorType.QA, EvaluatorType.CRITERIA],
        reference_key="some_output",
    )
    # KV examples are not valid LLM inputs without mapping.
    with pytest.raises(InputFormatError, match="Example inputs"):
        run_on_dataset(
            client=client,
            dataset_name=kv_dataset_name,
            llm_or_chain_factory=llm,
            evaluation=config,
        )

    def input_mapper(d: dict) -> str:
        return d["some_input"]

    run_on_dataset(
        client=client,
        dataset_name=kv_dataset_name,
        llm_or_chain_factory=input_mapper | llm,
        evaluation=config,
        project_name=eval_project_name,
        tags=["shouldpass"],
    )
    _check_all_feedback_passed(eval_project_name, client)
def test_chain(kv_dataset_name: str, eval_project_name: str, client: Client) -> None:
    """Chain: needs reference_key, then an input mapper matching its prompt key."""
    llm = ChatOpenAI(temperature=0)
    chain = LLMChain.from_string(llm, "The answer to the {question} is: ")
    config = RunEvalConfig(evaluators=[EvaluatorType.QA, EvaluatorType.CRITERIA])
    with pytest.raises(ValueError, match="Must specify reference_key"):
        run_on_dataset(
            client=client,
            dataset_name=kv_dataset_name,
            llm_or_chain_factory=lambda: chain,
            evaluation=config,
        )
    config = RunEvalConfig(
        evaluators=[EvaluatorType.QA, EvaluatorType.CRITERIA],
        reference_key="some_output",
    )
    # Dataset keys do not match the chain's "question" input.
    with pytest.raises(InputFormatError, match="Example inputs"):
        run_on_dataset(
            client=client,
            dataset_name=kv_dataset_name,
            llm_or_chain_factory=lambda: chain,
            evaluation=config,
        )
    config = RunEvalConfig(
        custom_evaluators=[not_empty],
    )

    def right_input_mapper(d: dict) -> dict:
        return {"question": d["some_input"]}

    run_on_dataset(
        client=client,
        dataset_name=kv_dataset_name,
        llm_or_chain_factory=lambda: right_input_mapper | chain,
        evaluation=config,
        project_name=eval_project_name,
        tags=["shouldpass"],
    )
    _check_all_feedback_passed(eval_project_name, client)
### Testing Chat Datasets
@pytest.fixture(
    scope="module",
)
def chat_dataset_name() -> Iterator[str]:
    """Create a chat-format dataset (serialized message dicts); yield its name."""
    import pandas as pd

    def _create_message(txt: str, role: str = "human") -> List[dict]:
        return [{"type": role, "data": {"content": txt}}]

    client = Client()
    questions = (
        "What's the capital of California?",
        "What's the capital of Nevada?",
        "What's the capital of Oregon?",
        "What's the capital of Washington?",
    )
    answers = ("Sacramento", "Carson City", "Salem", "Olympia")
    df = pd.DataFrame(
        {
            "input": [_create_message(q) for q in questions],
            # Outputs are single message dicts, not lists.
            "output": [_create_message(a, role="ai")[0] for a in answers],
        }
    )
    suffix = str(uuid4())[-8:]
    _dataset_name = f"lcp chat dataset integration tests - {suffix}"
    ds = client.create_dataset(
        _dataset_name, description="Integration test dataset", data_type=DataType.chat
    )
    for row in df.itertuples():
        client.create_example(
            dataset_id=ds.id,
            inputs={"input": row.input},
            outputs={"output": row.output},
        )
    yield _dataset_name
def test_chat_model_on_chat_dataset(
    chat_dataset_name: str, eval_project_name: str, client: Client
) -> None:
    """A chat model consumes a chat-format dataset with no input mapping."""
    config = RunEvalConfig(custom_evaluators=[not_empty])
    run_on_dataset(
        client=client,
        dataset_name=chat_dataset_name,
        llm_or_chain_factory=ChatOpenAI(temperature=0),
        evaluation=config,
        project_name=eval_project_name,
    )
    _check_all_feedback_passed(eval_project_name, client)
def test_llm_on_chat_dataset(
    chat_dataset_name: str, eval_project_name: str, client: Client
) -> None:
    """A completion model also runs directly on a chat-format dataset."""
    config = RunEvalConfig(custom_evaluators=[not_empty])
    run_on_dataset(
        client=client,
        dataset_name=chat_dataset_name,
        llm_or_chain_factory=OpenAI(temperature=0),
        evaluation=config,
        project_name=eval_project_name,
        tags=["shouldpass"],
    )
    _check_all_feedback_passed(eval_project_name, client)
def test_chain_on_chat_dataset(chat_dataset_name: str, client: Client) -> None:
    """Chains are rejected outright on chat-format datasets."""
    chain = LLMChain.from_string(
        ChatOpenAI(temperature=0), "The answer to the {question} is: "
    )
    config = RunEvalConfig(evaluators=[EvaluatorType.QA, EvaluatorType.CRITERIA])
    with pytest.raises(
        ValueError, match="Cannot evaluate a chain on dataset with data_type=chat"
    ):
        run_on_dataset(
            client=client,
            dataset_name=chat_dataset_name,
            llm_or_chain_factory=lambda: chain,
            evaluation=config,
        )
@pytest.fixture(
    scope="module",
)
def llm_dataset_name() -> Iterator[str]:
    """Upload a completion-style (single string in/out) dataset; yield its name."""
    import pandas as pd

    client = Client()
    frame = pd.DataFrame(
        {
            "input": [
                "What's the capital of California?",
                "What's the capital of Nevada?",
                "What's the capital of Oregon?",
                "What's the capital of Washington?",
            ],
            "output": ["Sacramento", "Carson City", "Salem", "Olympia"],
        }
    )
    suffix = str(uuid4())[-8:]
    _dataset_name = f"lcp llm dataset integration tests - {suffix}"
    client.upload_dataframe(
        frame,
        name=_dataset_name,
        input_keys=["input"],
        output_keys=["output"],
        description="Integration test dataset",
        data_type=DataType.llm,
    )
    yield _dataset_name
def test_chat_model_on_llm_dataset(
    llm_dataset_name: str, eval_project_name: str, client: Client
) -> None:
    """A chat model runs directly on a completion-style dataset."""
    config = RunEvalConfig(custom_evaluators=[not_empty])
    run_on_dataset(
        client=client,
        dataset_name=llm_dataset_name,
        llm_or_chain_factory=ChatOpenAI(temperature=0),
        evaluation=config,
        project_name=eval_project_name,
        tags=["shouldpass"],
    )
    _check_all_feedback_passed(eval_project_name, client)
def test_llm_on_llm_dataset(
    llm_dataset_name: str, eval_project_name: str, client: Client
) -> None:
    """A completion model runs directly on a completion-style dataset."""
    config = RunEvalConfig(custom_evaluators=[not_empty])
    run_on_dataset(
        client=client,
        dataset_name=llm_dataset_name,
        llm_or_chain_factory=OpenAI(temperature=0),
        evaluation=config,
        project_name=eval_project_name,
        tags=["shouldpass"],
    )
    _check_all_feedback_passed(eval_project_name, client)
def test_chain_on_llm_dataset(llm_dataset_name: str, client: Client) -> None:
    """Chains are rejected outright on completion-style datasets."""
    chain = LLMChain.from_string(
        ChatOpenAI(temperature=0), "The answer to the {question} is: "
    )
    config = RunEvalConfig(evaluators=[EvaluatorType.QA, EvaluatorType.CRITERIA])
    with pytest.raises(
        ValueError, match="Cannot evaluate a chain on dataset with data_type=llm"
    ):
        run_on_dataset(
            client=client,
            dataset_name=llm_dataset_name,
            llm_or_chain_factory=lambda: chain,
            evaluation=config,
        )
@pytest.fixture(
    scope="module",
)
def kv_singleio_dataset_name() -> Iterator[str]:
    """Upload a KV dataset with exactly one input and one output key; yield its name."""
    import pandas as pd

    client = Client()
    frame = pd.DataFrame(
        {
            "the wackiest input": [
                "What's the capital of California?",
                "What's the capital of Nevada?",
                "What's the capital of Oregon?",
                "What's the capital of Washington?",
            ],
            "unthinkable output": ["Sacramento", "Carson City", "Salem", "Olympia"],
        }
    )
    suffix = str(uuid4())[-8:]
    _dataset_name = f"lcp singleio kv dataset integration tests - {suffix}"
    client.upload_dataframe(
        frame,
        name=_dataset_name,
        input_keys=["the wackiest input"],
        output_keys=["unthinkable output"],
        description="Integration test dataset",
    )
    yield _dataset_name
def test_chat_model_on_kv_singleio_dataset(
    kv_singleio_dataset_name: str, eval_project_name: str, client: Client
) -> None:
    """Single-key KV examples are mapped automatically for a chat model."""
    config = RunEvalConfig(evaluators=[EvaluatorType.QA, EvaluatorType.CRITERIA])
    run_on_dataset(
        client=client,
        dataset_name=kv_singleio_dataset_name,
        llm_or_chain_factory=ChatOpenAI(temperature=0),
        evaluation=config,
        project_name=eval_project_name,
        tags=["shouldpass"],
    )
    _check_all_feedback_passed(eval_project_name, client)
def test_llm_on_kv_singleio_dataset(
    kv_singleio_dataset_name: str, eval_project_name: str, client: Client
) -> None:
    """Single-key KV examples are mapped automatically for a completion model."""
    config = RunEvalConfig(custom_evaluators=[not_empty])
    run_on_dataset(
        client=client,
        dataset_name=kv_singleio_dataset_name,
        llm_or_chain_factory=OpenAI(temperature=0),
        evaluation=config,
        project_name=eval_project_name,
        tags=["shouldpass"],
    )
    _check_all_feedback_passed(eval_project_name, client)
def test_chain_on_kv_singleio_dataset(
    kv_singleio_dataset_name: str, eval_project_name: str, client: Client
) -> None:
    """A chain factory works on single-key KV data despite mismatched key names."""
    chain = LLMChain.from_string(
        ChatOpenAI(temperature=0), "The answer to the {question} is: "
    )
    config = RunEvalConfig(custom_evaluators=[not_empty])
    run_on_dataset(
        client=client,
        dataset_name=kv_singleio_dataset_name,
        llm_or_chain_factory=lambda: chain,
        evaluation=config,
        project_name=eval_project_name,
        tags=["shouldpass"],
    )
    _check_all_feedback_passed(eval_project_name, client)
async def test_runnable_on_kv_singleio_dataset(
    kv_singleio_dataset_name: str, eval_project_name: str, client: Client
) -> None:
    """A prompt | model runnable is evaluated asynchronously on single-key KV data."""
    pipeline = (
        ChatPromptTemplate.from_messages([("human", "{the wackiest input}")])
        | ChatOpenAI()
    )
    config = RunEvalConfig(custom_evaluators=[not_empty])
    await arun_on_dataset(
        client=client,
        dataset_name=kv_singleio_dataset_name,
        llm_or_chain_factory=pipeline,
        evaluation=config,
        project_name=eval_project_name,
        tags=["shouldpass"],
    )
    _check_all_feedback_passed(eval_project_name, client)
async def test_arb_func_on_kv_singleio_dataset(
    kv_singleio_dataset_name: str, eval_project_name: str, client: Client
) -> None:
    """An arbitrary callable wrapping a runnable is accepted as the eval target."""
    pipeline = (
        ChatPromptTemplate.from_messages([("human", "{the wackiest input}")])
        | ChatOpenAI()
    )

    def my_func(x: dict) -> str:
        # Guard-clause form: reject non-string content, then return it.
        content = pipeline.invoke(x).content
        if not isinstance(content, str):
            raise ValueError(
                f"Expected message with content type string, got {content}"
            )
        return content

    config = RunEvalConfig(custom_evaluators=[not_empty])
    await arun_on_dataset(
        client=client,
        dataset_name=kv_singleio_dataset_name,
        llm_or_chain_factory=my_func,
        evaluation=config,
        project_name=eval_project_name,
        tags=["shouldpass"],
    )
    _check_all_feedback_passed(eval_project_name, client)
|
0 | lc_public_repos/langchain/libs/community/tests | lc_public_repos/langchain/libs/community/tests/unit_tests/test_sql_database.py | # flake8: noqa: E501
"""Test SQL database wrapper."""
import pytest
import sqlalchemy as sa
from packaging import version
from sqlalchemy import (
Column,
Integer,
MetaData,
String,
Table,
Text,
insert,
select,
)
from sqlalchemy.engine import Engine, Result
from langchain_community.utilities.sql_database import SQLDatabase, truncate_word
# True when running under SQLAlchemy 1.x; several tests below xfail/skip on it.
is_sqlalchemy_v1 = version.parse(sa.__version__).major == 1

metadata_obj = MetaData()

# Two-table schema shared by every test in this module.
user = Table(
    "user",
    metadata_obj,
    Column("user_id", Integer, primary_key=True),
    Column("user_name", String(16), nullable=False),
    Column("user_bio", Text, nullable=True),
)

company = Table(
    "company",
    metadata_obj,
    Column("company_id", Integer, primary_key=True),
    Column("company_location", String, nullable=False),
)
@pytest.fixture
def engine() -> Engine:
    """Fresh in-memory SQLite engine per test."""
    return sa.create_engine("sqlite:///:memory:")


@pytest.fixture
def db(engine: Engine) -> SQLDatabase:
    """SQLDatabase wrapper that reflects all tables eagerly at construction."""
    metadata_obj.create_all(engine)
    return SQLDatabase(engine)


@pytest.fixture
def db_lazy_reflection(engine: Engine) -> SQLDatabase:
    """SQLDatabase wrapper that defers reflection until table info is requested."""
    metadata_obj.create_all(engine)
    return SQLDatabase(engine, lazy_table_reflection=True)
@pytest.mark.xfail(is_sqlalchemy_v1, reason="SQLAlchemy 1.x issues")
def test_table_info(db: SQLDatabase) -> None:
    """Test that table info is constructed properly.

    The assertion sorts the characters of the whitespace-normalized strings,
    so it checks character content rather than ordering or layout.
    """
    output = db.table_info
    # Fixed: the user table's sample-rows comment was terminated with "/*"
    # instead of "*/". The char-multiset comparison is unaffected either way.
    expected_output = """
    CREATE TABLE user (
    user_id INTEGER NOT NULL,
    user_name VARCHAR(16) NOT NULL,
    user_bio TEXT,
    PRIMARY KEY (user_id)
    )

    /*
    3 rows from user table:
    user_id user_name user_bio
    */

    CREATE TABLE company (
    company_id INTEGER NOT NULL,
    company_location VARCHAR NOT NULL,
    PRIMARY KEY (company_id)
    )

    /*
    3 rows from company table:
    company_id company_location
    */
    """

    assert sorted(" ".join(output.split())) == sorted(" ".join(expected_output.split()))
@pytest.mark.xfail(is_sqlalchemy_v1, reason="SQLAlchemy 1.x issues")
def test_table_info_lazy_reflection(db_lazy_reflection: SQLDatabase) -> None:
    """Test that table info with lazy reflection"""
    # Nothing is reflected until table info is first requested.
    assert len(db_lazy_reflection._metadata.sorted_tables) == 0
    output = db_lazy_reflection.get_table_info(["user"])
    assert len(db_lazy_reflection._metadata.sorted_tables) == 1
    # Fixed: the sample-rows comment was terminated with "/*" instead of "*/".
    # The char-multiset comparison below is unaffected either way.
    expected_output = """
    CREATE TABLE user (
    user_id INTEGER NOT NULL,
    user_name VARCHAR(16) NOT NULL,
    user_bio TEXT,
    PRIMARY KEY (user_id)
    )

    /*
    3 rows from user table:
    user_id user_name user_bio
    */
    """

    assert sorted(" ".join(output.split())) == sorted(" ".join(expected_output.split()))

    # Reflecting a second table adds it; sorted_tables is alphabetical.
    db_lazy_reflection.get_table_info(["company"])
    assert len(db_lazy_reflection._metadata.sorted_tables) == 2
    assert db_lazy_reflection._metadata.sorted_tables[0].name == "company"
    assert db_lazy_reflection._metadata.sorted_tables[1].name == "user"
@pytest.mark.xfail(is_sqlalchemy_v1, reason="SQLAlchemy 1.x issues")
def test_table_info_w_sample_rows(db: SQLDatabase) -> None:
    """Test that table info is constructed properly."""
    # Provision two users so the sample-rows section has content.
    seed_rows = [
        {"user_id": 13, "user_name": "Harrison", "user_bio": "bio"},
        {"user_id": 14, "user_name": "Chase", "user_bio": "bio"},
    ]
    db._execute(insert(user).values(seed_rows))

    # Rebuild the wrapper asking for two sample rows per table.
    db = SQLDatabase(db._engine, sample_rows_in_table_info=2)
    output = db.table_info

    expected_output = """
    CREATE TABLE company (
    company_id INTEGER NOT NULL,
    company_location VARCHAR NOT NULL,
    PRIMARY KEY (company_id)
    )

    /*
    2 rows from company table:
    company_id company_location
    */

    CREATE TABLE user (
    user_id INTEGER NOT NULL,
    user_name VARCHAR(16) NOT NULL,
    user_bio TEXT,
    PRIMARY KEY (user_id)
    )

    /*
    2 rows from user table:
    user_id user_name user_bio
    13 Harrison bio
    14 Chase bio
    */
    """

    # Word-multiset comparison: insensitive to whitespace and ordering.
    assert sorted(output.split()) == sorted(expected_output.split())
def test_sql_database_run_fetch_all(db: SQLDatabase) -> None:
    """Verify running SQL expressions returning results as strings."""
    # Provision a row whose bio is long enough to trigger truncation.
    long_bio = "That is my Bio " * 24
    db._execute(
        insert(user).values(user_id=13, user_name="Harrison", user_bio=long_bio)
    )

    command = "select user_id, user_name, user_bio from user where user_id = 13"
    # Default fetch truncates long text fields with an ellipsis.
    truncated_bio = "That is my Bio " * 19 + "That is my..."
    assert db.run(command) == f"[(13, 'Harrison', '{truncated_bio}')]"

    # include_columns renders rows as dicts keyed by column name.
    expected_full = (
        "[{'user_id': 13, 'user_name': 'Harrison', 'user_bio': '%s'}]" % truncated_bio
    )
    assert db.run(command, include_columns=True) == expected_full
def test_sql_database_run_fetch_result(db: SQLDatabase) -> None:
    """Verify running SQL expressions returning results as SQLAlchemy `Result` instances."""
    db._execute(insert(user).values(user_id=17, user_name="hwchase"))

    command = "select user_id, user_name, user_bio from user where user_id = 17"
    result = db.run(command, fetch="cursor", include_columns=True)
    assert isinstance(result, Result)
    # mappings() yields dict-like rows keyed by column name.
    assert result.mappings().fetchall() == [
        {"user_id": 17, "user_name": "hwchase", "user_bio": None}
    ]
def test_sql_database_run_with_parameters(db: SQLDatabase) -> None:
    """Verify running SQL expressions with query parameters."""
    db._execute(insert(user).values(user_id=17, user_name="hwchase"))

    # Bound parameter (:user_id) supplied via the parameters mapping.
    command = "select user_id, user_name, user_bio from user where user_id = :user_id"
    output = db.run(command, parameters={"user_id": 17}, include_columns=True)
    assert output == "[{'user_id': 17, 'user_name': 'hwchase', 'user_bio': None}]"
def test_sql_database_run_sqlalchemy_selectable(db: SQLDatabase) -> None:
    """Verify running SQL expressions using SQLAlchemy selectable."""
    db._execute(insert(user).values(user_id=17, user_name="hwchase"))

    # A Core select() object is accepted in place of a SQL string.
    statement = select(user).where(user.c.user_id == 17)
    output = db.run(statement, include_columns=True)
    assert output == "[{'user_id': 17, 'user_name': 'hwchase', 'user_bio': None}]"
def test_sql_database_run_update(db: SQLDatabase) -> None:
    """Test commands which return no rows return an empty string."""
    db._execute(insert(user).values(user_id=13, user_name="Harrison"))

    # DML statements produce no result rows, hence the empty string.
    result = db.run("update user set user_name='Updated' where user_id = 13")
    assert result == ""
@pytest.mark.skipif(is_sqlalchemy_v1, reason="Requires SQLAlchemy 2 or newer")
def test_sql_database_schema_translate_map() -> None:
    """Verify using statement-specific execution options."""
    db = SQLDatabase(sa.create_engine("sqlite:///:memory:"))

    statement = select(user).where(user.c.user_id == 17)
    # Remap the default (None) schema to "bar" for this statement only.
    options = {"schema_translate_map": {None: "bar"}}

    # The translated table does not exist, proving the map was applied.
    with pytest.raises(sa.exc.OperationalError) as ex:
        db.run(statement, execution_options=options, fetch="cursor")
    assert ex.match("no such table: bar.user")
def test_truncate_word() -> None:
    """Exercise truncate_word: truncation, custom suffixes, non-positive lengths."""
    # (text, length, suffix-or-None, expected)
    cases = [
        ("Hello World", 5, None, "He..."),
        ("Hello World", 0, None, "Hello World"),
        ("Hello World", -10, None, "Hello World"),
        ("Hello World", 5, "!!!", "He!!!"),
        ("Hello World", 12, "!!!", "Hello World"),
    ]
    for text, length, suffix, expected in cases:
        if suffix is None:
            assert truncate_word(text, length=length) == expected
        else:
            assert truncate_word(text, length=length, suffix=suffix) == expected
|
0 | lc_public_repos/langchain/libs/community/tests | lc_public_repos/langchain/libs/community/tests/unit_tests/test_sql_database_schema.py | # flake8: noqa
"""Test SQL database wrapper with schema support.
Using DuckDB as SQLite does not support schemas.
"""
import pytest
from sqlalchemy import (
Column,
Integer,
MetaData,
Sequence,
String,
Table,
create_engine,
event,
insert,
schema,
)
import sqlalchemy as sa
from packaging import version
from langchain_community.utilities.sql_database import SQLDatabase
metadata_obj = MetaData()
event.listen(metadata_obj, "before_create", schema.CreateSchema("schema_a"))
event.listen(metadata_obj, "before_create", schema.CreateSchema("schema_b"))
user = Table(
"user",
metadata_obj,
Column("user_id", Integer, Sequence("user_id_seq"), primary_key=True),
Column("user_name", String, nullable=False),
schema="schema_a",
)
company = Table(
"company",
metadata_obj,
Column("company_id", Integer, Sequence("company_id_seq"), primary_key=True),
Column("company_location", String, nullable=False),
schema="schema_b",
)
@pytest.mark.xfail(
    version.parse(sa.__version__).major == 1, reason="SQLAlchemy 1.x issues"
)
def test_table_info() -> None:
    """Test that table info is constructed properly."""
    engine = create_engine("duckdb:///:memory:")
    metadata_obj.create_all(engine)
    db = SQLDatabase(engine, schema="schema_a", metadata=metadata_obj)
    output = db.table_info
    expected_output = """
    CREATE TABLE schema_a."user" (
    user_id INTEGER NOT NULL,
    user_name VARCHAR NOT NULL,
    PRIMARY KEY (user_id)
    )
    /*
    3 rows from user table:
    user_id user_name
    */
    """
    # Compare whitespace-insensitively: collapse runs of whitespace, then sort
    # the remaining characters so formatting differences cannot fail the test.
    assert sorted(" ".join(output.split())) == sorted(" ".join(expected_output.split()))
@pytest.mark.xfail(
    version.parse(sa.__version__).major == 1, reason="SQLAlchemy 1.x issues"
)
def test_sql_database_run() -> None:
    """Test that commands can be run successfully and returned in correct format."""
    engine = create_engine("duckdb:///:memory:")
    metadata_obj.create_all(engine)
    stmt = insert(user).values(user_id=13, user_name="Harrison")
    with engine.begin() as conn:
        conn.execute(stmt)
    with pytest.warns(Warning) as records:
        db = SQLDatabase(engine, schema="schema_a")
    # Metadata creation with duckdb raises 3 warnings at the moment about reflection.
    # As a stop-gap to increase strictness of pytest to fail on warnings, we'll
    # explicitly catch the warnings and assert that it's the one we expect.
    # We may need to revisit at a later stage and determine why a warning is being
    # raised here.
    for record in records:
        assert isinstance(record.message, Warning)
    assert any(
        record.message.args[0]  # type: ignore
        == "duckdb-engine doesn't yet support reflection on indices"
        for record in records
    )
    command = 'select user_name from "user" where user_id = 13'
    output = db.run(command)
    expected_output = "[('Harrison',)]"
    assert output == expected_output
|
0 | lc_public_repos/langchain/libs/community/tests | lc_public_repos/langchain/libs/community/tests/unit_tests/test_dependencies.py | """A unit test meant to catch accidental introduction of non-optional dependencies."""
from pathlib import Path
from typing import Any, Dict, Mapping
import pytest
import toml
HERE = Path(__file__).parent
PYPROJECT_TOML = HERE / "../../pyproject.toml"
@pytest.fixture()
def poetry_conf() -> Dict[str, Any]:
    """Parse pyproject.toml and return its [tool.poetry] section."""
    with open(PYPROJECT_TOML) as fh:
        parsed = toml.load(fh)
    return parsed["tool"]["poetry"]
def test_required_dependencies(poetry_conf: Mapping[str, Any]) -> None:
    """A test that checks if a new non-optional dependency is being introduced.

    If this test is triggered, it means that a contributor is trying to introduce a new
    required dependency. This should be avoided in most situations.
    """
    # Get the dependencies from the [tool.poetry.dependencies] section
    dependencies = poetry_conf["dependencies"]
    # A dependency is required unless its spec is a table with "optional = true";
    # plain string or list version constraints are always required.
    is_required = {
        package_name: isinstance(requirements, str)
        or isinstance(requirements, list)
        or not requirements.get("optional", False)
        for package_name, requirements in dependencies.items()
    }
    required_dependencies = [
        package_name for package_name, required in is_required.items() if required
    ]
    assert sorted(required_dependencies) == sorted(
        [
            "PyYAML",
            "SQLAlchemy",
            "aiohttp",
            "dataclasses-json",
            "httpx-sse",
            "langchain-core",
            "langsmith",
            "numpy",
            "python",
            "requests",
            "pydantic-settings",
            "tenacity",
            "langchain",
        ]
    )
    unrequired_dependencies = [
        package_name for package_name, required in is_required.items() if not required
    ]
    in_extras = [
        dep for group in poetry_conf.get("extras", {}).values() for dep in group
    ]
    # Every optional dependency must be exposed through some extras group,
    # and extras must not reference non-optional dependencies.
    assert set(unrequired_dependencies) == set(in_extras)
def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
    """Check if someone is attempting to add additional test dependencies.

    Only dependencies associated with test running infrastructure should be added
    to the test group; e.g., pytest, pytest-cov etc.

    Examples of dependencies that should NOT be included: boto3, azure, postgres, etc.
    """
    # Compare sorted names so the assertion is insensitive to pyproject ordering.
    test_group_deps = sorted(poetry_conf["group"]["test"]["dependencies"])
    assert test_group_deps == sorted(
        [
            "duckdb-engine",
            "freezegun",
            "langchain-core",
            "langchain-tests",
            "langchain",
            "lark",
            "pandas",
            "pytest",
            "pytest-asyncio",
            "pytest-cov",
            "pytest-dotenv",
            "pytest-mock",
            "pytest-socket",
            "pytest-watcher",
            "responses",
            "syrupy",
            "requests-mock",
            # TODO: Hack to get around cffi 1.17.1 not working with py3.9, remove when
            # fix is released.
            "cffi",
        ]
    )
def test_imports() -> None:
    """Test that you can import all top level things okay.

    Smoke test: importing one representative symbol from each public subpackage
    catches broken __init__ re-exports early.
    """
    from langchain_core.prompts import BasePromptTemplate  # noqa: F401

    from langchain_community.callbacks import OpenAICallbackHandler  # noqa: F401
    from langchain_community.chat_models import ChatOpenAI  # noqa: F401
    from langchain_community.document_loaders import BSHTMLLoader  # noqa: F401
    from langchain_community.embeddings import OpenAIEmbeddings  # noqa: F401
    from langchain_community.llms import OpenAI  # noqa: F401
    from langchain_community.retrievers import VespaRetriever  # noqa: F401
    from langchain_community.tools import DuckDuckGoSearchResults  # noqa: F401
    from langchain_community.utilities import (
        SearchApiAPIWrapper,  # noqa: F401
        SerpAPIWrapper,  # noqa: F401
    )
    from langchain_community.vectorstores import FAISS  # noqa: F401
|
0 | lc_public_repos/langchain/libs/community/tests | lc_public_repos/langchain/libs/community/tests/unit_tests/test_cache.py | """Test caching for LLMs and ChatModels."""
import sqlite3
from typing import Dict, Generator, List, Union
import pytest
from _pytest.fixtures import FixtureRequest
from langchain_core.caches import InMemoryCache
from langchain_core.language_models import FakeListChatModel, FakeListLLM
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.language_models.llms import BaseLLM
from langchain_core.load import dumps
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration
from sqlalchemy import Column, Integer, Sequence, String, create_engine
from sqlalchemy.orm import Session
try:
from sqlalchemy.orm import declarative_base
except ImportError:
from sqlalchemy.ext.declarative import declarative_base
from langchain.globals import get_llm_cache, set_llm_cache
from langchain_core.outputs import Generation, LLMResult
from langchain_community.cache import SQLAlchemyCache
from tests.unit_tests.llms.fake_llm import FakeLLM
def get_sqlite_cache() -> SQLAlchemyCache:
    """Build a SQLAlchemyCache backed by a shared in-memory SQLite database.

    The "file::memory:?cache=shared" URI lets every connection created by the
    engine see the same in-memory database instead of a fresh empty one.
    """
    return SQLAlchemyCache(
        engine=create_engine(
            "sqlite://", creator=lambda: sqlite3.connect("file::memory:?cache=shared")
        )
    )
# Cache backends exercised by the autouse fixture below; each entry is a
# zero-argument callable that returns a fresh cache instance.
CACHE_OPTIONS = [
    InMemoryCache,
    get_sqlite_cache,
]
@pytest.fixture(autouse=True, params=CACHE_OPTIONS)
def set_cache_and_teardown(request: FixtureRequest) -> Generator[None, None, None]:
    """Install a fresh, empty LLM cache before each test and clear it after.

    Parametrized over CACHE_OPTIONS, so every test in this module runs once
    per cache backend.
    """
    # Will be run before each test
    cache_instance = request.param
    set_llm_cache(cache_instance())
    if llm_cache := get_llm_cache():
        llm_cache.clear()
    else:
        raise ValueError("Cache not set. This should never happen.")

    yield

    # Will be run after each test
    if llm_cache:
        llm_cache.clear()
        set_llm_cache(None)
    else:
        raise ValueError("Cache not set. This should never happen.")
async def test_llm_caching() -> None:
    """LLM invoke/ainvoke must return the cached generation when one exists."""
    prompt = "How are you?"
    response = "Test response"
    cached_response = "Cached test response"
    llm = FakeListLLM(responses=[response])
    if llm_cache := get_llm_cache():
        # sync test
        llm_cache.update(
            prompt=prompt,
            llm_string=create_llm_string(llm),
            return_val=[Generation(text=cached_response)],
        )
        # Cache hit: the fake LLM's canned response is never consumed.
        assert llm.invoke(prompt) == cached_response
        # async test
        await llm_cache.aupdate(
            prompt=prompt,
            llm_string=create_llm_string(llm),
            return_val=[Generation(text=cached_response)],
        )
        assert await llm.ainvoke(prompt) == cached_response
    else:
        raise ValueError(
            "The cache not set. This should never happen, as the pytest fixture "
            "`set_cache_and_teardown` always sets the cache."
        )
def test_old_sqlite_llm_caching() -> None:
    """Rows written directly through the legacy ORM schema are still served.

    Only runs meaningfully for the SQLAlchemy-backed cache parametrization;
    for other backends the body is skipped by the isinstance guard.
    """
    llm_cache = get_llm_cache()
    if isinstance(llm_cache, SQLAlchemyCache):
        prompt = "How are you?"
        response = "Test response"
        cached_response = "Cached test response"
        llm = FakeListLLM(responses=[response])
        items = [
            llm_cache.cache_schema(
                prompt=prompt,
                llm=create_llm_string(llm),
                response=cached_response,
                idx=0,
            )
        ]
        # Insert the row with the ORM directly, bypassing the cache API, to
        # emulate data written by an older version.
        with Session(llm_cache.engine) as session, session.begin():
            for item in items:
                session.merge(item)
        assert llm.invoke(prompt) == cached_response
async def test_chat_model_caching() -> None:
    """Chat-model invoke/ainvoke must return the cached AIMessage when present."""
    prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
    response = "Test response"
    cached_response = "Cached test response"
    cached_message = AIMessage(content=cached_response)
    llm = FakeListChatModel(responses=[response])
    if llm_cache := get_llm_cache():
        # sync test
        llm_cache.update(
            prompt=dumps(prompt),  # chat prompts are keyed by their serialized form
            llm_string=llm._get_llm_string(),
            return_val=[ChatGeneration(message=cached_message)],
        )
        result = llm.invoke(prompt)
        assert isinstance(result, AIMessage)
        assert result.content == cached_response
        # async test
        await llm_cache.aupdate(
            prompt=dumps(prompt),
            llm_string=llm._get_llm_string(),
            return_val=[ChatGeneration(message=cached_message)],
        )
        result = await llm.ainvoke(prompt)
        assert isinstance(result, AIMessage)
        assert result.content == cached_response
    else:
        raise ValueError(
            "The cache not set. This should never happen, as the pytest fixture "
            "`set_cache_and_teardown` always sets the cache."
        )
async def test_chat_model_caching_params() -> None:
    """Cache keys must include call kwargs: same prompt, different params miss."""
    prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
    response = "Test response"
    cached_response = "Cached test response"
    cached_message = AIMessage(content=cached_response)
    llm = FakeListChatModel(responses=[response])
    if llm_cache := get_llm_cache():
        # sync test
        llm_cache.update(
            prompt=dumps(prompt),
            llm_string=llm._get_llm_string(functions=[]),
            return_val=[ChatGeneration(message=cached_message)],
        )
        # Cached only for functions=[]; the plain call must fall through to the
        # fake model's canned response.
        result = llm.invoke(prompt, functions=[])
        result_no_params = llm.invoke(prompt)
        assert isinstance(result, AIMessage)
        assert result.content == cached_response
        assert isinstance(result_no_params, AIMessage)
        assert result_no_params.content == response
        # async test
        await llm_cache.aupdate(
            prompt=dumps(prompt),
            llm_string=llm._get_llm_string(functions=[]),
            return_val=[ChatGeneration(message=cached_message)],
        )
        result = await llm.ainvoke(prompt, functions=[])
        result_no_params = await llm.ainvoke(prompt)
        assert isinstance(result, AIMessage)
        assert result.content == cached_response
        assert isinstance(result_no_params, AIMessage)
        assert result_no_params.content == response
    else:
        raise ValueError(
            "The cache not set. This should never happen, as the pytest fixture "
            "`set_cache_and_teardown` always sets the cache."
        )
async def test_llm_cache_clear() -> None:
    """After clear()/aclear(), lookups miss and the model answers normally."""
    prompt = "How are you?"
    expected_response = "Test response"
    cached_response = "Cached test response"
    llm = FakeListLLM(responses=[expected_response])
    if llm_cache := get_llm_cache():
        # sync test
        llm_cache.update(
            prompt=prompt,
            llm_string=create_llm_string(llm),
            return_val=[Generation(text=cached_response)],
        )
        llm_cache.clear()
        # Cleared cache: the stale cached_response must not be returned.
        response = llm.invoke(prompt)
        assert response == expected_response
        # async test
        await llm_cache.aupdate(
            prompt=prompt,
            llm_string=create_llm_string(llm),
            return_val=[Generation(text=cached_response)],
        )
        await llm_cache.aclear()
        response = await llm.ainvoke(prompt)
        assert response == expected_response
    else:
        raise ValueError(
            "The cache not set. This should never happen, as the pytest fixture "
            "`set_cache_and_teardown` always sets the cache."
        )
def create_llm_string(llm: Union[BaseLLM, BaseChatModel]) -> str:
    """Render a model's parameters (with stop forced to None) as a stable,
    sorted string suitable for use as a cache key."""
    params: Dict = llm.dict()
    params["stop"] = None
    return str(sorted(params.items()))
def test_sql_alchemy_cache() -> None:
    """Test custom_caching behavior with a user-supplied ORM cache schema."""
    Base = declarative_base()

    class FulltextLLMCache(Base):  # type: ignore
        """Postgres table for fulltext-indexed LLM Cache."""

        __tablename__ = "llm_cache_fulltext"
        id = Column(Integer, Sequence("cache_id"), primary_key=True)
        prompt = Column(String, nullable=False)
        llm = Column(String, nullable=False)
        idx = Column(Integer)
        response = Column(String)

    engine = create_engine("sqlite://")
    from langchain_community.cache import SQLAlchemyCache

    set_llm_cache(SQLAlchemyCache(engine, FulltextLLMCache))
    llm = FakeLLM()
    params = llm.dict()
    params["stop"] = None
    llm_string = str(sorted([(k, v) for k, v in params.items()]))
    # Pre-seed "foo" so its generation comes from the cache below.
    get_llm_cache().update("foo", llm_string, [Generation(text="fizz")])
    output = llm.generate(["foo", "bar", "foo"])
    # The uncached "bar" call was answered by FakeLLM and then written back
    # to the cache by generate().
    expected_cache_output = [Generation(text="foo")]
    cache_output = get_llm_cache().lookup("bar", llm_string)
    assert cache_output == expected_cache_output
    set_llm_cache(None)
    expected_generations = [
        [Generation(text="fizz")],
        [Generation(text="foo")],
        [Generation(text="fizz")],
    ]
    expected_output = LLMResult(
        generations=expected_generations,
        llm_output=None,
    )
    assert output == expected_output
|
0 | lc_public_repos/langchain/libs/community/tests | lc_public_repos/langchain/libs/community/tests/unit_tests/test_imports.py | import ast
import glob
import importlib
from pathlib import Path
from typing import List, Tuple
COMMUNITY_ROOT = Path(__file__).parent.parent.parent / "langchain_community"
ALL_COMMUNITY_GLOB = COMMUNITY_ROOT.as_posix() + "/**/*.py"
HERE = Path(__file__).parent
ROOT = HERE.parent.parent
def test_importable_all() -> None:
    """Every langchain_community module must import cleanly, and every name
    listed in a module's __all__ must actually resolve on that module."""
    for path in glob.glob(ALL_COMMUNITY_GLOB):
        # Relative to community root
        relative_path = Path(path).relative_to(COMMUNITY_ROOT)
        str_path = str(relative_path)
        if str_path.endswith("__init__.py"):
            # Packages import under their directory's dotted name.
            module_name = str(relative_path.parent).replace("/", ".")
        else:
            module_name = str(relative_path.with_suffix("")).replace("/", ".")
        try:
            module = importlib.import_module("langchain_community." + module_name)
        except ModuleNotFoundError as e:
            raise ModuleNotFoundError(
                f"Could not import `{module_name}`. Defined in path: {path}"
            ) from e
        all_ = getattr(module, "__all__", [])
        # getattr raises AttributeError for any advertised-but-missing name.
        for cls_ in all_:
            getattr(module, cls_)
def test_glob_correct() -> None:
    """Verify that the glob pattern is correct."""
    paths = list(glob.glob(ALL_COMMUNITY_GLOB))
    # Get paths relative to community root
    paths_ = [Path(path).relative_to(COMMUNITY_ROOT) for path in paths]
    # Assert there's a callback paths — a known package that the recursive
    # glob must always match; if it's missing, the pattern is broken.
    assert Path("callbacks/__init__.py") in paths_
def _check_correct_or_not_defined__all__(code: str) -> bool:
"""Return True if __all__ is correctly defined or not defined at all."""
# Parse the code into an AST
tree = ast.parse(code)
all_good = True
# Iterate through the body of the AST to find assignments
for node in tree.body:
# Check if the node is an assignment
if isinstance(node, ast.Assign):
# Check if the target of the assignment is '__all__'
for target in node.targets:
if isinstance(target, ast.Name) and target.id == "__all__":
# Check if the value assigned is a list
if isinstance(node.value, ast.List):
# Verify all elements in the list are string literals
if all(isinstance(el, ast.Str) for el in node.value.elts):
pass
else:
all_good = False
else:
all_good = False
return all_good
def test_no_dynamic__all__() -> None:
    """Verify that __all__ is not computed at runtime.

    Computing __all__ dynamically can confuse static typing tools like pyright.

    __all__ should always be listed as an explicit list of string literals.
    """
    bad_definitions = []
    for path in glob.glob(ALL_COMMUNITY_GLOB):
        # Only package __init__ files declare a public API worth checking.
        if not path.endswith("__init__.py"):
            continue

        with open(path, "r") as file:
            code = file.read()

        if _check_correct_or_not_defined__all__(code) is False:
            bad_definitions.append(path)

    if bad_definitions:
        # Sort the offenders so the failure message is deterministic.
        raise AssertionError(
            f"__all__ is not correctly defined in the "
            f"following files: {sorted(bad_definitions)}"
        )
def _extract_type_checking_imports(code: str) -> List[Tuple[str, str]]:
"""Extract all TYPE CHECKING imports that import from langchain_community."""
imports: List[Tuple[str, str]] = []
tree = ast.parse(code)
class TypeCheckingVisitor(ast.NodeVisitor):
def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
if node.module:
for alias in node.names:
imports.append((node.module, alias.name))
class GlobalScopeVisitor(ast.NodeVisitor):
def visit_If(self, node: ast.If) -> None:
if (
isinstance(node.test, ast.Name)
and node.test.id == "TYPE_CHECKING"
and isinstance(node.test.ctx, ast.Load)
):
TypeCheckingVisitor().visit(node)
self.generic_visit(node)
GlobalScopeVisitor().visit(tree)
return imports
def test_init_files_properly_defined() -> None:
    """This is part of a set of tests that verify that init files are properly
    defined if they're using dynamic imports.

    Any __init__ that defines __getattr__ must also define __all__, and its
    TYPE_CHECKING block must cover every name listed in __all__.
    """
    # Please never ever add more modules to this list.
    # Do feel free to fix the underlying issues and remove exceptions
    # from the list.
    excepted_modules = {"llms"}  # NEVER ADD MORE MODULES TO THIS LIST

    for path in glob.glob(ALL_COMMUNITY_GLOB):
        # Relative to community root
        relative_path = Path(path).relative_to(COMMUNITY_ROOT)
        str_path = str(relative_path)
        if not str_path.endswith("__init__.py"):
            continue

        module_name = str(relative_path.parent).replace("/", ".")

        if module_name in excepted_modules:
            continue

        code = Path(path).read_text()

        # Check for dynamic __getattr__ definition in the __init__ file;
        # modules without one are exempt from this check.
        if "__getattr__" not in code:
            continue

        try:
            module = importlib.import_module("langchain_community." + module_name)
        except ModuleNotFoundError as e:
            raise ModuleNotFoundError(
                f"Could not import `{module_name}`. Defined in path: {path}"
            ) from e

        if not hasattr(module, "__all__"):
            raise AssertionError(
                f"__all__ not defined in {module_name}. This is required "
                f"if __getattr__ is defined."
            )

        imports = _extract_type_checking_imports(code)

        # Get the names of all the TYPE CHECKING imports
        names = [name for _, name in imports]

        missing_imports = set(module.__all__) - set(names)

        assert (
            not missing_imports
        ), f"Missing imports: {missing_imports} in file path: {path}"
|
0 | lc_public_repos/langchain/libs/community/tests | lc_public_repos/langchain/libs/community/tests/unit_tests/test_document_transformers.py | """Unit tests for document transformers."""
import pytest
pytest.importorskip("langchain_community")
from langchain_community.document_transformers.embeddings_redundant_filter import ( # noqa: E402
_filter_similar_embeddings,
)
from langchain_community.utils.math import cosine_similarity # noqa: E402
def test__filter_similar_embeddings() -> None:
    """Embeddings whose cosine similarity exceeds the threshold are filtered,
    keeping one representative index from each similar group."""
    threshold = 0.79
    embedded_docs = [[1.0, 2.0], [1.0, 2.0], [2.0, 1.0], [2.0, 0.5], [0.0, 0.0]]
    # Indices 0/1 are identical and 2/3 point in nearly the same direction,
    # so only indices 1, 3 and 4 remain.
    expected = [1, 3, 4]
    actual = _filter_similar_embeddings(embedded_docs, cosine_similarity, threshold)
    assert expected == actual
def test__filter_similar_embeddings_empty() -> None:
    """Filtering an empty embedding list yields an empty result."""
    surviving = _filter_similar_embeddings([], cosine_similarity, 0.0)
    assert len(surviving) == 0
|
0 | lc_public_repos/langchain/libs/community/tests | lc_public_repos/langchain/libs/community/tests/unit_tests/test_sqlalchemy.py | import sqlalchemy.orm
import langchain_community # noqa: F401
def test_configure_mappers() -> None:
    """Smoke test: after importing langchain_community, SQLAlchemy's mapper
    configuration must still succeed (i.e. the package's registered ORM
    models do not break configure_mappers())."""
    sqlalchemy.orm.configure_mappers()
|
0 | lc_public_repos/langchain/libs/community/tests | lc_public_repos/langchain/libs/community/tests/unit_tests/conftest.py | """Configuration for unit tests."""
from importlib import util
from typing import Dict, Sequence
import pytest
from pytest import Config, Function, Parser
def pytest_addoption(parser: Parser) -> None:
    """Register the custom --only-extended / --only-core pytest flags."""
    # (flag, help text) pairs, registered in order.
    option_specs = (
        (
            "--only-extended",
            "Only run extended tests. Does not allow skipping any extended tests.",
        ),
        (
            "--only-core",
            "Only run core tests. Never runs any extended tests.",
        ),
    )
    for flag, help_text in option_specs:
        parser.addoption(flag, action="store_true", help=help_text)
def pytest_collection_modifyitems(config: Config, items: Sequence[Function]) -> None:
    """Add implementations for handling custom markers.

    At the moment, this adds support for a custom `requires` marker.

    The `requires` marker is used to denote tests that require one or more packages
    to be installed to run. If the package is not installed, the test is skipped.

    The `requires` marker syntax is:

    .. code-block:: python

        @pytest.mark.requires("package1", "package2")
        def test_something():
            ...
    """
    # Mapping from the name of a package to whether it is installed or not.
    # Used to avoid repeated calls to `util.find_spec`
    required_pkgs_info: Dict[str, bool] = {}

    only_extended = config.getoption("--only-extended") or False
    only_core = config.getoption("--only-core") or False

    if only_extended and only_core:
        raise ValueError("Cannot specify both `--only-extended` and `--only-core`.")

    for item in items:
        requires_marker = item.get_closest_marker("requires")
        if requires_marker is not None:
            if only_core:
                # Marked tests count as extended; skip them all in core-only mode.
                item.add_marker(pytest.mark.skip(reason="Skipping not a core test."))
                continue

            # Iterate through the list of required packages
            required_pkgs = requires_marker.args
            for pkg in required_pkgs:
                # If we haven't yet checked whether the pkg is installed
                # let's check it and store the result.
                if pkg not in required_pkgs_info:
                    try:
                        installed = util.find_spec(pkg) is not None
                    except Exception:
                        # find_spec can raise for malformed names; treat as absent.
                        installed = False
                    required_pkgs_info[pkg] = installed

                if not required_pkgs_info[pkg]:
                    if only_extended:
                        pytest.fail(
                            f"Package `{pkg}` is not installed but is required for "
                            f"extended tests. Please install the given package and "
                            f"try again.",
                        )

                    else:
                        # If the package is not installed, we immediately break
                        # and mark the test as skipped.
                        item.add_marker(
                            pytest.mark.skip(reason=f"Requires pkg: `{pkg}`")
                        )
                        break
        else:
            # for/if-else: items with no `requires` marker are plain core tests,
            # skipped when only extended tests were requested.
            if only_extended:
                item.add_marker(
                    pytest.mark.skip(reason="Skipping not an extended test.")
                )
|
0 | lc_public_repos/langchain/libs/community/tests | lc_public_repos/langchain/libs/community/tests/unit_tests/test_graph_vectorstores.py | import pytest
from langchain_core.documents import Document
from langchain_community.graph_vectorstores.base import (
Node,
_documents_to_nodes,
_texts_to_nodes,
)
from langchain_community.graph_vectorstores.links import Link
def test_texts_to_nodes() -> None:
    """_texts_to_nodes zips texts, metadata and ids, and validates lengths."""
    assert list(_texts_to_nodes(["a", "b"], [{"a": "b"}, {"c": "d"}], ["a", "b"])) == [
        Node(id="a", metadata={"a": "b"}, text="a"),
        Node(id="b", metadata={"c": "d"}, text="b"),
    ]
    # Missing metadata defaults to empty dicts.
    assert list(_texts_to_nodes(["a", "b"], None, ["a", "b"])) == [
        Node(id="a", metadata={}, text="a"),
        Node(id="b", metadata={}, text="b"),
    ]
    # Missing ids leave Node.id unset.
    assert list(_texts_to_nodes(["a", "b"], [{"a": "b"}, {"c": "d"}], None)) == [
        Node(metadata={"a": "b"}, text="a"),
        Node(metadata={"c": "d"}, text="b"),
    ]
    # A "links" metadata entry is lifted onto Node.links.
    assert list(
        _texts_to_nodes(
            ["a"],
            [{"links": {Link.incoming(kind="hyperlink", tag="http://b")}}],
            None,
        )
    ) == [Node(links=[Link.incoming(kind="hyperlink", tag="http://b")], text="a")]
    # Any length mismatch between texts, metadatas and ids must raise.
    with pytest.raises(ValueError):
        list(_texts_to_nodes(["a", "b"], None, ["a"]))
    with pytest.raises(ValueError):
        list(_texts_to_nodes(["a", "b"], [{"a": "b"}], None))
    with pytest.raises(ValueError):
        list(_texts_to_nodes(["a"], [{"a": "b"}, {"c": "d"}], None))
    with pytest.raises(ValueError):
        list(_texts_to_nodes(["a"], None, ["a", "b"]))
def test_documents_to_nodes() -> None:
    """_documents_to_nodes converts Documents, extracting "links" metadata."""
    documents = [
        Document(
            id="a",
            page_content="some text a",
            metadata={"links": [Link.incoming(kind="hyperlink", tag="http://b")]},
        ),
        Document(id="b", page_content="some text b", metadata={"c": "d"}),
    ]
    assert list(_documents_to_nodes(documents)) == [
        Node(
            id="a",
            # The "links" entry moves from metadata to Node.links.
            metadata={},
            links=[Link.incoming(kind="hyperlink", tag="http://b")],
            text="some text a",
        ),
        Node(id="b", metadata={"c": "d"}, text="some text b"),
    ]
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_deeplake.py | from typing import Dict, Tuple
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.deeplake import DeepLakeTranslator
DEFAULT_TRANSLATOR = DeepLakeTranslator()
def test_visit_comparison() -> None:
    """A list-valued comparison expands into an OR of per-element comparisons."""
    comparison = Comparison(comparator=Comparator.LT, attribute="foo", value=["1", "2"])
    translated = DEFAULT_TRANSLATOR.visit_comparison(comparison)
    assert translated == "(metadata['foo'] < 1 or metadata['foo'] < 2)"
def test_visit_operation() -> None:
    """AND operations join translated sub-comparisons with 'and'."""
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.LT, attribute="abc", value=["1", "2"]),
        ],
    )
    # The list-valued comparison is itself expanded into a parenthesized OR.
    expected = (
        "(metadata['foo'] < 2 and metadata['bar'] == 'baz' "
        "and (metadata['abc'] < 1 or metadata['abc'] < 2))"
    )
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
def test_visit_structured_query() -> None:
    """Structured queries translate to (query, kwargs-with-TQL) tuples."""
    query = "What is the capital of France?"
    # No filter: translation yields the query and empty search kwargs.
    structured_query = StructuredQuery(
        query=query,
        filter=None,
    )
    expected: Tuple[str, Dict] = (query, {})
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # A single comparison becomes a TQL WHERE clause.
    comp = Comparison(comparator=Comparator.LT, attribute="foo", value=["1", "2"])
    structured_query = StructuredQuery(
        query=query,
        filter=comp,
    )
    expected = (
        query,
        {"tql": "SELECT * WHERE (metadata['foo'] < 1 or metadata['foo'] < 2)"},
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # A compound AND filter produces a nested WHERE expression.
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.LT, attribute="abc", value=["1", "2"]),
        ],
    )
    structured_query = StructuredQuery(
        query=query,
        filter=op,
    )
    expected = (
        query,
        {
            "tql": "SELECT * WHERE "
            "(metadata['foo'] < 2 and metadata['bar'] == 'baz' and "
            "(metadata['abc'] < 1 or metadata['abc'] < 2))"
        },
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_opensearch.py | from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.opensearch import OpenSearchTranslator
DEFAULT_TRANSLATOR = OpenSearchTranslator()
def test_visit_comparison() -> None:
    """EQ comparisons translate to OpenSearch keyword term queries."""
    comparison = Comparison(comparator=Comparator.EQ, attribute="foo", value="10")
    translated = DEFAULT_TRANSLATOR.visit_comparison(comparison)
    assert translated == {"term": {"metadata.foo.keyword": "10"}}
def test_visit_operation() -> None:
    """AND operations map to a bool/must query; range vs. term depends on the
    comparator (numeric range for GTE/LT, keyword term for EQ)."""
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.GTE, attribute="bar", value=5),
            Comparison(comparator=Comparator.LT, attribute="bar", value=10),
            Comparison(comparator=Comparator.EQ, attribute="baz", value="abcd"),
        ],
    )
    expected = {
        "bool": {
            "must": [
                {"range": {"metadata.bar": {"gte": 5}}},
                {"range": {"metadata.bar": {"lt": 10}}},
                {"term": {"metadata.baz.keyword": "abcd"}},
            ]
        }
    }
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
def test_visit_structured_query() -> None:
    """A nested AND/OR filter translates to nested bool must/should clauses,
    returned as the "filter" entry of the search kwargs."""
    query = "What is the capital of France?"
    operation = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value="20"),
            Operation(
                operator=Operator.OR,
                arguments=[
                    Comparison(comparator=Comparator.LTE, attribute="bar", value=7),
                    Comparison(
                        comparator=Comparator.LIKE, attribute="baz", value="abc"
                    ),
                ],
            ),
        ],
    )
    structured_query = StructuredQuery(query=query, filter=operation, limit=None)
    expected = (
        query,
        {
            "filter": {
                "bool": {
                    "must": [
                        {"term": {"metadata.foo.keyword": "20"}},
                        {
                            "bool": {
                                # OR maps to "should"; LIKE maps to a fuzzy query.
                                "should": [
                                    {"range": {"metadata.bar": {"lte": 7}}},
                                    {
                                        "fuzzy": {
                                            "metadata.baz": {
                                                "value": "abc",
                                            }
                                        }
                                    },
                                ]
                            }
                        },
                    ]
                }
            }
        },
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
def test_visit_structured_query_with_date_range() -> None:
    """Date-typed comparison values translate to range queries on the bare
    date string (the {"date": ..., "type": "date"} wrapper is unwrapped)."""
    query = "Who was the president of France in 1995?"
    operation = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value="20"),
            Operation(
                operator=Operator.AND,
                arguments=[
                    Comparison(
                        comparator=Comparator.GTE,
                        attribute="timestamp",
                        value={"date": "1995-01-01", "type": "date"},
                    ),
                    Comparison(
                        comparator=Comparator.LT,
                        attribute="timestamp",
                        value={"date": "1996-01-01", "type": "date"},
                    ),
                ],
            ),
        ],
    )
    structured_query = StructuredQuery(query=query, filter=operation, limit=None)
    expected = (
        query,
        {
            "filter": {
                "bool": {
                    "must": [
                        {"term": {"metadata.foo.keyword": "20"}},
                        {
                            "bool": {
                                "must": [
                                    {
                                        "range": {
                                            "metadata.timestamp": {"gte": "1995-01-01"}
                                        }
                                    },
                                    {
                                        "range": {
                                            "metadata.timestamp": {"lt": "1996-01-01"}
                                        }
                                    },
                                ]
                            }
                        },
                    ]
                }
            }
        },
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
def test_visit_structured_query_with_date() -> None:
    """An EQ comparison on a date value becomes a term query on the raw date
    string — note: no ".keyword" suffix for date-typed fields."""
    query = "Who was the president of France on 1st of January 1995?"
    operation = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value="20"),
            Comparison(
                comparator=Comparator.EQ,
                attribute="timestamp",
                value={"date": "1995-01-01", "type": "date"},
            ),
        ],
    )
    structured_query = StructuredQuery(query=query, filter=operation, limit=None)
    expected = (
        query,
        {
            "filter": {
                "bool": {
                    "must": [
                        {"term": {"metadata.foo.keyword": "20"}},
                        {"term": {"metadata.timestamp": "1995-01-01"}},
                    ]
                }
            }
        },
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_mongodb_atlas.py | from typing import Dict, Tuple
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.mongodb_atlas import MongoDBAtlasTranslator
DEFAULT_TRANSLATOR = MongoDBAtlasTranslator()
def test_visit_comparison_lt() -> None:
    """LT translates to MongoDB's $lt operator."""
    translated = DEFAULT_TRANSLATOR.visit_comparison(
        Comparison(comparator=Comparator.LT, attribute="qty", value=20)
    )
    assert translated == {"qty": {"$lt": 20}}
def test_visit_comparison_eq() -> None:
    """EQ translates to MongoDB's $eq operator."""
    translated = DEFAULT_TRANSLATOR.visit_comparison(
        Comparison(comparator=Comparator.EQ, attribute="qty", value=10)
    )
    assert translated == {"qty": {"$eq": 10}}
def test_visit_comparison_ne() -> None:
comp = Comparison(comparator=Comparator.NE, attribute="name", value="foo")
expected = {"name": {"$ne": "foo"}}
actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
assert expected == actual
def test_visit_comparison_in() -> None:
comp = Comparison(comparator=Comparator.IN, attribute="name", value="foo")
expected = {"name": {"$in": ["foo"]}}
actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
assert expected == actual
def test_visit_comparison_nin() -> None:
comp = Comparison(comparator=Comparator.NIN, attribute="name", value="foo")
expected = {"name": {"$nin": ["foo"]}}
actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
assert expected == actual
def test_visit_operation() -> None:
op = Operation(
operator=Operator.AND,
arguments=[
Comparison(comparator=Comparator.GTE, attribute="qty", value=10),
Comparison(comparator=Comparator.LTE, attribute="qty", value=20),
Comparison(comparator=Comparator.EQ, attribute="name", value="foo"),
],
)
expected = {
"$and": [
{"qty": {"$gte": 10}},
{"qty": {"$lte": 20}},
{"name": {"$eq": "foo"}},
]
}
actual = DEFAULT_TRANSLATOR.visit_operation(op)
assert expected == actual
def test_visit_structured_query_no_filter() -> None:
query = "What is the capital of France?"
structured_query = StructuredQuery(
query=query,
filter=None,
)
expected: Tuple[str, Dict] = (query, {})
actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
assert expected == actual
def test_visit_structured_query_one_attr() -> None:
query = "What is the capital of France?"
comp = Comparison(comparator=Comparator.IN, attribute="qty", value=[5, 15, 20])
structured_query = StructuredQuery(
query=query,
filter=comp,
)
expected = (
query,
{"pre_filter": {"qty": {"$in": [5, 15, 20]}}},
)
actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
assert expected == actual
def test_visit_structured_query_deep_nesting() -> None:
query = "What is the capital of France?"
op = Operation(
operator=Operator.AND,
arguments=[
Comparison(comparator=Comparator.EQ, attribute="name", value="foo"),
Operation(
operator=Operator.OR,
arguments=[
Comparison(comparator=Comparator.GT, attribute="qty", value=6),
Comparison(
comparator=Comparator.NIN,
attribute="tags",
value=["bar", "foo"],
),
],
),
],
)
structured_query = StructuredQuery(
query=query,
filter=op,
)
expected = (
query,
{
"pre_filter": {
"$and": [
{"name": {"$eq": "foo"}},
{"$or": [{"qty": {"$gt": 6}}, {"tags": {"$nin": ["bar", "foo"]}}]},
]
}
},
)
actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
assert expected == actual
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_dingo.py | from typing import Dict, Tuple
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.dingo import DingoDBTranslator
DEFAULT_TRANSLATOR = DingoDBTranslator()
# DingoDBTranslator is a pass-through: comparisons and operations are
# returned structurally unchanged and forwarded to DingoDB under
# search_params["langchain_expr"].
def test_visit_comparison() -> None:
    # The comparison is returned as-is (no backend-specific lowering).
    comp = Comparison(comparator=Comparator.LT, attribute="foo", value=["1", "2"])
    expected = Comparison(comparator=Comparator.LT, attribute="foo", value=["1", "2"])
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_operation() -> None:
    # Operations are likewise returned structurally unchanged.
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    expected = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
def test_visit_structured_query() -> None:
    # No filter -> empty kwargs.
    query = "What is the capital of France?"
    structured_query = StructuredQuery(
        query=query,
        filter=None,
    )
    expected: Tuple[str, Dict] = (query, {})
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # A comparison filter is forwarded verbatim under search_params.
    comp = Comparison(comparator=Comparator.LT, attribute="foo", value=["1", "2"])
    structured_query = StructuredQuery(
        query=query,
        filter=comp,
    )
    expected = (
        query,
        {
            "search_params": {
                "langchain_expr": Comparison(
                    comparator=Comparator.LT, attribute="foo", value=["1", "2"]
                )
            }
        },
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # An operation filter is forwarded verbatim as well.
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    structured_query = StructuredQuery(
        query=query,
        filter=op,
    )
    expected = (
        query,
        {
            "search_params": {
                "langchain_expr": Operation(
                    operator=Operator.AND,
                    arguments=[
                        Comparison(comparator=Comparator.LT, attribute="foo", value=2),
                        Comparison(
                            comparator=Comparator.EQ, attribute="bar", value="baz"
                        ),
                    ],
                )
            }
        },
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_milvus.py | from typing import Any, Dict, Tuple
import pytest
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.milvus import MilvusTranslator
DEFAULT_TRANSLATOR = MilvusTranslator()
@pytest.mark.parametrize(
    "triplet",
    [
        (Comparator.EQ, 2, "( foo == 2 )"),
        (Comparator.GT, 2, "( foo > 2 )"),
        (Comparator.GTE, 2, "( foo >= 2 )"),
        (Comparator.LT, 2, "( foo < 2 )"),
        (Comparator.LTE, 2, "( foo <= 2 )"),
        (Comparator.IN, ["bar", "abc"], "( foo in ['bar', 'abc'] )"),
        (Comparator.LIKE, "bar", '( foo like "bar%" )'),
    ],
)
def test_visit_comparison(triplet: Tuple[Comparator, Any, str]) -> None:
    # Each comparator lowers to the corresponding Milvus boolean-expression
    # fragment; note LIKE appends a trailing "%" wildcard to the value.
    comparator, value, expected = triplet
    comp = Comparison(comparator=comparator, attribute="foo", value=value)
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_operation() -> None:
    """Operation lowering: n-ary operators join sub-expressions with the
    operator keyword; ``not`` is unary and rejects extra arguments."""
    # Non-unary operator: arguments joined with "and".
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.LT, attribute="abc", value="4"),
        ],
    )
    expected = '(( foo < 2 ) and ( bar == "baz" ) and ( abc < "4" ))'
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
    # Unary operator: normal execution.
    op = Operation(
        operator=Operator.NOT,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
        ],
    )
    expected = "not(( foo < 2 ))"
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
    # Unary operator with several arguments must be rejected.  Use
    # pytest.raises instead of the try/except/else + `assert False` idiom.
    op = Operation(
        operator=Operator.NOT,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.LT, attribute="abc", value="4"),
        ],
    )
    with pytest.raises(
        ValueError, match='"not" can have only one argument in Milvus'
    ):
        DEFAULT_TRANSLATOR.visit_operation(op)
def test_visit_structured_query() -> None:
    # No filter -> empty kwargs.
    query = "What is the capital of France?"
    structured_query = StructuredQuery(
        query=query,
        filter=None,
    )
    expected: Tuple[str, Dict] = (query, {})
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # A single comparison lands under the Milvus "expr" kwarg.
    comp = Comparison(comparator=Comparator.LT, attribute="foo", value=454)
    structured_query = StructuredQuery(
        query=query,
        filter=comp,
    )
    expected = (
        query,
        {"expr": "( foo < 454 )"},
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # An AND operation joins the sub-expressions with "and".
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.LT, attribute="abc", value=50),
        ],
    )
    structured_query = StructuredQuery(
        query=query,
        filter=op,
    )
    expected = (
        query,
        {"expr": "(( foo < 2 ) " 'and ( bar == "baz" ) ' "and ( abc < 50 ))"},
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_myscale.py | from typing import Any, Dict, Tuple
import pytest
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.myscale import MyScaleTranslator
DEFAULT_TRANSLATOR = MyScaleTranslator()
@pytest.mark.parametrize(
    "triplet",
    [
        (Comparator.LT, 2, "metadata.foo < 2"),
        (Comparator.LTE, 2, "metadata.foo <= 2"),
        (Comparator.GT, 2, "metadata.foo > 2"),
        (Comparator.GTE, 2, "metadata.foo >= 2"),
        (Comparator.CONTAIN, 2, "has(metadata.foo,2)"),
        (Comparator.LIKE, "bar", "metadata.foo ILIKE '%bar%'"),
    ],
)
def test_visit_comparison(triplet: Tuple[Comparator, Any, str]) -> None:
    # MyScale lowers comparators to ClickHouse-style SQL fragments over the
    # "metadata." column prefix.
    comparator, value, expected = triplet
    comp = Comparison(comparator=comparator, attribute="foo", value=value)
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_operation() -> None:
    # AND joins sub-clauses with SQL "AND"; EQ becomes "=".
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    expected = "metadata.foo < 2 AND metadata.bar = 'baz'"
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
def test_visit_structured_query() -> None:
    # No filter -> empty kwargs.
    query = "What is the capital of France?"
    structured_query = StructuredQuery(
        query=query,
        filter=None,
    )
    expected: Tuple[str, Dict] = (query, {})
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # A filter is rendered into the "where_str" kwarg.
    comp = Comparison(comparator=Comparator.LT, attribute="foo", value=["1", "2"])
    structured_query = StructuredQuery(
        query=query,
        filter=comp,
    )
    expected = (
        query,
        {"where_str": "metadata.foo < ['1', '2']"},
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    structured_query = StructuredQuery(
        query=query,
        filter=op,
    )
    expected = (
        query,
        {"where_str": "metadata.foo < 2 AND metadata.bar = 'baz'"},
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_hanavector.py | from typing import Dict, Tuple
import pytest as pytest
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.hanavector import HanaTranslator
DEFAULT_TRANSLATOR = HanaTranslator()
# HanaTranslator uses Mongo-style `$` operators inside a "filter" kwarg.
def test_visit_comparison() -> None:
    # LT lowers to $lt.
    comp = Comparison(comparator=Comparator.LT, attribute="foo", value=1)
    expected = {"foo": {"$lt": 1}}
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_operation() -> None:
    # AND becomes a $and list of sub-filters.
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.GT, attribute="abc", value=2.0),
        ],
    )
    expected = {
        "$and": [{"foo": {"$lt": 2}}, {"bar": {"$eq": "baz"}}, {"abc": {"$gt": 2.0}}]
    }
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
def test_visit_structured_query() -> None:
    # No filter -> empty kwargs.
    query = "What is the capital of France?"
    structured_query = StructuredQuery(
        query=query,
        filter=None,
    )
    expected: Tuple[str, Dict] = (query, {})
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # Single comparison -> "filter" kwarg.
    comp = Comparison(comparator=Comparator.LT, attribute="foo", value=1)
    structured_query = StructuredQuery(
        query=query,
        filter=comp,
    )
    expected = (query, {"filter": {"foo": {"$lt": 1}}})
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # AND operation -> $and list inside "filter".
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.GT, attribute="abc", value=2.0),
        ],
    )
    structured_query = StructuredQuery(
        query=query,
        filter=op,
    )
    expected = (
        query,
        {
            "filter": {
                "$and": [
                    {"foo": {"$lt": 2}},
                    {"bar": {"$eq": "baz"}},
                    {"abc": {"$gt": 2.0}},
                ]
            }
        },
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_tencentvectordb.py | from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.tencentvectordb import (
TencentVectorDBTranslator,
)
def test_translate_with_operator() -> None:
    # Nested operations (OR inside AND) are rendered as one parenthesized
    # Tencent VectorDB expression string under kwargs["expr"].
    query = StructuredQuery(
        query="What are songs by Taylor Swift or Katy Perry"
        " under 3 minutes long in the dance pop genre",
        filter=Operation(
            operator=Operator.AND,
            arguments=[
                Operation(
                    operator=Operator.OR,
                    arguments=[
                        Comparison(
                            comparator=Comparator.EQ,
                            attribute="artist",
                            value="Taylor Swift",
                        ),
                        Comparison(
                            comparator=Comparator.EQ,
                            attribute="artist",
                            value="Katy Perry",
                        ),
                    ],
                ),
                Comparison(comparator=Comparator.LT, attribute="length", value=180),
            ],
        ),
    )
    translator = TencentVectorDBTranslator()
    _, kwargs = translator.visit_structured_query(query)
    expr = '(artist = "Taylor Swift" or artist = "Katy Perry") and length < 180'
    assert kwargs["expr"] == expr
def test_translate_with_in_comparison() -> None:
    # Written as a single Comparison (rather than an Operation):
    # IN renders as an `in (...)` list expression.
    query = StructuredQuery(
        query="What are songs by Taylor Swift or Katy Perry "
        "under 3 minutes long in the dance pop genre",
        filter=Comparison(
            comparator=Comparator.IN,
            attribute="artist",
            value=["Taylor Swift", "Katy Perry"],
        ),
    )
    translator = TencentVectorDBTranslator()
    _, kwargs = translator.visit_structured_query(query)
    expr = 'artist in ("Taylor Swift", "Katy Perry")'
    assert kwargs["expr"] == expr
def test_translate_with_allowed_fields() -> None:
    # When the filtered attribute is listed in meta_keys, translation succeeds.
    query = StructuredQuery(
        query="What are songs by Taylor Swift or Katy Perry "
        "under 3 minutes long in the dance pop genre",
        filter=Comparison(
            comparator=Comparator.IN,
            attribute="artist",
            value=["Taylor Swift", "Katy Perry"],
        ),
    )
    translator = TencentVectorDBTranslator(meta_keys=["artist"])
    _, kwargs = translator.visit_structured_query(query)
    expr = 'artist in ("Taylor Swift", "Katy Perry")'
    assert kwargs["expr"] == expr
def test_translate_with_unsupported_field() -> None:
    """Filtering on an attribute absent from ``meta_keys`` raises ValueError."""
    # Local import: this module has no top-level pytest import, and pytest is
    # always available when the test suite runs.
    import pytest

    query = StructuredQuery(
        query="What are songs by Taylor Swift or Katy Perry "
        "under 3 minutes long in the dance pop genre",
        filter=Comparison(
            comparator=Comparator.IN,
            attribute="artist",
            value=["Taylor Swift", "Katy Perry"],
        ),
    )
    # Only "title" is allowed, so filtering on "artist" must be rejected.
    # pytest.raises replaces the try/except/else + `assert False` anti-pattern
    # and also verifies the exact error message.
    translator = TencentVectorDBTranslator(meta_keys=["title"])
    with pytest.raises(
        ValueError, match="Expr Filtering found Unsupported attribute: artist"
    ):
        translator.visit_structured_query(query)
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_databricks_vector_search.py | from typing import Any, Dict, Tuple
import pytest
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.databricks_vector_search import (
DatabricksVectorSearchTranslator,
)
DEFAULT_TRANSLATOR = DatabricksVectorSearchTranslator()
@pytest.mark.parametrize(
    "triplet",
    [
        (Comparator.EQ, 2, {"foo": 2}),
        (Comparator.GT, 2, {"foo >": 2}),
        (Comparator.GTE, 2, {"foo >=": 2}),
        (Comparator.LT, 2, {"foo <": 2}),
        (Comparator.LTE, 2, {"foo <=": 2}),
        (Comparator.IN, ["bar", "abc"], {"foo": ["bar", "abc"]}),
        (Comparator.LIKE, "bar", {"foo LIKE": "bar"}),
    ],
)
def test_visit_comparison(triplet: Tuple[Comparator, Any, Dict]) -> None:
    # Databricks folds the comparator into the filter-dict key (e.g. "foo >=").
    comparator, value, expected = triplet
    comp = Comparison(comparator=comparator, attribute="foo", value=value)
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_operation_and() -> None:
    # AND merges sub-filters into one flat dict.
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    expected = {"foo <": 2, "bar": "baz"}
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
def test_visit_operation_or() -> None:
    # OR joins attribute names in the key and collects the values in a list.
    op = Operation(
        operator=Operator.OR,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    expected = {"foo OR bar": [2, "baz"]}
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
def test_visit_operation_not() -> None:
    # NOT annotates the attribute key with a " NOT" suffix.
    op = Operation(
        operator=Operator.NOT,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value=2),
        ],
    )
    expected = {"foo NOT": 2}
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
def test_visit_operation_not_that_raises_for_more_than_one_filter_condition() -> None:
    # NOT is unary; more than one argument must raise with this exact message.
    with pytest.raises(Exception) as exc_info:
        op = Operation(
            operator=Operator.NOT,
            arguments=[
                Comparison(comparator=Comparator.EQ, attribute="foo", value=2),
                Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            ],
        )
        DEFAULT_TRANSLATOR.visit_operation(op)
    assert (
        str(exc_info.value) == '"not" can have only one argument in '
        "Databricks vector search"
    )
def test_visit_structured_query_with_no_filter() -> None:
    # No filter -> empty kwargs.
    query = "What is the capital of France?"
    structured_query = StructuredQuery(
        query=query,
        filter=None,
    )
    expected: Tuple[str, Dict] = (query, {})
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
def test_visit_structured_query_with_one_arg_filter() -> None:
    # A single EQ comparison lands under the "filter" kwarg.
    query = "What is the capital of France?"
    comp = Comparison(comparator=Comparator.EQ, attribute="country", value="France")
    structured_query = StructuredQuery(
        query=query,
        filter=comp,
    )
    expected = (query, {"filter": {"country": "France"}})
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
def test_visit_structured_query_with_multiple_arg_filter_and_operator() -> None:
    # AND of several comparisons flattens into one filter dict.
    query = "What is the capital of France in the years between 1888 and 1900?"
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="country", value="France"),
            Comparison(comparator=Comparator.GTE, attribute="year", value=1888),
            Comparison(comparator=Comparator.LTE, attribute="year", value=1900),
        ],
    )
    structured_query = StructuredQuery(
        query=query,
        filter=op,
    )
    expected = (
        query,
        {"filter": {"country": "France", "year >=": 1888, "year <=": 1900}},
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_weaviate.py | from typing import Dict, Tuple
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.weaviate import WeaviateTranslator
DEFAULT_TRANSLATOR = WeaviateTranslator()
# WeaviateTranslator picks the value key (valueText/valueInt/valueNumber/
# valueBoolean/valueDate) from the Python type of the comparison value.
def test_visit_comparison() -> None:
    # Strings map to valueText.
    comp = Comparison(comparator=Comparator.EQ, attribute="foo", value="1")
    expected = {"operator": "Equal", "path": ["foo"], "valueText": "1"}
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_comparison_integer() -> None:
    # Ints map to valueInt.
    comp = Comparison(comparator=Comparator.GTE, attribute="foo", value=1)
    expected = {"operator": "GreaterThanEqual", "path": ["foo"], "valueInt": 1}
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_comparison_number() -> None:
    # Floats map to valueNumber.
    comp = Comparison(comparator=Comparator.GT, attribute="foo", value=1.4)
    expected = {"operator": "GreaterThan", "path": ["foo"], "valueNumber": 1.4}
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_comparison_boolean() -> None:
    # Bools map to valueBoolean.
    comp = Comparison(comparator=Comparator.NE, attribute="foo", value=False)
    expected = {"operator": "NotEqual", "path": ["foo"], "valueBoolean": False}
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_comparison_datetime() -> None:
    # Date-typed values are normalized to an RFC 3339 timestamp (valueDate).
    comp = Comparison(
        comparator=Comparator.LTE,
        attribute="foo",
        value={"type": "date", "date": "2023-09-13"},
    )
    expected = {
        "operator": "LessThanEqual",
        "path": ["foo"],
        "valueDate": "2023-09-13T00:00:00Z",
    }
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_comparison_date() -> None:
    # Same normalization for strict LessThan.
    comp = Comparison(
        comparator=Comparator.LT,
        attribute="foo",
        value={"type": "date", "date": "2023-09-13"},
    )
    expected = {
        "operator": "LessThan",
        "path": ["foo"],
        "valueDate": "2023-09-13T00:00:00Z",
    }
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_operation() -> None:
    # AND wraps its translated arguments in an "operands" list.
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value="hello"),
            Comparison(
                comparator=Comparator.GTE,
                attribute="bar",
                value={"type": "date", "date": "2023-09-13"},
            ),
            Comparison(comparator=Comparator.LTE, attribute="abc", value=1.4),
        ],
    )
    expected = {
        "operands": [
            {"operator": "Equal", "path": ["foo"], "valueText": "hello"},
            {
                "operator": "GreaterThanEqual",
                "path": ["bar"],
                "valueDate": "2023-09-13T00:00:00Z",
            },
            {"operator": "LessThanEqual", "path": ["abc"], "valueNumber": 1.4},
        ],
        "operator": "And",
    }
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
def test_visit_structured_query() -> None:
    # No filter -> empty kwargs.
    query = "What is the capital of France?"
    structured_query = StructuredQuery(
        query=query,
        filter=None,
    )
    expected: Tuple[str, Dict] = (query, {})
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # A comparison filter lands under "where_filter".
    comp = Comparison(comparator=Comparator.EQ, attribute="foo", value="1")
    structured_query = StructuredQuery(
        query=query,
        filter=comp,
    )
    expected = (
        query,
        {"where_filter": {"path": ["foo"], "operator": "Equal", "valueText": "1"}},
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # An operation filter lands under "where_filter" as an And of operands.
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    structured_query = StructuredQuery(
        query=query,
        filter=op,
    )
    expected = (
        query,
        {
            "where_filter": {
                "operator": "And",
                "operands": [
                    {"path": ["foo"], "operator": "Equal", "valueInt": 2},
                    {"path": ["bar"], "operator": "Equal", "valueText": "baz"},
                ],
            }
        },
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_astradb.py | from typing import Dict, Tuple
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.astradb import AstraDBTranslator
DEFAULT_TRANSLATOR = AstraDBTranslator()
# AstraDBTranslator mirrors the MongoDB `$`-operator syntax but places the
# result under the "filter" kwarg (not "pre_filter" as for Atlas).
def test_visit_comparison_lt() -> None:
    # LT lowers to $lt.
    comp = Comparison(comparator=Comparator.LT, attribute="qty", value=20)
    expected = {"qty": {"$lt": 20}}
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_comparison_eq() -> None:
    # EQ lowers to $eq.
    comp = Comparison(comparator=Comparator.EQ, attribute="qty", value=10)
    expected = {"qty": {"$eq": 10}}
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_comparison_ne() -> None:
    # NE lowers to $ne.
    comp = Comparison(comparator=Comparator.NE, attribute="name", value="foo")
    expected = {"name": {"$ne": "foo"}}
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_comparison_in() -> None:
    # A scalar IN value is wrapped into a one-element $in list.
    comp = Comparison(comparator=Comparator.IN, attribute="name", value="foo")
    expected = {"name": {"$in": ["foo"]}}
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_comparison_nin() -> None:
    # A scalar NIN value is wrapped into a one-element $nin list.
    comp = Comparison(comparator=Comparator.NIN, attribute="name", value="foo")
    expected = {"name": {"$nin": ["foo"]}}
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_operation() -> None:
    # AND becomes a $and list of sub-filters.
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.GTE, attribute="qty", value=10),
            Comparison(comparator=Comparator.LTE, attribute="qty", value=20),
            Comparison(comparator=Comparator.EQ, attribute="name", value="foo"),
        ],
    )
    expected = {
        "$and": [
            {"qty": {"$gte": 10}},
            {"qty": {"$lte": 20}},
            {"name": {"$eq": "foo"}},
        ]
    }
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
def test_visit_structured_query_no_filter() -> None:
    # No filter -> empty kwargs.
    query = "What is the capital of France?"
    structured_query = StructuredQuery(
        query=query,
        filter=None,
    )
    expected: Tuple[str, Dict] = (query, {})
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
def test_visit_structured_query_one_attr() -> None:
    # A single comparison lands under the "filter" kwarg.
    query = "What is the capital of France?"
    comp = Comparison(comparator=Comparator.IN, attribute="qty", value=[5, 15, 20])
    structured_query = StructuredQuery(
        query=query,
        filter=comp,
    )
    expected = (
        query,
        {"filter": {"qty": {"$in": [5, 15, 20]}}},
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
def test_visit_structured_query_deep_nesting() -> None:
    # Nested operations ($or inside $and) are translated recursively.
    query = "What is the capital of France?"
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="name", value="foo"),
            Operation(
                operator=Operator.OR,
                arguments=[
                    Comparison(comparator=Comparator.GT, attribute="qty", value=6),
                    Comparison(
                        comparator=Comparator.NIN,
                        attribute="tags",
                        value=["bar", "foo"],
                    ),
                ],
            ),
        ],
    )
    structured_query = StructuredQuery(
        query=query,
        filter=op,
    )
    expected = (
        query,
        {
            "filter": {
                "$and": [
                    {"name": {"$eq": "foo"}},
                    {"$or": [{"qty": {"$gt": 6}}, {"tags": {"$nin": ["bar", "foo"]}}]},
                ]
            }
        },
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_pinecone.py | from typing import Dict, Tuple
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.pinecone import PineconeTranslator
DEFAULT_TRANSLATOR = PineconeTranslator()
# PineconeTranslator emits Mongo-style `$` operators under a "filter" kwarg.
def test_visit_comparison() -> None:
    # The value is passed through unchanged under the `$` operator.
    comp = Comparison(comparator=Comparator.LT, attribute="foo", value=["1", "2"])
    expected = {"foo": {"$lt": ["1", "2"]}}
    actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
    assert expected == actual
def test_visit_operation() -> None:
    # AND becomes a $and list of sub-filters.
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    expected = {"$and": [{"foo": {"$lt": 2}}, {"bar": {"$eq": "baz"}}]}
    actual = DEFAULT_TRANSLATOR.visit_operation(op)
    assert expected == actual
def test_visit_structured_query() -> None:
    # No filter -> empty kwargs.
    query = "What is the capital of France?"
    structured_query = StructuredQuery(
        query=query,
        filter=None,
    )
    expected: Tuple[str, Dict] = (query, {})
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # Single comparison -> "filter" kwarg.
    comp = Comparison(comparator=Comparator.LT, attribute="foo", value=["1", "2"])
    structured_query = StructuredQuery(
        query=query,
        filter=comp,
    )
    expected = (
        query,
        {"filter": {"foo": {"$lt": ["1", "2"]}}},
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
    # AND operation -> $and inside "filter".
    op = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    structured_query = StructuredQuery(
        query=query,
        filter=op,
    )
    expected = (
        query,
        {"filter": {"$and": [{"foo": {"$lt": 2}}, {"bar": {"$eq": "baz"}}]}},
    )
    actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query)
    assert expected == actual
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_elasticsearch.py | from typing import Dict, Tuple
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.elasticsearch import ElasticsearchTranslator
DEFAULT_TRANSLATOR = ElasticsearchTranslator()
def test_visit_comparison() -> None:
comp = Comparison(comparator=Comparator.EQ, attribute="foo", value="1")
expected = {"term": {"metadata.foo.keyword": "1"}}
actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
assert expected == actual
def test_visit_comparison_range_gt() -> None:
comp = Comparison(comparator=Comparator.GT, attribute="foo", value=1)
expected = {"range": {"metadata.foo": {"gt": 1}}}
actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
assert expected == actual
def test_visit_comparison_range_gte() -> None:
comp = Comparison(comparator=Comparator.GTE, attribute="foo", value=1)
expected = {"range": {"metadata.foo": {"gte": 1}}}
actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
assert expected == actual
def test_visit_comparison_range_lt() -> None:
comp = Comparison(comparator=Comparator.LT, attribute="foo", value=1)
expected = {"range": {"metadata.foo": {"lt": 1}}}
actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
assert expected == actual
def test_visit_comparison_range_lte() -> None:
comp = Comparison(comparator=Comparator.LTE, attribute="foo", value=1)
expected = {"range": {"metadata.foo": {"lte": 1}}}
actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
assert expected == actual
def test_visit_comparison_range_match() -> None:
comp = Comparison(comparator=Comparator.CONTAIN, attribute="foo", value="1")
expected = {"match": {"metadata.foo": {"query": "1"}}}
actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
assert expected == actual
def test_visit_comparison_range_like() -> None:
comp = Comparison(comparator=Comparator.LIKE, attribute="foo", value="bar")
expected = {"match": {"metadata.foo": {"query": "bar", "fuzziness": "AUTO"}}}
actual = DEFAULT_TRANSLATOR.visit_comparison(comp)
assert expected == actual
def test_visit_operation() -> None:
op = Operation(
operator=Operator.AND,
arguments=[
Comparison(comparator=Comparator.EQ, attribute="foo", value=2),
Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
],
)
expected = {
"bool": {
"must": [
{"term": {"metadata.foo": 2}},
{"term": {"metadata.bar.keyword": "baz"}},
]
}
}
actual = DEFAULT_TRANSLATOR.visit_operation(op)
assert expected == actual
def test_visit_operation_or() -> None:
op = Operation(
operator=Operator.OR,
arguments=[
Comparison(comparator=Comparator.EQ, attribute="foo", value=2),
Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
],
)
expected = {
"bool": {
"should": [
{"term": {"metadata.foo": 2}},
{"term": {"metadata.bar.keyword": "baz"}},
]
}
}
actual = DEFAULT_TRANSLATOR.visit_operation(op)
assert expected == actual
def test_visit_operation_not() -> None:
    """A NOT operation becomes ``bool``/``must_not`` over the translated children."""
    operation = Operation(
        operator=Operator.NOT,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    translated = DEFAULT_TRANSLATOR.visit_operation(operation)
    children = [
        {"term": {"metadata.foo": 2}},
        {"term": {"metadata.bar.keyword": "baz"}},
    ]
    assert translated == {"bool": {"must_not": children}}
def test_visit_structured_query() -> None:
    """A filterless structured query passes through with empty kwargs."""
    question = "What is the capital of France?"
    sq = StructuredQuery(query=question, filter=None, limit=None)
    expected: Tuple[str, Dict] = (question, {})
    assert DEFAULT_TRANSLATOR.visit_structured_query(sq) == expected
def test_visit_structured_query_filter() -> None:
    """A single comparison filter is wrapped in a one-element ``filter`` list."""
    question = "What is the capital of France?"
    comparison = Comparison(comparator=Comparator.EQ, attribute="foo", value="1")
    sq = StructuredQuery(query=question, filter=comparison, limit=None)
    translated = DEFAULT_TRANSLATOR.visit_structured_query(sq)
    assert translated == (
        question,
        {"filter": [{"term": {"metadata.foo.keyword": "1"}}]},
    )
def test_visit_structured_query_filter_and() -> None:
    """An AND filter is translated to a single ``bool``/``must`` clause."""
    question = "What is the capital of France?"
    conjunction = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
        ],
    )
    sq = StructuredQuery(query=question, filter=conjunction, limit=None)
    bool_clause = {
        "bool": {
            "must": [
                {"term": {"metadata.foo": 2}},
                {"term": {"metadata.bar.keyword": "baz"}},
            ]
        }
    }
    assert DEFAULT_TRANSLATOR.visit_structured_query(sq) == (
        question,
        {"filter": [bool_clause]},
    )
def test_visit_structured_query_complex() -> None:
    """Nested AND(OR(...)) filters translate to nested ``bool`` clauses."""
    question = "What is the capital of France?"
    nested_filter = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value=2),
            Operation(
                operator=Operator.OR,
                arguments=[
                    Comparison(comparator=Comparator.LT, attribute="bar", value=1),
                    Comparison(comparator=Comparator.LIKE, attribute="bar", value="10"),
                ],
            ),
        ],
    )
    sq = StructuredQuery(query=question, filter=nested_filter, limit=None)
    # Build the expected clause bottom-up: the inner OR, then the outer AND.
    or_clause = {
        "bool": {
            "should": [
                {"range": {"metadata.bar": {"lt": 1}}},
                {"match": {"metadata.bar": {"query": "10", "fuzziness": "AUTO"}}},
            ]
        }
    }
    and_clause = {"bool": {"must": [{"term": {"metadata.foo": 2}}, or_clause]}}
    assert DEFAULT_TRANSLATOR.visit_structured_query(sq) == (
        question,
        {"filter": [and_clause]},
    )
def test_visit_structured_query_with_date_range() -> None:
    """Date-typed GTE/LT values translate to ``range`` clauses on the date string."""
    question = "Who was the president of France in 1995?"
    year_1995 = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(
                comparator=Comparator.GTE,
                attribute="timestamp",
                value={"date": "1995-01-01", "type": "date"},
            ),
            Comparison(
                comparator=Comparator.LT,
                attribute="timestamp",
                value={"date": "1996-01-01", "type": "date"},
            ),
        ],
    )
    combined = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value="20"),
            year_1995,
        ],
    )
    sq = StructuredQuery(query=question, filter=combined, limit=None)
    # Inner AND over the two date bounds.
    range_clause = {
        "bool": {
            "must": [
                {"range": {"metadata.timestamp": {"gte": "1995-01-01"}}},
                {"range": {"metadata.timestamp": {"lt": "1996-01-01"}}},
            ]
        }
    }
    # Outer AND: string EQ hits the ``.keyword`` sub-field.
    outer_clause = {
        "bool": {"must": [{"term": {"metadata.foo.keyword": "20"}}, range_clause]}
    }
    assert DEFAULT_TRANSLATOR.visit_structured_query(sq) == (
        question,
        {"filter": [outer_clause]},
    )
def test_visit_structured_query_with_date() -> None:
    """A date-typed EQ value translates to a plain ``term`` on the date string."""
    question = "Who was the president of France on 1st of January 1995?"
    combined = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.EQ, attribute="foo", value="20"),
            Comparison(
                comparator=Comparator.EQ,
                attribute="timestamp",
                value={"date": "1995-01-01", "type": "date"},
            ),
        ],
    )
    sq = StructuredQuery(query=question, filter=combined, limit=None)
    bool_clause = {
        "bool": {
            "must": [
                # String EQ uses the ``.keyword`` sub-field; date EQ does not.
                {"term": {"metadata.foo.keyword": "20"}},
                {"term": {"metadata.timestamp": "1995-01-01"}},
            ]
        }
    }
    assert DEFAULT_TRANSLATOR.visit_structured_query(sq) == (
        question,
        {"filter": [bool_clause]},
    )
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_pgvector.py | from typing import Dict, Tuple
import pytest as pytest
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.pgvector import PGVectorTranslator
# Shared translator instance exercised by every test in this module.
DEFAULT_TRANSLATOR = PGVectorTranslator()
def test_visit_comparison() -> None:
    """A single comparison becomes ``{attribute: {comparator: value}}``."""
    comparison = Comparison(comparator=Comparator.LT, attribute="foo", value=1)
    translated = DEFAULT_TRANSLATOR.visit_comparison(comparison)
    assert translated == {"foo": {"lt": 1}}
@pytest.mark.skip("Not implemented")
def test_visit_operation() -> None:
    """AND operations should merge child comparisons into one filter dict."""
    operation = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.GT, attribute="abc", value=2.0),
        ],
    )
    translated = DEFAULT_TRANSLATOR.visit_operation(operation)
    assert translated == {
        "foo": {"lt": 2},
        "bar": {"eq": "baz"},
        "abc": {"gt": 2.0},
    }
def test_visit_structured_query() -> None:
    """Structured queries translate their filter into a ``filter`` kwarg."""
    question = "What is the capital of France?"

    # No filter -> query passes through with empty kwargs.
    expected: Tuple[str, Dict] = (question, {})
    assert (
        DEFAULT_TRANSLATOR.visit_structured_query(
            StructuredQuery(query=question, filter=None)
        )
        == expected
    )

    # Single comparison filter.
    comparison = Comparison(comparator=Comparator.LT, attribute="foo", value=1)
    translated = DEFAULT_TRANSLATOR.visit_structured_query(
        StructuredQuery(query=question, filter=comparison)
    )
    assert translated == (question, {"filter": {"foo": {"lt": 1}}})

    # Compound AND filter.
    conjunction = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.GT, attribute="abc", value=2.0),
        ],
    )
    translated = DEFAULT_TRANSLATOR.visit_structured_query(
        StructuredQuery(query=question, filter=conjunction)
    )
    and_filter = {
        "and": [
            {"foo": {"lt": 2}},
            {"bar": {"eq": "baz"}},
            {"abc": {"gt": 2.0}},
        ]
    }
    assert translated == (question, {"filter": and_filter})
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_vectara.py | from typing import Dict, Tuple
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.vectara import VectaraTranslator
# Shared translator instance exercised by every test in this module.
DEFAULT_TRANSLATOR = VectaraTranslator()
def test_visit_comparison() -> None:
    """A single comparison renders as a parenthesized ``doc.<attr>`` expression."""
    comparison = Comparison(comparator=Comparator.LT, attribute="foo", value="1")
    translated = DEFAULT_TRANSLATOR.visit_comparison(comparison)
    # String values are single-quoted in the rendered expression.
    assert translated == "( doc.foo < '1' )"
def test_visit_operation() -> None:
    """An AND operation joins child expressions with ``and`` inside parentheses."""
    operation = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.LT, attribute="abc", value=1),
        ],
    )
    translated = DEFAULT_TRANSLATOR.visit_operation(operation)
    assert translated == (
        "( ( doc.foo < 2 ) and ( doc.bar = 'baz' ) and ( doc.abc < 1 ) )"
    )
def test_visit_structured_query() -> None:
    """Structured queries render their filter as a single filter string."""
    question = "What is the capital of France?"

    # No filter -> query passes through with empty kwargs.
    expected: Tuple[str, Dict] = (question, {})
    assert (
        DEFAULT_TRANSLATOR.visit_structured_query(
            StructuredQuery(query=question, filter=None, limit=None)
        )
        == expected
    )

    # Single comparison filter.
    comparison = Comparison(comparator=Comparator.LT, attribute="foo", value=1)
    translated = DEFAULT_TRANSLATOR.visit_structured_query(
        StructuredQuery(query=question, filter=comparison, limit=None)
    )
    assert translated == (question, {"filter": "( doc.foo < 1 )"})

    # Compound AND filter.
    conjunction = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.LT, attribute="abc", value=1),
        ],
    )
    translated = DEFAULT_TRANSLATOR.visit_structured_query(
        StructuredQuery(query=question, filter=conjunction, limit=None)
    )
    assert translated == (
        question,
        {"filter": "( ( doc.foo < 2 ) and ( doc.bar = 'baz' ) and ( doc.abc < 1 ) )"},
    )
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_timescalevector.py | from typing import Dict, Tuple
import pytest as pytest
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.timescalevector import (
TimescaleVectorTranslator,
)
# Shared translator instance exercised by every test in this module.
DEFAULT_TRANSLATOR = TimescaleVectorTranslator()
@pytest.mark.requires("timescale_vector")
def test_visit_comparison() -> None:
    """A single comparison becomes one ``client.Predicates`` tuple."""
    # Imported lazily so the module collects even without timescale_vector.
    from timescale_vector import client

    comparison = Comparison(comparator=Comparator.LT, attribute="foo", value=1)
    translated = DEFAULT_TRANSLATOR.visit_comparison(comparison)
    assert translated == client.Predicates(("foo", "<", 1))
@pytest.mark.requires("timescale_vector")
def test_visit_operation() -> None:
    """An AND operation combines child predicates into one conjunction."""
    # Imported lazily so the module collects even without timescale_vector.
    from timescale_vector import client

    operation = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.GT, attribute="abc", value=2.0),
        ],
    )
    translated = DEFAULT_TRANSLATOR.visit_operation(operation)
    assert translated == client.Predicates(
        client.Predicates(("foo", "<", 2)),
        client.Predicates(("bar", "==", "baz")),
        client.Predicates(("abc", ">", 2.0)),
    )
@pytest.mark.requires("timescale_vector")
def test_visit_structured_query() -> None:
    """Structured queries translate their filter into a ``predicates`` kwarg."""
    # Imported lazily so the module collects even without timescale_vector.
    from timescale_vector import client

    question = "What is the capital of France?"

    # No filter -> query passes through with empty kwargs.
    expected: Tuple[str, Dict] = (question, {})
    assert (
        DEFAULT_TRANSLATOR.visit_structured_query(
            StructuredQuery(query=question, filter=None)
        )
        == expected
    )

    # Single comparison -> one predicate.
    comparison = Comparison(comparator=Comparator.LT, attribute="foo", value=1)
    translated = DEFAULT_TRANSLATOR.visit_structured_query(
        StructuredQuery(query=question, filter=comparison)
    )
    assert translated == (question, {"predicates": client.Predicates(("foo", "<", 1))})

    # AND operation -> conjunction of predicates.
    conjunction = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.GT, attribute="abc", value=2.0),
        ],
    )
    translated = DEFAULT_TRANSLATOR.visit_structured_query(
        StructuredQuery(query=question, filter=conjunction)
    )
    combined = client.Predicates(
        client.Predicates(("foo", "<", 2)),
        client.Predicates(("bar", "==", "baz")),
        client.Predicates(("abc", ">", 2.0)),
    )
    assert translated == (question, {"predicates": combined})
|
0 | lc_public_repos/langchain/libs/community/tests/unit_tests | lc_public_repos/langchain/libs/community/tests/unit_tests/query_constructors/test_chroma.py | from typing import Dict, Tuple
from langchain_core.structured_query import (
Comparator,
Comparison,
Operation,
Operator,
StructuredQuery,
)
from langchain_community.query_constructors.chroma import ChromaTranslator
# Shared translator instance exercised by every test in this module.
DEFAULT_TRANSLATOR = ChromaTranslator()
def test_visit_comparison() -> None:
    """A single comparison becomes ``{attribute: {"$<comparator>": value}}``."""
    comparison = Comparison(comparator=Comparator.LT, attribute="foo", value=["1", "2"])
    translated = DEFAULT_TRANSLATOR.visit_comparison(comparison)
    assert translated == {"foo": {"$lt": ["1", "2"]}}
def test_visit_operation() -> None:
    """An AND operation becomes ``{"$and": [...]}`` over the translated children."""
    operation = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.LT, attribute="abc", value=["1", "2"]),
        ],
    )
    translated = DEFAULT_TRANSLATOR.visit_operation(operation)
    children = [
        {"foo": {"$lt": 2}},
        {"bar": {"$eq": "baz"}},
        {"abc": {"$lt": ["1", "2"]}},
    ]
    assert translated == {"$and": children}
def test_visit_structured_query() -> None:
    """Structured queries translate their filter into a ``filter`` kwarg."""
    question = "What is the capital of France?"

    # No filter -> query passes through with empty kwargs.
    expected: Tuple[str, Dict] = (question, {})
    assert (
        DEFAULT_TRANSLATOR.visit_structured_query(
            StructuredQuery(query=question, filter=None)
        )
        == expected
    )

    # Single comparison filter.
    comparison = Comparison(comparator=Comparator.LT, attribute="foo", value=["1", "2"])
    translated = DEFAULT_TRANSLATOR.visit_structured_query(
        StructuredQuery(query=question, filter=comparison)
    )
    assert translated == (question, {"filter": {"foo": {"$lt": ["1", "2"]}}})

    # Compound AND filter.
    conjunction = Operation(
        operator=Operator.AND,
        arguments=[
            Comparison(comparator=Comparator.LT, attribute="foo", value=2),
            Comparison(comparator=Comparator.EQ, attribute="bar", value="baz"),
            Comparison(comparator=Comparator.LT, attribute="abc", value=["1", "2"]),
        ],
    )
    translated = DEFAULT_TRANSLATOR.visit_structured_query(
        StructuredQuery(query=question, filter=conjunction)
    )
    and_filter = {
        "$and": [
            {"foo": {"$lt": 2}},
            {"bar": {"$eq": "baz"}},
            {"abc": {"$lt": ["1", "2"]}},
        ]
    }
    assert translated == (question, {"filter": and_filter})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.