language stringclasses 1
value | repo stringclasses 346
values | path stringlengths 6 201 | class_span dict | source stringlengths 21 2.38M | target stringlengths 1 96 |
|---|---|---|---|---|---|
python | great-expectations__great_expectations | great_expectations/datasource/fluent/databricks_sql_datasource.py | {
"start": 2601,
"end": 4342
} | class ____(AnyUrl):
allowed_schemes = {
"databricks",
}
query: str # if query is not provided, validate_parts() will raise an error
@classmethod
@override
def validate_parts(cls, parts: Parts, validate_port: bool = True) -> Parts:
"""
Overridden to validate additional fields outside of scheme (which is performed by AnyUrl).
"""
query = parts["query"]
if query is None:
raise _UrlQueryError()
http_path = _parse_param_from_query_string(param="http_path", query=query)
if http_path is None:
raise _UrlHttpPathError()
catalog = _parse_param_from_query_string(param="catalog", query=query)
if catalog is None:
raise _UrlCatalogError()
schema = _parse_param_from_query_string(param="schema", query=query)
if schema is None:
raise _UrlSchemaError()
return AnyUrl.validate_parts(parts=parts, validate_port=validate_port)
@overload
@classmethod
def parse_url(
cls, url: ConfigStr, config_provider: _ConfigurationProvider = ...
) -> DatabricksDsn: ...
@overload
@classmethod
def parse_url(
cls, url: str, config_provider: _ConfigurationProvider | None = ...
) -> DatabricksDsn: ...
@classmethod
def parse_url(
cls, url: ConfigStr | str, config_provider: _ConfigurationProvider | None = None
) -> DatabricksDsn:
if isinstance(url, ConfigStr):
assert config_provider, "`config_provider` must be provided"
url = url.get_config_value(config_provider=config_provider)
parsed_url = pydantic.parse_obj_as(DatabricksDsn, url)
return parsed_url
| DatabricksDsn |
python | getsentry__sentry | src/sentry/deletions/base.py | {
"start": 1945,
"end": 2293
} | class ____(BaseRelation):
def __init__(
self,
model: type[ModelT],
query: Mapping[str, Any],
task: type[BaseDeletionTask[Any]] | None = None,
) -> None:
params = {"model": model, "query": query}
super().__init__(params=params, task=task)
ModelT = TypeVar("ModelT", bound=Model)
| ModelRelation |
python | streamlit__streamlit | lib/tests/streamlit/elements/pdf_test.py | {
"start": 2425,
"end": 9908
} | class ____(DeltaGeneratorTestCase):
"""Test ability to marshall PDF protos."""
# Dummy PDF bytes for testing (not a real PDF, but sufficient for testing)
DUMMY_PDF_BYTES = (
b"%PDF-1.4\n1 0 obj\n<<\n/Type /Catalog\n>>\nendobj\nxref\n0 1\n0000000000 65535"
b"f \ntrailer\n<<\n/Size 1\n/Root 1 0 R\n>>\nstartxref\n9\n%%EOF"
)
def test_pdf_url(self):
"""Test PDF with URL."""
# Use a fake URL to avoid dependency on external resources
url = "https://example.com/fake-document.pdf"
st.pdf(url)
element = self.get_delta_from_queue().new_element
assert element.bidi_component.component_name == "streamlit-pdf.pdf_viewer"
# Parse the JSON args to check the parameters
json_args = json.loads(element.bidi_component.json)
assert json_args["file"] == url
assert json_args["height"] == "500" # Height is converted to string
def test_pdf_with_height(self):
"""Test PDF with custom height."""
url = "https://example.com/fake-document.pdf"
st.pdf(url, height=600)
element = self.get_delta_from_queue().new_element
assert element.bidi_component.component_name == "streamlit-pdf.pdf_viewer"
json_args = json.loads(element.bidi_component.json)
assert json_args["file"] == url
assert json_args["height"] == "600" # Height is converted to string
def test_pdf_with_height_stretch(self):
"""Test PDF with stretch height."""
url = "https://example.com/fake-document.pdf"
st.pdf(url, height="stretch")
element = self.get_delta_from_queue().new_element
assert element.bidi_component.component_name == "streamlit-pdf.pdf_viewer"
json_args = json.loads(element.bidi_component.json)
assert json_args["file"] == url
assert (
json_args["height"] == "stretch"
) # stretch is passed as "stretch" to component
def test_pdf_with_bytes_data(self):
"""Test PDF with raw bytes data."""
st.pdf(self.DUMMY_PDF_BYTES)
element = self.get_delta_from_queue().new_element
assert element.bidi_component.component_name == "streamlit-pdf.pdf_viewer"
# Check that bytes are uploaded to media storage and passed as URL
json_args = json.loads(element.bidi_component.json)
assert json_args["file"].startswith("/media/") # Media URL
assert json_args["height"] == "500"
def test_pdf_with_bytesio_data(self):
"""Test PDF with BytesIO data."""
pdf_bytesio = io.BytesIO(self.DUMMY_PDF_BYTES)
st.pdf(pdf_bytesio)
element = self.get_delta_from_queue().new_element
assert element.bidi_component.component_name == "streamlit-pdf.pdf_viewer"
# Check that bytes are uploaded to media storage and passed as URL
json_args = json.loads(element.bidi_component.json)
assert json_args["file"].startswith("/media/") # Media URL
assert json_args["height"] == "500"
def test_pdf_with_file_like_object(self):
"""Test PDF with file-like object (simulating UploadedFile)."""
# Create a mock file-like object
class MockUploadedFile:
def __init__(self, data):
self._data = data
def read(self):
return self._data
mock_file = MockUploadedFile(self.DUMMY_PDF_BYTES)
st.pdf(mock_file)
element = self.get_delta_from_queue().new_element
assert element.bidi_component.component_name == "streamlit-pdf.pdf_viewer"
# Check that bytes are uploaded to media storage and passed as URL
json_args = json.loads(element.bidi_component.json)
assert json_args["file"].startswith("/media/") # Media URL
assert json_args["height"] == "500"
def test_pdf_with_path_object(self):
"""Test PDF with Path object."""
# Create a temporary file to test with
import os
import tempfile
with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as tmp_file:
tmp_file.write(self.DUMMY_PDF_BYTES)
tmp_file_path = tmp_file.name
try:
path_obj = Path(tmp_file_path)
st.pdf(path_obj)
element = self.get_delta_from_queue().new_element
assert element.bidi_component.component_name == "streamlit-pdf.pdf_viewer"
json_args = json.loads(element.bidi_component.json)
# For file paths, the content is uploaded to media storage
assert json_args["file"].startswith("/media/") # Media URL
assert json_args["height"] == "500"
finally:
# Clean up the temporary file
os.unlink(tmp_file_path)
def test_pdf_with_local_file_path_string(self):
"""Test PDF with local file path as string."""
# Create a temporary file to test with
import os
import tempfile
with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as tmp_file:
tmp_file.write(self.DUMMY_PDF_BYTES)
tmp_file_path = tmp_file.name
try:
st.pdf(tmp_file_path)
element = self.get_delta_from_queue().new_element
assert element.bidi_component.component_name == "streamlit-pdf.pdf_viewer"
json_args = json.loads(element.bidi_component.json)
# For file paths, the content is uploaded to media storage
assert json_args["file"].startswith("/media/") # Media URL
assert json_args["height"] == "500"
finally:
# Clean up the temporary file
os.unlink(tmp_file_path)
def test_pdf_with_invalid_file_path(self):
"""Test PDF with invalid file path."""
invalid_path = "/nonexistent/path/to/file.pdf"
with pytest.raises(
StreamlitAPIException, match=f"Unable to read file '{invalid_path}'"
):
st.pdf(invalid_path)
def test_pdf_with_none_data(self):
"""Test PDF with None data."""
with pytest.raises(StreamlitAPIException, match="The PDF data cannot be None"):
st.pdf(None)
def test_pdf_with_unsupported_data_type(self):
"""Test PDF with unsupported data type."""
unsupported_data = {"not": "supported"}
with pytest.raises(
StreamlitAPIException, match="Unsupported data type for PDF"
):
st.pdf(unsupported_data)
@parameterized.expand(
[
"invalid",
"content", # content is not allowed for PDF
-100,
0,
100.5,
]
)
def test_pdf_with_invalid_height(self, height):
"""Test PDF with invalid height values."""
url = "https://example.com/fake-document.pdf"
with pytest.raises(StreamlitAPIException) as e:
st.pdf(url, height=height)
assert "Invalid height" in str(e.value)
def test_pdf_height_as_integer_gets_stringified(self):
"""Test that integer height values are converted to strings for the component."""
url = "https://example.com/fake-document.pdf"
st.pdf(url, height=450)
element = self.get_delta_from_queue().new_element
json_args = json.loads(element.bidi_component.json)
# Component should receive height as string
assert json_args["height"] == "450"
assert isinstance(json_args["height"], str)
| PdfTest |
python | astropy__astropy | astropy/table/tests/test_table.py | {
"start": 25096,
"end": 34704
} | class ____(SetupData):
@property
def b(self):
if self._column_type is not None:
if not hasattr(self, "_b"):
self._b = self._column_type(name="b", data=[4.0, 5.1, 6.2])
return self._b
@property
def c(self):
if self._column_type is not None:
if not hasattr(self, "_c"):
self._c = self._column_type(name="c", data=["7", "8", "9"])
return self._c
@property
def d(self):
if self._column_type is not None:
if not hasattr(self, "_d"):
self._d = self._column_type(name="d", data=[[1, 2], [3, 4], [5, 6]])
return self._d
@property
def t(self):
if self._table_type is not None:
if not hasattr(self, "_t"):
self._t = self._table_type([self.a, self.b, self.c])
return self._t
def test_add_none_to_empty_table(self, table_types):
self._setup(table_types)
t = table_types.Table(names=("a", "b", "c"), dtype=("(2,)i", "S4", "O"))
t.add_row()
assert np.all(t["a"][0] == [0, 0])
assert t["b"][0] == ""
assert t["c"][0] == 0
t.add_row()
assert np.all(t["a"][1] == [0, 0])
assert t["b"][1] == ""
assert t["c"][1] == 0
def test_add_stuff_to_empty_table(self, table_types):
self._setup(table_types)
t = table_types.Table(names=("a", "b", "obj"), dtype=("(2,)i", "S8", "O"))
t.add_row([[1, 2], "hello", "world"])
assert np.all(t["a"][0] == [1, 2])
assert t["b"][0] == "hello"
assert t["obj"][0] == "world"
# Make sure it is not repeating last row but instead
# adding zeros (as documented)
t.add_row()
assert np.all(t["a"][1] == [0, 0])
assert t["b"][1] == ""
assert t["obj"][1] == 0
def test_add_table_row(self, table_types):
self._setup(table_types)
t = self.t
t["d"] = self.d
t2 = table_types.Table([self.a, self.b, self.c, self.d])
t.add_row(t2[0])
assert len(t) == 4
assert np.all(t["a"] == np.array([1, 2, 3, 1]))
assert np.allclose(t["b"], np.array([4.0, 5.1, 6.2, 4.0]))
assert np.all(t["c"] == np.array(["7", "8", "9", "7"]))
assert np.all(t["d"] == np.array([[1, 2], [3, 4], [5, 6], [1, 2]]))
def test_add_table_row_obj(self, table_types):
self._setup(table_types)
t = table_types.Table([self.a, self.b, self.obj])
t.add_row([1, 4.0, [10]])
assert len(t) == 4
assert np.all(t["a"] == np.array([1, 2, 3, 1]))
assert np.allclose(t["b"], np.array([4.0, 5.1, 6.2, 4.0]))
assert np.all(t["obj"] == np.array([1, "string", 3, [10]], dtype="O"))
def test_add_qtable_row_multidimensional(self):
q = [[1, 2], [3, 4]] * u.m
qt = table.QTable([q])
qt.add_row(([5, 6] * u.km,))
assert np.all(qt["col0"] == [[1, 2], [3, 4], [5000, 6000]] * u.m)
def test_add_with_tuple(self, table_types):
self._setup(table_types)
t = self.t
t.add_row((4, 7.2, "1"))
assert len(t) == 4
assert np.all(t["a"] == np.array([1, 2, 3, 4]))
assert np.allclose(t["b"], np.array([4.0, 5.1, 6.2, 7.2]))
assert np.all(t["c"] == np.array(["7", "8", "9", "1"]))
def test_add_with_list(self, table_types):
self._setup(table_types)
t = self.t
t.add_row([4, 7.2, "10"])
assert len(t) == 4
assert np.all(t["a"] == np.array([1, 2, 3, 4]))
assert np.allclose(t["b"], np.array([4.0, 5.1, 6.2, 7.2]))
assert np.all(t["c"] == np.array(["7", "8", "9", "10"]))
def test_add_with_dict(self, table_types):
self._setup(table_types)
t = self.t
t.add_row({"a": 4, "b": 7.2})
assert len(t) == 4
assert np.all(t["a"] == np.array([1, 2, 3, 4]))
assert np.allclose(t["b"], np.array([4.0, 5.1, 6.2, 7.2]))
if t.masked:
assert np.all(t["c"] == np.array(["7", "8", "9", "7"]))
else:
assert np.all(t["c"] == np.array(["7", "8", "9", ""]))
def test_add_with_none(self, table_types):
self._setup(table_types)
t = self.t
t.add_row()
assert len(t) == 4
assert np.all(t["a"].data == np.array([1, 2, 3, 0]))
assert np.allclose(t["b"], np.array([4.0, 5.1, 6.2, 0.0]))
assert np.all(t["c"].data == np.array(["7", "8", "9", ""]))
def test_add_missing_column(self, table_types):
self._setup(table_types)
t = self.t
with pytest.raises(ValueError):
t.add_row({"bad_column": 1})
def test_wrong_size_tuple(self, table_types):
self._setup(table_types)
t = self.t
with pytest.raises(ValueError):
t.add_row((1, 2))
def test_wrong_vals_type(self, table_types):
self._setup(table_types)
t = self.t
with pytest.raises(TypeError):
t.add_row(1)
def test_add_row_failures(self, table_types):
self._setup(table_types)
t = self.t
t_copy = table_types.Table(t, copy=True)
# Wrong number of columns
try:
t.add_row([1, 2, 3, 4])
except ValueError:
pass
assert len(t) == 3
assert np.all(t.as_array() == t_copy.as_array())
# Wrong data type
try:
t.add_row(["one", 2, 3])
except ValueError:
pass
assert len(t) == 3
assert np.all(t.as_array() == t_copy.as_array())
def test_insert_table_row(self, table_types):
"""
Light testing of Table.insert_row() method. The deep testing is done via
the add_row() tests which calls insert_row(index=len(self), ...), so
here just test that the added index parameter is handled correctly.
"""
self._setup(table_types)
row = (10, 40.0, "x", [10, 20])
for index in range(-3, 4):
indices = np.insert(np.arange(3), index, 3)
t = table_types.Table([self.a, self.b, self.c, self.d])
t2 = t.copy()
t.add_row(row) # By now we know this works
t2.insert_row(index, row)
for name in t.colnames:
if t[name].dtype.kind == "f":
assert np.allclose(t[name][indices], t2[name])
else:
assert np.all(t[name][indices] == t2[name])
for index in (-4, 4):
t = table_types.Table([self.a, self.b, self.c, self.d])
with pytest.raises(IndexError):
t.insert_row(index, row)
@pytest.mark.parametrize(
"table_type, table_inputs, expected_column_type, expected_pformat, insert_ctx",
[
pytest.param(
table.Table,
dict(names=["a", "b", "c"]),
table.Column,
[
" a b c ",
"--- --- ---",
"1.0 2.0 3.0",
],
pytest.warns(
UserWarning, match="Units from inserted quantities will be ignored."
),
id="Table-Column",
),
pytest.param(
table.QTable,
dict(names=["a", "b", "c"]),
table.Column,
[
" a b c ",
"--- --- ---",
"1.0 2.0 3.0",
],
pytest.warns(
UserWarning,
match=(
"Units from inserted quantities will be ignored.\n"
"If you were hoping to fill a QTable row by row, "
"also initialize the units before starting, for instance\n"
r"QTable\(names=\['a', 'b', 'c'\], units=\['m', 'kg', None\]\)"
),
),
id="QTable-Column",
),
pytest.param(
table.QTable,
dict(names=["a", "b", "c"], units=["m", "kg", None]),
u.Quantity,
[
" a b c ",
" m kg ",
"--- --- ---",
"1.0 2.0 3.0",
],
nullcontext(),
id="QTable-Quantity",
),
pytest.param(
table.QTable,
dict(names=["a", "b", "c"], units=["cm", "g", None]),
u.Quantity,
[
" a b c ",
" cm g ",
"----- ------ ---",
"100.0 2000.0 3.0",
],
nullcontext(),
id="QTable-Quantity-other_units",
),
],
)
def test_inserting_quantity_row_in_empty_table(
table_type, table_inputs, expected_column_type, expected_pformat, insert_ctx
):
# see https://github.com/astropy/astropy/issues/15964
table = table_type(**table_inputs)
pre_unit_a = copy.copy(table["a"].unit)
pre_unit_b = copy.copy(table["b"].unit)
pre_unit_c = copy.copy(table["c"].unit)
assert type(table["a"]) is expected_column_type
assert type(table["b"]) is expected_column_type
assert type(table["c"]) is Column
with insert_ctx:
table.add_row([1 * u.m, 2 * u.kg, 3])
assert table["a"].unit == pre_unit_a
assert table["b"].unit == pre_unit_b
assert table["c"].unit == pre_unit_c
assert type(table["a"]) is expected_column_type
assert type(table["b"]) is expected_column_type
assert type(table["c"]) is Column
assert table.pformat() == expected_pformat
@pytest.mark.usefixtures("table_types")
| TestAddRow |
python | doocs__leetcode | solution/2100-2199/2104.Sum of Subarray Ranges/Solution2.py | {
"start": 0,
"end": 816
} | class ____:
def subArrayRanges(self, nums: List[int]) -> int:
def f(nums):
stk = []
n = len(nums)
left = [-1] * n
right = [n] * n
for i, v in enumerate(nums):
while stk and nums[stk[-1]] <= v:
stk.pop()
if stk:
left[i] = stk[-1]
stk.append(i)
stk = []
for i in range(n - 1, -1, -1):
while stk and nums[stk[-1]] < nums[i]:
stk.pop()
if stk:
right[i] = stk[-1]
stk.append(i)
return sum((i - left[i]) * (right[i] - i) * v for i, v in enumerate(nums))
mx = f(nums)
mi = f([-v for v in nums])
return mx + mi
| Solution |
python | sympy__sympy | sympy/codegen/matrix_nodes.py | {
"start": 932,
"end": 2284
} | class ____(Token, MatrixExpr):
"""Represents an operation to solve a linear matrix equation.
Parameters
==========
matrix : MatrixSymbol
Matrix representing the coefficients of variables in the linear
equation. This matrix must be square and full-rank (i.e. all columns must
be linearly independent) for the solving operation to be valid.
vector : MatrixSymbol
One-column matrix representing the solutions to the equations
represented in ``matrix``.
Examples
========
>>> from sympy import symbols, MatrixSymbol
>>> from sympy.codegen.matrix_nodes import MatrixSolve
>>> n = symbols('n', integer=True)
>>> A = MatrixSymbol('A', n, n)
>>> x = MatrixSymbol('x', n, 1)
>>> from sympy.printing.numpy import NumPyPrinter
>>> NumPyPrinter().doprint(MatrixSolve(A, x))
'numpy.linalg.solve(A, x)'
>>> from sympy import octave_code
>>> octave_code(MatrixSolve(A, x))
'A \\\\ x'
"""
__slots__ = _fields = ('matrix', 'vector')
_construct_matrix = staticmethod(sympify)
_construct_vector = staticmethod(sympify)
@property
def shape(self):
return self.vector.shape
def _eval_derivative(self, x):
A, b = self.matrix, self.vector
return MatrixSolve(A, b.diff(x) - A.diff(x) * MatrixSolve(A, b))
| MatrixSolve |
python | celery__celery | t/unit/app/test_log.py | {
"start": 11531,
"end": 11820
} | class ____(logging.Logger):
_records = None
def __init__(self, *args, **kwargs):
self._records = []
super().__init__(*args, **kwargs)
def handle(self, record):
self._records.append(record)
def isEnabledFor(self, level):
return True
| MockLogger |
python | sympy__sympy | sympy/physics/biomechanics/tests/test_curve.py | {
"start": 53270,
"end": 62897
} | class ____:
@pytest.fixture(autouse=True)
def _muscle_fiber_force_velocity_arguments_fixture(self):
self.v_M_tilde = Symbol('v_M_tilde')
self.c0 = Symbol('c_0')
self.c1 = Symbol('c_1')
self.c2 = Symbol('c_2')
self.c3 = Symbol('c_3')
self.constants = (self.c0, self.c1, self.c2, self.c3)
@staticmethod
def test_class():
assert issubclass(FiberForceVelocityDeGroote2016, Function)
assert issubclass(FiberForceVelocityDeGroote2016, CharacteristicCurveFunction)
assert FiberForceVelocityDeGroote2016.__name__ == 'FiberForceVelocityDeGroote2016'
def test_instance(self):
fv_M = FiberForceVelocityDeGroote2016(self.v_M_tilde, *self.constants)
assert isinstance(fv_M, FiberForceVelocityDeGroote2016)
assert str(fv_M) == 'FiberForceVelocityDeGroote2016(v_M_tilde, c_0, c_1, c_2, c_3)'
def test_doit(self):
fv_M = FiberForceVelocityDeGroote2016(self.v_M_tilde, *self.constants).doit()
expected = (
self.c0 * log((self.c1 * self.v_M_tilde + self.c2)
+ sqrt((self.c1 * self.v_M_tilde + self.c2)**2 + 1)) + self.c3
)
assert fv_M == expected
def test_doit_evaluate_false(self):
fv_M = FiberForceVelocityDeGroote2016(self.v_M_tilde, *self.constants).doit(evaluate=False)
expected = (
self.c0 * log((self.c1 * self.v_M_tilde + self.c2)
+ sqrt(UnevaluatedExpr(self.c1 * self.v_M_tilde + self.c2)**2 + 1)) + self.c3
)
assert fv_M == expected
def test_with_defaults(self):
constants = (
Float('-0.318'),
Float('-8.149'),
Float('-0.374'),
Float('0.886'),
)
fv_M_manual = FiberForceVelocityDeGroote2016(self.v_M_tilde, *constants)
fv_M_constants = FiberForceVelocityDeGroote2016.with_defaults(self.v_M_tilde)
assert fv_M_manual == fv_M_constants
def test_differentiate_wrt_v_M_tilde(self):
fv_M = FiberForceVelocityDeGroote2016(self.v_M_tilde, *self.constants)
expected = (
self.c0*self.c1
/sqrt(UnevaluatedExpr(self.c1*self.v_M_tilde + self.c2)**2 + 1)
)
assert fv_M.diff(self.v_M_tilde) == expected
def test_differentiate_wrt_c0(self):
fv_M = FiberForceVelocityDeGroote2016(self.v_M_tilde, *self.constants)
expected = log(
self.c1*self.v_M_tilde + self.c2
+ sqrt(UnevaluatedExpr(self.c1*self.v_M_tilde + self.c2)**2 + 1)
)
assert fv_M.diff(self.c0) == expected
def test_differentiate_wrt_c1(self):
fv_M = FiberForceVelocityDeGroote2016(self.v_M_tilde, *self.constants)
expected = (
self.c0*self.v_M_tilde
/sqrt(UnevaluatedExpr(self.c1*self.v_M_tilde + self.c2)**2 + 1)
)
assert fv_M.diff(self.c1) == expected
def test_differentiate_wrt_c2(self):
fv_M = FiberForceVelocityDeGroote2016(self.v_M_tilde, *self.constants)
expected = (
self.c0
/sqrt(UnevaluatedExpr(self.c1*self.v_M_tilde + self.c2)**2 + 1)
)
assert fv_M.diff(self.c2) == expected
def test_differentiate_wrt_c3(self):
fv_M = FiberForceVelocityDeGroote2016(self.v_M_tilde, *self.constants)
expected = Integer(1)
assert fv_M.diff(self.c3) == expected
def test_inverse(self):
fv_M = FiberForceVelocityDeGroote2016(self.v_M_tilde, *self.constants)
assert fv_M.inverse() is FiberForceVelocityInverseDeGroote2016
def test_function_print_latex(self):
fv_M = FiberForceVelocityDeGroote2016(self.v_M_tilde, *self.constants)
expected = r'\operatorname{fv}^M \left( v_{M tilde} \right)'
assert LatexPrinter().doprint(fv_M) == expected
def test_expression_print_latex(self):
fv_M = FiberForceVelocityDeGroote2016(self.v_M_tilde, *self.constants)
expected = (
r'c_{0} \log{\left(c_{1} v_{M tilde} + c_{2} + \sqrt{\left(c_{1} '
r'v_{M tilde} + c_{2}\right)^{2} + 1} \right)} + c_{3}'
)
assert LatexPrinter().doprint(fv_M.doit()) == expected
@pytest.mark.parametrize(
'code_printer, expected',
[
(
C89CodePrinter,
'(0.88600000000000001 - 0.318*log(-8.1489999999999991*v_M_tilde '
'- 0.374 + sqrt(1 + pow(-8.1489999999999991*v_M_tilde - 0.374, 2))))',
),
(
C99CodePrinter,
'(0.88600000000000001 - 0.318*log(-8.1489999999999991*v_M_tilde '
'- 0.374 + sqrt(1 + pow(-8.1489999999999991*v_M_tilde - 0.374, 2))))',
),
(
C11CodePrinter,
'(0.88600000000000001 - 0.318*log(-8.1489999999999991*v_M_tilde '
'- 0.374 + sqrt(1 + pow(-8.1489999999999991*v_M_tilde - 0.374, 2))))',
),
(
CXX98CodePrinter,
'(0.88600000000000001 - 0.318*log(-8.1489999999999991*v_M_tilde '
'- 0.374 + std::sqrt(1 + std::pow(-8.1489999999999991*v_M_tilde - 0.374, 2))))',
),
(
CXX11CodePrinter,
'(0.88600000000000001 - 0.318*std::log(-8.1489999999999991*v_M_tilde '
'- 0.374 + std::sqrt(1 + std::pow(-8.1489999999999991*v_M_tilde - 0.374, 2))))',
),
(
CXX17CodePrinter,
'(0.88600000000000001 - 0.318*std::log(-8.1489999999999991*v_M_tilde '
'- 0.374 + std::sqrt(1 + std::pow(-8.1489999999999991*v_M_tilde - 0.374, 2))))',
),
(
FCodePrinter,
' (0.886d0 - 0.318d0*log(-8.1489999999999991d0*v_M_tilde - 0.374d0 +\n'
' @ sqrt(1.0d0 + (-8.149d0*v_M_tilde - 0.374d0)**2)))',
),
(
OctaveCodePrinter,
'(0.886 - 0.318*log(-8.149*v_M_tilde - 0.374 '
'+ sqrt(1 + (-8.149*v_M_tilde - 0.374).^2)))',
),
(
PythonCodePrinter,
'(0.886 - 0.318*math.log(-8.149*v_M_tilde - 0.374 '
'+ math.sqrt(1 + (-8.149*v_M_tilde - 0.374)**2)))',
),
(
NumPyPrinter,
'(0.886 - 0.318*numpy.log(-8.149*v_M_tilde - 0.374 '
'+ numpy.sqrt(1 + (-8.149*v_M_tilde - 0.374)**2)))',
),
(
SciPyPrinter,
'(0.886 - 0.318*numpy.log(-8.149*v_M_tilde - 0.374 '
'+ numpy.sqrt(1 + (-8.149*v_M_tilde - 0.374)**2)))',
),
(
CuPyPrinter,
'(0.886 - 0.318*cupy.log(-8.149*v_M_tilde - 0.374 '
'+ cupy.sqrt(1 + (-8.149*v_M_tilde - 0.374)**2)))',
),
(
JaxPrinter,
'(0.886 - 0.318*jax.numpy.log(-8.149*v_M_tilde - 0.374 '
'+ jax.numpy.sqrt(1 + (-8.149*v_M_tilde - 0.374)**2)))',
),
(
MpmathPrinter,
'(mpmath.mpf((0, 7980378539700519, -53, 53)) '
'- mpmath.mpf((0, 5728578726015271, -54, 53))'
'*mpmath.log(-mpmath.mpf((0, 4587479170430271, -49, 53))*v_M_tilde '
'+ mpmath.mpf((1, 3368692521273131, -53, 52)) '
'+ mpmath.sqrt(1 + (-mpmath.mpf((0, 4587479170430271, -49, 53))*v_M_tilde '
'+ mpmath.mpf((1, 3368692521273131, -53, 52)))**2)))',
),
(
LambdaPrinter,
'(0.886 - 0.318*math.log(-8.149*v_M_tilde - 0.374 '
'+ sqrt(1 + (-8.149*v_M_tilde - 0.374)**2)))',
),
]
)
def test_print_code(self, code_printer, expected):
fv_M = FiberForceVelocityDeGroote2016.with_defaults(self.v_M_tilde)
assert code_printer().doprint(fv_M) == expected
def test_derivative_print_code(self):
fv_M = FiberForceVelocityDeGroote2016.with_defaults(self.v_M_tilde)
dfv_M_dv_M_tilde = fv_M.diff(self.v_M_tilde)
expected = '2.591382*(1 + (-8.149*v_M_tilde - 0.374)**2)**(-1/2)'
assert PythonCodePrinter().doprint(dfv_M_dv_M_tilde) == expected
def test_lambdify(self):
fv_M = FiberForceVelocityDeGroote2016.with_defaults(self.v_M_tilde)
fv_M_callable = lambdify(self.v_M_tilde, fv_M)
assert fv_M_callable(0.0) == pytest.approx(1.002320622548512)
@pytest.mark.skipif(numpy is None, reason='NumPy not installed')
def test_lambdify_numpy(self):
fv_M = FiberForceVelocityDeGroote2016.with_defaults(self.v_M_tilde)
fv_M_callable = lambdify(self.v_M_tilde, fv_M, 'numpy')
v_M_tilde = numpy.array([-1.0, -0.5, 0.0, 0.5])
expected = numpy.array([
0.0120816781,
0.2438336294,
1.0023206225,
1.5850003903,
])
numpy.testing.assert_allclose(fv_M_callable(v_M_tilde), expected)
@pytest.mark.skipif(jax is None, reason='JAX not installed')
def test_lambdify_jax(self):
fv_M = FiberForceVelocityDeGroote2016.with_defaults(self.v_M_tilde)
fv_M_callable = jax.jit(lambdify(self.v_M_tilde, fv_M, 'jax'))
v_M_tilde = jax.numpy.array([-1.0, -0.5, 0.0, 0.5])
expected = jax.numpy.array([
0.0120816781,
0.2438336294,
1.0023206225,
1.5850003903,
])
numpy.testing.assert_allclose(fv_M_callable(v_M_tilde), expected)
| TestFiberForceVelocityDeGroote2016 |
python | viewflow__viewflow | tests/components/test_field_input.py | {
"start": 298,
"end": 1016
} | class ____(LiveTestCase):
def test_field_input(self):
self.browser.get(f"{self.live_server_url}/application/form/")
self.assertNoJsErrors()
input = self.browser.find_element(By.CSS_SELECTOR, "vf-field-input input")
label = self.browser.find_element(By.CSS_SELECTOR, "vf-field-input label")
label_classes = label.get_attribute("class").split(" ")
self.assertNotIn("mdc-text-field--float-above", label_classes)
input.click()
label_classes = label.get_attribute("class").split(" ")
self.assertIn("mdc-text-field--focused", label_classes)
self.assertIn("mdc-text-field--label-floating", label_classes)
self.assertNoJsErrors()
| Test |
python | facebookresearch__faiss | tests/common_faiss_tests.py | {
"start": 1391,
"end": 3836
} | class ____(Randu10k):
def __init__(self):
Randu10k.__init__(self)
weights = 0.95 ** np.arange(self.d)
rs = np.random.RandomState(123)
weights = weights[rs.permutation(self.d)]
self.xb *= weights
self.xb /= np.linalg.norm(self.xb, axis=1)[:, np.newaxis]
self.xq *= weights
self.xq /= np.linalg.norm(self.xq, axis=1)[:, np.newaxis]
self.xt *= weights
self.xt /= np.linalg.norm(self.xt, axis=1)[:, np.newaxis]
dotprods = np.dot(self.xq, self.xb.T)
self.gt = dotprods.argmax(1)
self.k = 100
def get_dataset(d, nb, nt, nq):
rs = np.random.RandomState(123)
xb = rs.rand(nb, d).astype('float32')
xt = rs.rand(nt, d).astype('float32')
xq = rs.rand(nq, d).astype('float32')
return (xt, xb, xq)
def get_dataset_2(d, nt, nb, nq):
"""A dataset that is not completely random but still challenging to
index
"""
d1 = 10 # intrinsic dimension (more or less)
n = nb + nt + nq
rs = np.random.RandomState(1338)
x = rs.normal(size=(n, d1))
x = np.dot(x, rs.rand(d1, d))
# now we have a d1-dim ellipsoid in d-dimensional space
# higher factor (>4) -> higher frequency -> less linear
x = x * (rs.rand(d) * 4 + 0.1)
x = np.sin(x)
x = x.astype('float32')
return x[:nt], x[nt:nt + nb], x[nt + nb:]
def make_binary_dataset(d, nt, nb, nq):
assert d % 8 == 0
rs = np.random.RandomState(123)
x = rs.randint(256, size=(nb + nq + nt, int(d / 8))).astype('uint8')
return x[:nt], x[nt:-nq], x[-nq:]
def compare_binary_result_lists(D1, I1, D2, I2):
"""comparing result lists is difficult because there are many
ties. Here we sort by (distance, index) pairs and ignore the largest
distance of each result. Compatible result lists should pass this."""
assert D1.shape == I1.shape == D2.shape == I2.shape
n, k = D1.shape
ndiff = (D1 != D2).sum()
assert ndiff == 0, '%d differences in distance matrix %s' % (
ndiff, D1.shape)
def normalize_DI(D, I):
norm = I.max() + 1.0
Dr = D.astype('float64') + I / norm
# ignore -1s and elements on last column
Dr[I1 == -1] = 1e20
Dr[D == D[:, -1:]] = 1e20
Dr.sort(axis=1)
return Dr
ndiff = (normalize_DI(D1, I1) != normalize_DI(D2, I2)).sum()
assert ndiff == 0, '%d differences in normalized D matrix' % ndiff
| Randu10kUnbalanced |
python | sphinx-doc__sphinx | sphinx/domains/cpp/_ast.py | {
"start": 17745,
"end": 18888
} | class ____(ASTLiteral):
def __init__(self, literal: ASTLiteral, ident: ASTIdentifier) -> None:
self.literal = literal
self.ident = ident
def __eq__(self, other: object) -> bool:
if not isinstance(other, ASTUserDefinedLiteral):
return NotImplemented
return self.literal == other.literal and self.ident == other.ident
def __hash__(self) -> int:
return hash((self.literal, self.ident))
def _stringify(self, transform: StringifyTransform) -> str:
return transform(self.literal) + transform(self.ident)
def get_id(self, version: int) -> str:
# mangle as if it was a function call: ident(literal)
return f'clL_Zli{self.ident.get_id(version)}E{self.literal.get_id(version)}E'
def describe_signature(
self, signode: TextElement, mode: str, env: BuildEnvironment, symbol: Symbol
) -> None:
self.literal.describe_signature(signode, mode, env, symbol)
self.ident.describe_signature(signode, 'udl', env, '', '', symbol)
################################################################################
| ASTUserDefinedLiteral |
python | getsentry__sentry | fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/migrations/0001_initial.py | {
"start": 145,
"end": 274
} | class ____(CheckedMigration):
initial = True
dependencies = []
operations = [migrations.RunSQL("select 1;")]
| Migration |
python | OmkarPathak__pygorithm | tests/test_binary.py | {
"start": 2181,
"end": 3553
} | class ____(unittest.TestCase):
def test_base16_to_base2(self):
self.assertEqual(base16.to_base2('DEADBEEF'), 11011110101011011011111011101111)
self.assertEqual(base16.to_base2('FFFFFFFFFFFFFFF'),
111111111111111111111111111111111111111111111111111111111111)
self.assertEqual(base16.to_base2('23F235E865A45C'), 100011111100100011010111101000011001011010010001011100)
def test_base16_to_base10(self):
self.assertEqual(base16.to_base10('DEADBEEF'), 3735928559)
self.assertEqual(base16.to_base10('FFFFFFFFFFFFFFF'), 1152921504606846976)
self.assertEqual(base16.to_base10('23F235E865A45C'), 10117937531036764)
def test_base16_to_ascii(self):
array = ['54', '68', '65', '20', '51', '75', '69', '63', '6B', '20', '42', '72', '6F', '77', '6E', '20', '46',
'6F', '78', '20', '4A', '75', '6D', '70', '73', '20', '4F', '76', '65', '72', '20', '74', '68', '65',
'20', '4C', '61', '7A', '79', '20', '44', '6F', '67']
array_2 = ['77', '48', '40', '74', '20', '5F', '54', '2D', '68', '33', '20', '2F', '2F', '2D', '46', '3D', '7E',
'21', '63', '6B']
self.assertEqual(base16.to_ascii(array), "The Quick Brown Fox Jumps Over the Lazy Dog")
self.assertEqual(base16.to_ascii(array_2), "wH@t _T-h3 //-F=~!ck")
| TestBase16 |
python | dagster-io__dagster | python_modules/dagster/dagster/_core/types/dagster_type.py | {
"start": 16106,
"end": 17686
} | class ____(DagsterType):
def __init__(self):
super(_Nothing, self).__init__(
key="Nothing",
name="Nothing",
kind=DagsterTypeKind.NOTHING,
loader=None,
type_check_fn=self.type_check_method,
is_builtin=True,
typing_type=type(None),
)
def type_check_method(
self, _context: "TypeCheckContext", value: object
) -> TypeCheck:
if value is not None and value != NoValueSentinel:
return TypeCheck(
success=False,
description=f"Value must be None or unset, got a {type(value)}",
)
return TypeCheck(success=True)
@property
def supports_fan_in(self) -> bool:
return True
def get_inner_type_for_fan_in(self) -> DagsterType:
return self
def isinstance_type_check_fn(
expected_python_type: t.Union[t.Type, t.Tuple[t.Type, ...]],
dagster_type_name: str,
expected_python_type_str: str,
) -> TypeCheckFn:
def type_check(_context: "TypeCheckContext", value: object) -> TypeCheck:
if not isinstance(value, expected_python_type):
return TypeCheck(
success=False,
description=(
f"Value of type {type(value)} failed type check for Dagster type"
f" {dagster_type_name}, expected value to be of Python type"
f" {expected_python_type_str}."
),
)
return TypeCheck(success=True)
return type_check
@public
| _Nothing |
python | huggingface__transformers | src/transformers/models/perceiver/modeling_perceiver.py | {
"start": 86778,
"end": 88287
} | class ____(PerceiverAbstractDecoder):
"""Cross-attention based optical flow decoder."""
def __init__(self, config, output_image_shape, output_num_channels=2, rescale_factor=100.0, **decoder_kwargs):
super().__init__()
self.output_image_shape = output_image_shape
self.output_num_channels = output_num_channels
self.rescale_factor = rescale_factor
self.decoder = PerceiverBasicDecoder(config, output_num_channels=output_num_channels, **decoder_kwargs)
@property
def num_query_channels(self) -> int:
return self.decoder.num_query_channels
def decoder_query(self, inputs, modality_sizes=None, inputs_without_pos=None, subsampled_points=None):
if subsampled_points is not None:
raise ValueError("FlowDecoder doesn't support subsampling yet.")
return inputs
def forward(
self,
query: torch.Tensor,
z: torch.FloatTensor,
query_mask: Optional[torch.FloatTensor] = None,
output_attentions: Optional[bool] = False,
) -> PerceiverDecoderOutput:
decoder_outputs = self.decoder(query, z, output_attentions=output_attentions)
preds = decoder_outputs.logits
# Output flow and rescale.
preds /= self.rescale_factor
preds = preds.reshape([preds.shape[0]] + list(self.output_image_shape) + [preds.shape[-1]])
return PerceiverDecoderOutput(logits=preds, cross_attentions=decoder_outputs.cross_attentions)
| PerceiverOpticalFlowDecoder |
python | rapidsai__cudf | python/cudf/cudf/core/udf/masked_typing.py | {
"start": 19746,
"end": 19974
} | class ____(AbstractTemplate):
key = "MaskedType.count"
def generic(self, args, kws):
return nb_signature(
MaskedType(size_type), MaskedType(string_view), recvr=self.this
)
| MaskedStringViewCount |
python | google__jax | jax/_src/numpy/array_api_metadata.py | {
"start": 1596,
"end": 3617
} | class ____:
"""Metadata for the `Python array API`_
.. _Python array API: https://data-apis.org/array-api/
"""
_capabilities = {
"boolean indexing": False, # within transformations
"data-dependent shapes": False, # within transformations
"max dimensions": 64, # XLA limitation
}
def _build_dtype_dict(self):
array_api_types = {
"bool", "int8", "int16",
"int32", "uint8", "uint16",
"uint32", "float32", "complex64"
}
if config.enable_x64.value:
array_api_types |= {"int64", "uint64", "float64", "complex128"}
return {category: {t.name: t for t in types if t.name in array_api_types}
for category, types in _dtypes._dtype_kinds.items()}
def default_device(self):
# By default JAX arrays are uncommitted (device=None), meaning that
# JAX is free to choose the most efficient device placement.
return None
def devices(self):
out = [None] # None indicates "uncommitted"
for backend in xb.backends():
out.extend(xb.devices(backend))
return out
def capabilities(self):
return self._capabilities
def default_dtypes(self, *, device: xc.Device | Sharding | None = None):
# Array API supported dtypes are device-independent in JAX
del device
return {
"real floating": _dtypes.default_float_dtype(),
"complex floating": _dtypes.default_complex_dtype(),
"integral": _dtypes.default_int_dtype(),
"indexing": _dtypes.default_int_dtype(),
}
def dtypes(
self, *,
device: xc.Device | Sharding | None = None,
kind: str | tuple[str, ...] | None = None):
# Array API supported dtypes are device-independent in JAX
del device
data_types = self._build_dtype_dict()
if kind is None:
out_dict = data_types["numeric"] | data_types["bool"]
elif isinstance(kind, tuple):
out_dict = {}
for _kind in kind:
out_dict |= data_types[_kind]
else:
out_dict = data_types[kind]
return out_dict
| ArrayNamespaceInfo |
python | matplotlib__matplotlib | lib/matplotlib/tests/test_cbook.py | {
"start": 524,
"end": 2000
} | class ____:
def test_bad_first_arg(self):
with pytest.raises(ValueError):
delete_masked_points('a string', np.arange(1.0, 7.0))
def test_string_seq(self):
a1 = ['a', 'b', 'c', 'd', 'e', 'f']
a2 = [1, 2, 3, np.nan, np.nan, 6]
result1, result2 = delete_masked_points(a1, a2)
ind = [0, 1, 2, 5]
assert_array_equal(result1, np.array(a1)[ind])
assert_array_equal(result2, np.array(a2)[ind])
def test_datetime(self):
dates = [datetime(2008, 1, 1), datetime(2008, 1, 2),
datetime(2008, 1, 3), datetime(2008, 1, 4),
datetime(2008, 1, 5), datetime(2008, 1, 6)]
a_masked = np.ma.array([1, 2, 3, np.nan, np.nan, 6],
mask=[False, False, True, True, False, False])
actual = delete_masked_points(dates, a_masked)
ind = [0, 1, 5]
assert_array_equal(actual[0], np.array(dates)[ind])
assert_array_equal(actual[1], a_masked[ind].compressed())
def test_rgba(self):
a_masked = np.ma.array([1, 2, 3, np.nan, np.nan, 6],
mask=[False, False, True, True, False, False])
a_rgba = mcolors.to_rgba_array(['r', 'g', 'b', 'c', 'm', 'y'])
actual = delete_masked_points(a_masked, a_rgba)
ind = [0, 1, 5]
assert_array_equal(actual[0], a_masked[ind].compressed())
assert_array_equal(actual[1], a_rgba[ind])
| Test_delete_masked_points |
python | getsentry__sentry | src/sentry/testutils/helpers/apigateway.py | {
"start": 1334,
"end": 4428
} | class ____(Endpoint):
permission_classes: tuple[type[BasePermission], ...] = (AllowAny,)
def get(self, request: Request) -> Response:
return Response({"proxy": False})
urlpatterns = [
re_path(
r"^organizations/(?P<organization_slug>[^/]+)/control/$",
ControlEndpoint.as_view(),
name="control-endpoint",
),
re_path(
r"^organizations/(?P<organization_slug>[^/]+)/region/$",
RegionEndpoint.as_view(),
name="region-endpoint",
),
re_path(
r"^organizations/(?P<organization_id_or_slug>[^/]+)/control/$",
ControlEndpoint.as_view(),
name="control-endpoint-id-or-slug",
),
re_path(
r"^organizations/(?P<organization_id_or_slug>[^/]+)/region/$",
RegionEndpoint.as_view(),
name="region-endpoint-id-or-slug",
),
re_path(
r"^api/embed/error-page/$",
RegionEndpoint.as_view(),
name="sentry-error-page-embed",
),
] + api_urls.urlpatterns
def verify_request_body(body, headers):
"""Wrapper for a callback function for responses.add_callback"""
def request_callback(request):
if request.headers.get("content-type") == "application/json":
assert json.load(request.body) == body
else:
assert request.body.read() == body
assert (request.headers[key] == headers[key] for key in headers)
return 200, {}, json.dumps({"proxy": True})
return request_callback
def verify_request_headers(headers):
"""Wrapper for a callback function for responses.add_callback"""
def request_callback(request):
assert (request.headers[key] == headers[key] for key in headers)
return 200, {}, json.dumps({"proxy": True})
return request_callback
def verify_request_params(params, headers):
"""Wrapper for a callback function for responses.add_callback"""
def request_callback(request):
request_params = parse_qs(request.url.split("?")[1])
assert (request.headers[key] == headers[key] for key in headers)
for key in params:
assert key in request_params
if len(request_params[key]) > 1:
assert request_params[key] == params[key]
else:
assert request_params[key][0] == params[key]
return 200, {}, json.dumps({"proxy": True})
return request_callback
def verify_file_body(file_body, headers):
"""Wrapper for a callback function for responses.add_callback"""
def request_callback(request):
assert file_body in request.body.read()
assert (request.headers[key] == headers[key] for key in headers)
return 200, {}, json.dumps({"proxy": True})
return request_callback
def provision_middleware():
middleware = list(settings.MIDDLEWARE)
if "sentry.hybridcloud.apigateway.middleware.ApiGatewayMiddleware" not in middleware:
middleware = ["sentry.hybridcloud.apigateway.middleware.ApiGatewayMiddleware"] + middleware
return middleware
@override_settings(ROOT_URLCONF=__name__)
| NoOrgRegionEndpoint |
python | pytest-dev__pytest | src/_pytest/fixtures.py | {
"start": 43827,
"end": 47196
} | class ____(FixtureDef[FixtureRequest]):
"""A custom FixtureDef for the special "request" fixture.
A new one is generated on-demand whenever "request" is requested.
"""
def __init__(self, request: FixtureRequest) -> None:
super().__init__(
config=request.config,
baseid=None,
argname="request",
func=lambda: request,
scope=Scope.Function,
params=None,
_ispytest=True,
)
self.cached_result = (request, [0], None)
def addfinalizer(self, finalizer: Callable[[], object]) -> None:
pass
def resolve_fixture_function(
fixturedef: FixtureDef[FixtureValue], request: FixtureRequest
) -> _FixtureFunc[FixtureValue]:
"""Get the actual callable that can be called to obtain the fixture
value."""
fixturefunc = fixturedef.func
# The fixture function needs to be bound to the actual
# request.instance so that code working with "fixturedef" behaves
# as expected.
instance = request.instance
if instance is not None:
# Handle the case where fixture is defined not in a test class, but some other class
# (for example a plugin class with a fixture), see #2270.
if hasattr(fixturefunc, "__self__") and not isinstance(
instance,
fixturefunc.__self__.__class__,
):
return fixturefunc
fixturefunc = getimfunc(fixturedef.func)
if fixturefunc != fixturedef.func:
fixturefunc = fixturefunc.__get__(instance)
return fixturefunc
def pytest_fixture_setup(
fixturedef: FixtureDef[FixtureValue], request: SubRequest
) -> FixtureValue:
"""Execution of fixture setup."""
kwargs = {}
for argname in fixturedef.argnames:
kwargs[argname] = request.getfixturevalue(argname)
fixturefunc = resolve_fixture_function(fixturedef, request)
my_cache_key = fixturedef.cache_key(request)
if inspect.isasyncgenfunction(fixturefunc) or inspect.iscoroutinefunction(
fixturefunc
):
auto_str = " with autouse=True" if fixturedef._autouse else ""
warnings.warn(
PytestRemovedIn9Warning(
f"{request.node.name!r} requested an async fixture "
f"{request.fixturename!r}{auto_str}, with no plugin or hook that "
"handled it. This is usually an error, as pytest does not natively "
"support it. "
"This will turn into an error in pytest 9.\n"
"See: https://docs.pytest.org/en/stable/deprecations.html#sync-test-depending-on-async-fixture"
),
# no stacklevel will point at users code, so we just point here
stacklevel=1,
)
try:
result = call_fixture_func(fixturefunc, request, kwargs)
except TEST_OUTCOME as e:
if isinstance(e, skip.Exception):
# The test requested a fixture which caused a skip.
# Don't show the fixture as the skip location, as then the user
# wouldn't know which test skipped.
e._use_item_location = True
fixturedef.cached_result = (None, my_cache_key, (e, e.__traceback__))
raise
fixturedef.cached_result = (result, my_cache_key, None)
return result
@final
@dataclasses.dataclass(frozen=True)
| RequestFixtureDef |
python | huggingface__transformers | src/transformers/time_series_utils.py | {
"start": 5790,
"end": 6190
} | class ____(DistributionOutput):
"""
Normal distribution output class.
"""
args_dim: dict[str, int] = {"loc": 1, "scale": 1}
distribution_class: type = Normal
@classmethod
def domain_map(cls, loc: torch.Tensor, scale: torch.Tensor):
scale = cls.squareplus(scale).clamp_min(torch.finfo(scale.dtype).eps)
return loc.squeeze(-1), scale.squeeze(-1)
| NormalOutput |
python | cherrypy__cherrypy | cherrypy/_cptools.py | {
"start": 11487,
"end": 14174
} | class ____(object):
"""A Controller (page handler collection) for XML-RPC.
To use it, have your controllers subclass this base class (it will
turn on the tool for you).
You can also supply the following optional config entries::
tools.xmlrpc.encoding: 'utf-8'
tools.xmlrpc.allow_none: 0
XML-RPC is a rather discontinuous layer over HTTP; dispatching to the
appropriate handler must first be performed according to the URL, and
then a second dispatch step must take place according to the RPC method
specified in the request body. It also allows a superfluous "/RPC2"
prefix in the URL, supplies its own handler args in the body, and
requires a 200 OK "Fault" response instead of 404 when the desired
method is not found.
Therefore, XML-RPC cannot be implemented for CherryPy via a Tool alone.
This Controller acts as the dispatch target for the first half (based
on the URL); it then reads the RPC method from the request body and
does its own second dispatch step based on that method. It also reads
body params, and returns a Fault on error.
The XMLRPCDispatcher strips any /RPC2 prefix; if you aren't using /RPC2
in your URL's, you can safely skip turning on the XMLRPCDispatcher.
Otherwise, you need to use declare it in config::
request.dispatch: cherrypy.dispatch.XMLRPCDispatcher()
"""
# Note we're hard-coding this into the 'tools' namespace. We could do
# a huge amount of work to make it relocatable, but the only reason why
# would be if someone actually disabled the default_toolbox. Meh.
_cp_config = {'tools.xmlrpc.on': True}
@expose
def default(self, *vpath, **params):
"""Process the unhandled XML-RPC methods."""
rpcparams, rpcmethod = _xmlrpc.process_body()
subhandler = self
for attr in str(rpcmethod).split('.'):
subhandler = getattr(subhandler, attr, None)
if subhandler and getattr(subhandler, 'exposed', False):
body = subhandler(*(vpath + rpcparams), **params)
else:
# https://github.com/cherrypy/cherrypy/issues/533
# if a method is not found, an xmlrpclib.Fault should be returned
# raising an exception here will do that; see
# cherrypy.lib.xmlrpcutil.on_error
raise Exception('method "%s" is not supported' % attr)
conf = cherrypy.serving.request.toolmaps['tools'].get('xmlrpc', {})
_xmlrpc.respond(
body,
conf.get('encoding', 'utf-8'),
conf.get('allow_none', 0),
)
return cherrypy.serving.response.body
| XMLRPCController |
python | apache__airflow | providers/google/src/airflow/providers/google/cloud/links/cloud_functions.py | {
"start": 1239,
"end": 1480
} | class ____(BaseGoogleLink):
"""Helper class for constructing Cloud Functions Details Link."""
name = "Cloud Functions Details"
key = "cloud_functions_details"
format_str = CLOUD_FUNCTIONS_DETAILS_LINK
| CloudFunctionsDetailsLink |
python | pytorch__pytorch | torch/autograd/profiler_util.py | {
"start": 39132,
"end": 39472
} | class ____(defaultdict):
def __missing__(self, key):
# manage cases like 't' (demangled to 'unsigned short') separately,
# for now simply check the length to avoid unexpected results for
# the short sequences
self[key] = torch._C._demangle(key) if len(key) > 1 else key
return self[key]
| StringTable |
python | keras-team__keras | keras/src/ops/numpy.py | {
"start": 179393,
"end": 180422
} | class ____(Operation):
def __init__(self, shift, axis=None, *, name=None):
super().__init__(name=name)
self.shift = shift
self.axis = axis
def call(self, x):
return backend.numpy.roll(x, self.shift, self.axis)
def compute_output_spec(self, x):
return KerasTensor(x.shape, dtype=x.dtype)
@keras_export(["keras.ops.roll", "keras.ops.numpy.roll"])
def roll(x, shift, axis=None):
"""Roll tensor elements along a given axis.
Elements that roll beyond the last position are re-introduced at the first.
Args:
x: Input tensor.
shift: The number of places by which elements are shifted.
axis: The axis along which elements are shifted. By default, the
array is flattened before shifting, after which the original
shape is restored.
Returns:
Output tensor.
"""
if any_symbolic_tensors((x,)):
return Roll(shift, axis=axis).symbolic_call(x)
return backend.numpy.roll(x, shift, axis=axis)
| Roll |
python | django__django | tests/backends/oracle/test_introspection.py | {
"start": 222,
"end": 3638
} | class ____(TransactionTestCase):
available_apps = []
def test_get_sequences(self):
with connection.cursor() as cursor:
seqs = connection.introspection.get_sequences(
cursor, Square._meta.db_table, Square._meta.local_fields
)
self.assertEqual(len(seqs), 1)
self.assertIsNotNone(seqs[0]["name"])
self.assertEqual(seqs[0]["table"], Square._meta.db_table)
self.assertEqual(seqs[0]["column"], "id")
def test_get_sequences_manually_created_index(self):
with connection.cursor() as cursor:
with connection.schema_editor() as editor:
editor._drop_identity(Square._meta.db_table, "id")
seqs = connection.introspection.get_sequences(
cursor, Square._meta.db_table, Square._meta.local_fields
)
self.assertEqual(
seqs, [{"table": Square._meta.db_table, "column": "id"}]
)
# Recreate model, because adding identity is impossible.
editor.delete_model(Square)
editor.create_model(Square)
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_get_table_description_view_default_collation(self):
person_table = connection.introspection.identifier_converter(
Person._meta.db_table
)
first_name_column = connection.ops.quote_name(
Person._meta.get_field("first_name").column
)
person_view = connection.introspection.identifier_converter("TEST_PERSON_VIEW")
with connection.cursor() as cursor:
cursor.execute(
f"CREATE VIEW {person_view} "
f"AS SELECT {first_name_column} FROM {person_table}"
)
try:
columns = connection.introspection.get_table_description(
cursor, person_view
)
self.assertEqual(len(columns), 1)
self.assertIsNone(columns[0].collation)
finally:
cursor.execute(f"DROP VIEW {person_view}")
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_get_table_description_materialized_view_non_default_collation(self):
person_table = connection.introspection.identifier_converter(
Person._meta.db_table
)
first_name_column = connection.ops.quote_name(
Person._meta.get_field("first_name").column
)
person_mview = connection.introspection.identifier_converter(
"TEST_PERSON_MVIEW"
)
collation = connection.features.test_collations.get("ci")
with connection.cursor() as cursor:
cursor.execute(
f"CREATE MATERIALIZED VIEW {person_mview} "
f"DEFAULT COLLATION {collation} "
f"AS SELECT {first_name_column} FROM {person_table}"
)
try:
columns = connection.introspection.get_table_description(
cursor, person_mview
)
self.assertEqual(len(columns), 1)
self.assertIsNotNone(columns[0].collation)
self.assertNotEqual(columns[0].collation, collation)
finally:
cursor.execute(f"DROP MATERIALIZED VIEW {person_mview}")
| DatabaseSequenceTests |
python | ZoranPandovski__al-go-rithms | data_structures/b_tree/Python/b_tree.py | {
"start": 674,
"end": 3364
} | class ____(object):
def __init__(self, t):
self.root = BTreeNode(leaf=True)
self.t = t
def search(self, k, x=None):
"""Search the B-Tree for the key k.
args
=====================
k : Key to search for
x : (optional) Node at which to begin search. Can be None, in which case the entire tree is searched.
"""
if isinstance(x, BTreeNode):
i = 0
while i < len(x.keys) and k > x.keys[i]: # look for index of k
i += 1
if i < len(x.keys) and k == x.keys[i]: # found exact match
return (x, i)
elif x.leaf: # no match in keys, and is leaf ==> no match exists
return None
else: # search children
return self.search(k, x.c[i])
else: # no node provided, search root of tree
return self.search(k, self.root)
def insert(self, k):
r = self.root
if len(r.keys) == (2*self.t) - 1: # keys are full, so we must split
s = BTreeNode()
self.root = s
s.c.insert(0, r) # former root is now 0th child of new root s
self._split_child(s, 0)
self._insert_nonfull(s, k)
else:
self._insert_nonfull(r, k)
def _insert_nonfull(self, x, k):
i = len(x.keys) - 1
if x.leaf:
# insert a key
x.keys.append(0)
while i >= 0 and k < x.keys[i]:
x.keys[i+1] = x.keys[i]
i -= 1
x.keys[i+1] = k
else:
# insert a child
while i >= 0 and k < x.keys[i]:
i -= 1
i += 1
if len(x.c[i].keys) == (2*self.t) - 1:
self._split_child(x, i)
if k > x.keys[i]:
i += 1
self._insert_nonfull(x.c[i], k)
def _split_child(self, x, i):
t = self.t
y = x.c[i]
z = BTreeNode(leaf=y.leaf)
# slide all children of x to the right and insert z at i+1.
x.c.insert(i+1, z)
x.keys.insert(i, y.keys[t-1])
# keys of z are t to 2t - 1,
# y is then 0 to t-2
z.keys = y.keys[t:(2*t - 1)]
y.keys = y.keys[0:(t-1)]
# children of z are t to 2t els of y.c
if not y.leaf:
z.c = y.c[t:(2*t)]
y.c = y.c[0:(t-1)]
def __str__(self):
r = self.root
return r.__str__() + '\n'.join([child.__str__() for child in r.c])
| BTree |
python | huggingface__transformers | src/transformers/models/glm4v/modular_glm4v.py | {
"start": 33307,
"end": 38055
} | class ____(Glm4vPreTrainedModel):
config: Glm4vVisionConfig
input_modalities = ("image", "video")
_no_split_modules = ["Glm4vVisionBlock"]
def __init__(self, config) -> None:
super().__init__(config)
self.spatial_merge_size = config.spatial_merge_size
self.patch_size = config.patch_size
self.embeddings = Glm4vVisionEmbeddings(config)
self.patch_embed = Glm4vVisionPatchEmbed(config)
head_dim = config.hidden_size // config.num_heads
self.rotary_pos_emb = Glm4vVisionRotaryEmbedding(head_dim // 2)
self.blocks = nn.ModuleList([Glm4vVisionBlock(config) for _ in range(config.depth)])
self.merger = Glm4vVisionPatchMerger(
dim=config.out_hidden_size, context_dim=config.intermediate_size, hidden_act=config.hidden_act
)
self.post_conv_layernorm = Glm4vRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
self.downsample = nn.Conv2d(
in_channels=config.hidden_size,
out_channels=config.out_hidden_size,
kernel_size=config.spatial_merge_size,
stride=config.spatial_merge_size,
)
self.post_layernorm = Glm4vRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
self.gradient_checkpointing = False
self.post_init()
def rot_pos_emb(self, grid_thw):
pos_ids = []
for t, h, w in grid_thw:
hpos_ids = torch.arange(h).unsqueeze(1).expand(-1, w)
hpos_ids = hpos_ids.reshape(
h // self.spatial_merge_size,
self.spatial_merge_size,
w // self.spatial_merge_size,
self.spatial_merge_size,
)
hpos_ids = hpos_ids.permute(0, 2, 1, 3)
hpos_ids = hpos_ids.flatten()
wpos_ids = torch.arange(w).unsqueeze(0).expand(h, -1)
wpos_ids = wpos_ids.reshape(
h // self.spatial_merge_size,
self.spatial_merge_size,
w // self.spatial_merge_size,
self.spatial_merge_size,
)
wpos_ids = wpos_ids.permute(0, 2, 1, 3)
wpos_ids = wpos_ids.flatten()
pos_ids.append(torch.stack([hpos_ids, wpos_ids], dim=-1).repeat(t, 1))
pos_ids = torch.cat(pos_ids, dim=0)
max_grid_size = grid_thw[:, 1:].max()
rotary_pos_emb_full = self.rotary_pos_emb(max_grid_size)
rotary_pos_emb = rotary_pos_emb_full[pos_ids].flatten(1)
return rotary_pos_emb, pos_ids
def forward(self, hidden_states: torch.Tensor, grid_thw: torch.Tensor) -> torch.Tensor:
"""
Args:
hidden_states (`torch.Tensor` of shape `(seq_len, hidden_size)`):
The final hidden states of the model.
grid_thw (`torch.Tensor` of shape `(num_images_or_videos, 3)`):
The temporal, height and width of feature shape of each image in LLM.
Returns:
`torch.Tensor`: hidden_states.
"""
hidden_states = self.patch_embed(hidden_states)
hidden_states = self.post_conv_layernorm(hidden_states)
rotary_pos_emb, image_type_ids = self.rot_pos_emb(grid_thw)
emb = torch.cat((rotary_pos_emb, rotary_pos_emb), dim=-1)
position_embeddings = (emb.cos(), emb.sin())
cu_seqlens = torch.repeat_interleave(grid_thw[:, 1] * grid_thw[:, 2], grid_thw[:, 0]).cumsum(
dim=0,
# Select dtype based on the following factors:
# - FA2 requires that cu_seqlens_q must have dtype int32
# - torch.onnx.export requires that cu_seqlens_q must have same dtype as grid_thw
# See https://github.com/huggingface/transformers/pull/34852 for more information
dtype=grid_thw.dtype if torch.jit.is_tracing() else torch.int32,
)
cu_seqlens = F.pad(cu_seqlens, (1, 0), value=0)
seqlens = (cu_seqlens[1:] - cu_seqlens[:-1]).tolist()
hidden_states = self.embeddings(hidden_states, seqlens, grid_thw, image_type_ids[:, 0], image_type_ids[:, 1])
for blk in self.blocks:
hidden_states = blk(
hidden_states,
cu_seqlens=cu_seqlens,
position_embeddings=position_embeddings,
)
hidden_states = self.post_layernorm(hidden_states)
hidden_states = hidden_states.view(
-1, self.spatial_merge_size, self.spatial_merge_size, hidden_states.shape[-1]
)
hidden_states = hidden_states.permute(0, 3, 1, 2)
hidden_states = self.downsample(hidden_states).view(-1, self.config.out_hidden_size)
hidden_states = self.merger(hidden_states)
return hidden_states
| Glm4vVisionModel |
python | run-llama__llama_index | llama-index-integrations/tools/llama-index-tools-google/llama_index/tools/google/gmail/base.py | {
"start": 361,
"end": 9618
} | class ____(BaseToolSpec):
"""
GMail tool spec.
Gives the agent the ability to read, draft and send gmail messages
"""
spec_functions = [
"load_data",
"search_messages",
"create_draft",
"update_draft",
"get_draft",
"send_draft",
]
query: str = None
use_iterative_parser: bool = False
max_results: int = 10
service: Any = None
def _cache_service(self) -> None:
from googleapiclient.discovery import build
credentials = self._get_credentials()
if not self.service:
self.service = build("gmail", "v1", credentials=credentials)
def load_data(self) -> List[Document]:
"""Load emails from the user's account."""
self._cache_service()
return self.search_messages(query="")
def _get_credentials(self) -> Any:
"""
Get valid user credentials from storage.
The file token.json stores the user's access and refresh tokens, and is
created automatically when the authorization flow completes for the first
time.
Returns:
Credentials, the obtained credential.
"""
import os
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
creds = None
if os.path.exists("token.json"):
creds = Credentials.from_authorized_user_file("token.json", SCOPES)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
"credentials.json", SCOPES
)
creds = flow.run_local_server(port=8080)
# Save the credentials for the next run
with open("token.json", "w") as token:
token.write(creds.to_json())
return creds
def search_messages(self, query: str, max_results: Optional[int] = None):
"""
Searches email messages given a query string and the maximum number
of results requested by the user
Returns: List of relevant message objects up to the maximum number of results.
Args:
query (str): The user's query
max_results (Optional[int]): The maximum number of search results
to return.
"""
if not max_results:
max_results = self.max_results
self._cache_service()
messages = (
self.service.users()
.messages()
.list(userId="me", q=query or None, maxResults=int(max_results))
.execute()
.get("messages", [])
)
results = []
try:
for message in messages:
message_data = self.get_message_data(message)
text = message_data.pop("body")
metadata = message_data
results.append(Document(text=text, metadata=metadata))
except Exception as e:
raise Exception("Can't get message data" + str(e))
return results
def get_message_data(self, message):
message_id = message["id"]
message_data = (
self.service.users()
.messages()
.get(format="raw", userId="me", id=message_id)
.execute()
)
if self.use_iterative_parser:
body = self.extract_message_body_iterative(message_data)
else:
body = self.extract_message_body(message_data)
if not body:
return None
return {
"id": message_data["id"],
"threadId": message_data["threadId"],
"snippet": message_data["snippet"],
"body": body,
}
def extract_message_body_iterative(self, message: dict):
if message["raw"]:
body = base64.urlsafe_b64decode(message["raw"].encode("utf8"))
mime_msg = email.message_from_bytes(body)
else:
mime_msg = message
body_text = ""
if mime_msg.get_content_type() == "text/plain":
plain_text = mime_msg.get_payload(decode=True)
charset = mime_msg.get_content_charset("utf-8")
body_text = plain_text.decode(charset).encode("utf-8").decode("utf-8")
elif mime_msg.get_content_maintype() == "multipart":
msg_parts = mime_msg.get_payload()
for msg_part in msg_parts:
body_text += self.extract_message_body_iterative(msg_part)
return body_text
def extract_message_body(self, message: dict):
from bs4 import BeautifulSoup
try:
body = base64.urlsafe_b64decode(message["raw"].encode("utf-8"))
mime_msg = email.message_from_bytes(body)
# If the message body contains HTML, parse it with BeautifulSoup
if "text/html" in mime_msg:
soup = BeautifulSoup(body, "html.parser")
body = soup.get_text()
return body.decode("utf-8")
except Exception as e:
raise Exception("Can't parse message body" + str(e))
def _build_draft(
self,
to: Optional[List[str]] = None,
subject: Optional[str] = None,
message: Optional[str] = None,
) -> str:
email_message = EmailMessage()
email_message.set_content(message)
email_message["To"] = to
email_message["Subject"] = subject
encoded_message = base64.urlsafe_b64encode(email_message.as_bytes()).decode()
return {"message": {"raw": encoded_message}}
def create_draft(
self,
to: Optional[List[str]] = None,
subject: Optional[str] = None,
message: Optional[str] = None,
) -> str:
"""
Create and insert a draft email.
Print the returned draft's message and id.
Returns: Draft object, including draft id and message meta data.
Args:
to (Optional[str]): The email addresses to send the message to
subject (Optional[str]): The subject for the event
message (Optional[str]): The message for the event
"""
self._cache_service()
service = self.service
return (
service.users()
.drafts()
.create(userId="me", body=self._build_draft(to, subject, message))
.execute()
)
def update_draft(
self,
to: Optional[List[str]] = None,
subject: Optional[str] = None,
message: Optional[str] = None,
draft_id: str = None,
) -> str:
"""
Update a draft email.
Print the returned draft's message and id.
This function is required to be passed a draft_id that is obtained when creating messages
Returns: Draft object, including draft id and message meta data.
Args:
to (Optional[str]): The email addresses to send the message to
subject (Optional[str]): The subject for the event
message (Optional[str]): The message for the event
draft_id (str): the id of the draft to be updated
"""
self._cache_service()
service = self.service
if draft_id is None:
return (
"You did not provide a draft id when calling this function. If you"
" previously created or retrieved the draft, the id is available in"
" context"
)
draft = self.get_draft(draft_id)
headers = draft["message"]["payload"]["headers"]
for header in headers:
if header["name"] == "To" and not to:
to = header["value"]
elif header["name"] == "Subject" and not subject:
subject = header["value"]
return (
service.users()
.drafts()
.update(
userId="me", id=draft_id, body=self._build_draft(to, subject, message)
)
.execute()
)
def get_draft(self, draft_id: str = None) -> str:
"""
Get a draft email.
Print the returned draft's message and id.
Returns: Draft object, including draft id and message meta data.
Args:
draft_id (str): the id of the draft to be updated
"""
self._cache_service()
service = self.service
return service.users().drafts().get(userId="me", id=draft_id).execute()
def send_draft(self, draft_id: str = None) -> str:
"""
Sends a draft email.
Print the returned draft's message and id.
Returns: Draft object, including draft id and message meta data.
Args:
draft_id (str): the id of the draft to be updated
"""
self._cache_service()
service = self.service
return (
service.users().drafts().send(userId="me", body={"id": draft_id}).execute()
)
| GmailToolSpec |
python | openai__openai-python | src/openai/resources/beta/chatkit/sessions.py | {
"start": 11201,
"end": 11509
} | class ____:
def __init__(self, sessions: Sessions) -> None:
self._sessions = sessions
self.create = to_streamed_response_wrapper(
sessions.create,
)
self.cancel = to_streamed_response_wrapper(
sessions.cancel,
)
| SessionsWithStreamingResponse |
python | Pylons__pyramid | src/pyramid/predicates.py | {
"start": 7558,
"end": 7836
} | class ____:
def __init__(self, val, config):
self.val = val
def text(self):
return f"is_authenticated = {self.val!r}"
phash = text
def __call__(self, context, request):
return request.is_authenticated == self.val
| IsAuthenticatedPredicate |
python | getsentry__sentry | src/sentry/organizations/services/organization/impl.py | {
"start": 32120,
"end": 32976
} | class ____(OrganizationSignalService):
def schedule_signal(
self, signal: Signal, organization_id: int, args: Mapping[str, str | int | None]
) -> None:
with outbox_context(flush=False):
payload: Any = {
"args": args,
"signal": int(RpcOrganizationSignal.from_signal(signal)),
}
for region_name in find_regions_for_orgs([organization_id]):
ControlOutbox(
shard_scope=OutboxScope.ORGANIZATION_SCOPE,
shard_identifier=organization_id,
region_name=region_name,
category=OutboxCategory.SEND_SIGNAL,
object_identifier=ControlOutbox.next_object_identifier(),
payload=payload,
).save()
| OutboxBackedOrganizationSignalService |
python | HypothesisWorks__hypothesis | hypothesis-python/src/hypothesis/internal/conjecture/providers.py | {
"start": 25263,
"end": 36464
} | class ____(PrimitiveProvider):
lifetime = "test_case"
def __init__(self, conjecturedata: Optional["ConjectureData"], /):
super().__init__(conjecturedata)
self._random = None if self._cd is None else self._cd._random
@cached_property
def _local_constants(self):
# defer computation of local constants until/if we need it
return _get_local_constants()
def _maybe_draw_constant(
self,
choice_type: ChoiceTypeT,
constraints: ChoiceConstraintsT,
*,
p: float = 0.05,
) -> Optional["ConstantT"]:
assert self._random is not None
assert choice_type != "boolean"
# check whether we even want a constant before spending time computing
# and caching the allowed constants.
if self._random.random() > p:
return None
# note: this property access results in computation being done
assert self._local_constants is not None
key = (choice_type, choice_constraints_key(choice_type, constraints))
if key not in CONSTANTS_CACHE:
CONSTANTS_CACHE[key] = (
tuple(
choice
for choice in GLOBAL_CONSTANTS.set_for_type(choice_type)
if choice_permitted(choice, constraints)
),
tuple(
choice
for choice in self._local_constants.set_for_type(choice_type)
if choice_permitted(choice, constraints)
),
)
# split constants into two pools, so we still have a good chance to draw
# global constants even if there are many local constants.
(global_constants, local_constants) = CONSTANTS_CACHE[key]
constants_lists = ([global_constants] if global_constants else []) + (
[local_constants] if local_constants else []
)
if not constants_lists:
return None
# At this point, we've decided to use a constant. Now we select which pool
# to draw that constant from.
#
# Note that this approach has a different probability distribution than
# attempting a random.random for both global_constants and local_constants.
constants = self._random.choice(constants_lists)
return self._random.choice(constants)
def draw_boolean(
self,
p: float = 0.5,
) -> bool:
assert self._random is not None
if p <= 0:
return False
if p >= 1:
return True
return self._random.random() < p
def draw_integer(
self,
min_value: int | None = None,
max_value: int | None = None,
*,
weights: dict[int, float] | None = None,
shrink_towards: int = 0,
) -> int:
assert self._cd is not None
if (
constant := self._maybe_draw_constant(
"integer",
{
"min_value": min_value,
"max_value": max_value,
"weights": weights,
"shrink_towards": shrink_towards,
},
)
) is not None:
assert isinstance(constant, int)
return constant
center = 0
if min_value is not None:
center = max(min_value, center)
if max_value is not None:
center = min(max_value, center)
if weights is not None:
assert min_value is not None
assert max_value is not None
# format of weights is a mapping of ints to p, where sum(p) < 1.
# The remaining probability mass is uniformly distributed over
# *all* ints (not just the unmapped ones; this is somewhat undesirable,
# but simplifies things).
#
# We assert that sum(p) is strictly less than 1 because it simplifies
# handling forced values when we can force into the unmapped probability
# mass. We should eventually remove this restriction.
sampler = Sampler(
[1 - sum(weights.values()), *weights.values()], observe=False
)
# if we're forcing, it's easiest to force into the unmapped probability
# mass and then force the drawn value after.
idx = sampler.sample(self._cd)
if idx == 0:
return self._draw_bounded_integer(min_value, max_value)
# implicit reliance on dicts being sorted for determinism
return list(weights)[idx - 1]
if min_value is None and max_value is None:
return self._draw_unbounded_integer()
if min_value is None:
assert max_value is not None
probe = max_value + 1
while max_value < probe:
probe = center + self._draw_unbounded_integer()
return probe
if max_value is None:
assert min_value is not None
probe = min_value - 1
while probe < min_value:
probe = center + self._draw_unbounded_integer()
return probe
return self._draw_bounded_integer(min_value, max_value)
def draw_float(
self,
*,
min_value: float = -math.inf,
max_value: float = math.inf,
allow_nan: bool = True,
smallest_nonzero_magnitude: float,
) -> float:
assert self._random is not None
constraints: FloatConstraints = {
"min_value": min_value,
"max_value": max_value,
"allow_nan": allow_nan,
"smallest_nonzero_magnitude": smallest_nonzero_magnitude,
}
if (
constant := self._maybe_draw_constant("float", constraints, p=0.15)
) is not None:
assert isinstance(constant, float)
return constant
# on top of the probability to draw a constant float, we independently
# upweight 0.0/-0.0, math.inf, -math.inf, nans, and boundary values.
weird_floats = [
f
for f in [
0.0,
-0.0,
math.inf,
-math.inf,
math.nan,
-math.nan,
SIGNALING_NAN,
-SIGNALING_NAN,
min_value,
next_up(min_value),
min_value + 1,
max_value - 1,
next_down(max_value),
max_value,
]
if choice_permitted(f, constraints)
]
if weird_floats and self._random.random() < 0.05:
return self._random.choice(weird_floats)
clamper = make_float_clamper(
min_value,
max_value,
smallest_nonzero_magnitude=smallest_nonzero_magnitude,
allow_nan=allow_nan,
)
result = self._draw_float()
if allow_nan and math.isnan(result):
clamped = result # pragma: no cover
else:
clamped = clamper(result)
if float_to_int(clamped) != float_to_int(result) and not (
math.isnan(result) and allow_nan
):
result = clamped
return result
def draw_string(
self,
intervals: IntervalSet,
*,
min_size: int = 0,
max_size: int = COLLECTION_DEFAULT_MAX_SIZE,
) -> str:
assert self._cd is not None
assert self._random is not None
if len(intervals) == 0:
return ""
if (
constant := self._maybe_draw_constant(
"string",
{"intervals": intervals, "min_size": min_size, "max_size": max_size},
)
) is not None:
assert isinstance(constant, str)
return constant
average_size = min(
max(min_size * 2, min_size + 5),
0.5 * (min_size + max_size),
)
chars = []
elements = many(
self._cd,
min_size=min_size,
max_size=max_size,
average_size=average_size,
observe=False,
)
while elements.more():
if len(intervals) > 256:
if self.draw_boolean(0.2):
i = self._random.randint(256, len(intervals) - 1)
else:
i = self._random.randint(0, 255)
else:
i = self._random.randint(0, len(intervals) - 1)
chars.append(intervals.char_in_shrink_order(i))
return "".join(chars)
def draw_bytes(
self,
min_size: int = 0,
max_size: int = COLLECTION_DEFAULT_MAX_SIZE,
) -> bytes:
assert self._cd is not None
assert self._random is not None
if (
constant := self._maybe_draw_constant(
"bytes", {"min_size": min_size, "max_size": max_size}
)
) is not None:
assert isinstance(constant, bytes)
return constant
buf = bytearray()
average_size = min(
max(min_size * 2, min_size + 5),
0.5 * (min_size + max_size),
)
elements = many(
self._cd,
min_size=min_size,
max_size=max_size,
average_size=average_size,
observe=False,
)
while elements.more():
buf += self._random.randbytes(1)
return bytes(buf)
def _draw_float(self) -> float:
assert self._random is not None
f = lex_to_float(self._random.getrandbits(64))
sign = 1 if self._random.getrandbits(1) else -1
return sign * f
def _draw_unbounded_integer(self) -> int:
assert self._cd is not None
assert self._random is not None
size = INT_SIZES[INT_SIZES_SAMPLER.sample(self._cd)]
r = self._random.getrandbits(size)
sign = r & 1
r >>= 1
if sign:
r = -r
return r
def _draw_bounded_integer(
self,
lower: int,
upper: int,
*,
vary_size: bool = True,
) -> int:
assert lower <= upper
assert self._cd is not None
assert self._random is not None
if lower == upper:
return lower
bits = (upper - lower).bit_length()
if bits > 24 and vary_size and self._random.random() < 7 / 8:
# For large ranges, we combine the uniform random distribution
# with a weighting scheme with moderate chance. Cutoff at 2 ** 24 so that our
# choice of unicode characters is uniform but the 32bit distribution is not.
idx = INT_SIZES_SAMPLER.sample(self._cd)
cap_bits = min(bits, INT_SIZES[idx])
upper = min(upper, lower + 2**cap_bits - 1)
return self._random.randint(lower, upper)
return self._random.randint(lower, upper)
# Masks for masking off the first byte of an n-bit buffer.
# The appropriate mask is stored at position n % 8.
BYTE_MASKS = [(1 << n) - 1 for n in range(8)]
BYTE_MASKS[0] = 255
| HypothesisProvider |
python | kamyu104__LeetCode-Solutions | Python/maximum-genetic-difference-query.py | {
"start": 3044,
"end": 3983
} | class ____(object):
def maxGeneticDifference(self, parents, queries):
"""
:type parents: List[int]
:type queries: List[List[int]]
:rtype: List[int]
"""
def dfs(adj, qs, node, trie, result):
trie.insert(node, 1)
for i, val in qs[node]:
result[i] = trie.query(val)
for child in adj[node]:
dfs(adj, qs, child, trie, result)
trie.insert(node, -1)
adj = collections.defaultdict(list)
for node, parent in enumerate(parents):
adj[parent].append(node)
qs = collections.defaultdict(list)
max_val = len(parents)-1
for i, (node, val) in enumerate(queries):
qs[node].append((i, val))
max_val = max(max_val, val)
result = [0]*len(queries)
dfs(adj, qs, adj[-1][0], Trie(max_val.bit_length()), result)
return result
| Solution2 |
python | sympy__sympy | sympy/matrices/sparse.py | {
"start": 14432,
"end": 14750
} | class ____(SparseRepMatrix, MutableRepMatrix):
@classmethod
def _new(cls, *args, **kwargs):
rows, cols, smat = cls._handle_creation_inputs(*args, **kwargs)
rep = cls._smat_to_DomainMatrix(rows, cols, smat)
return cls._fromrep(rep)
SparseMatrix = MutableSparseMatrix
| MutableSparseMatrix |
python | davidhalter__jedi | test/completion/pep0484_generic_mismatches.py | {
"start": 285,
"end": 4568
} | class ____(object):
pass
tpl = ("1", 2)
tpl_typed: Tuple[str, int] = ("2", 3)
collection = {"a": 1}
collection_typed: Dict[str, int] = {"a": 1}
list_of_ints: List[int] = [42]
list_of_funcs: List[Callable[[T], T]] = [foo]
custom_generic = CustomGeneric(123.45)
plain_instance = PlainClass()
# Test that simple parameters are handled
def list_t_to_list_t(the_list: List[T]) -> List[T]:
return the_list
x0 = list_t_to_list_t("abc")[0]
#?
x0
x1 = list_t_to_list_t(foo)[0]
#?
x1
x1 = list_t_to_list_t(typing)[0]
#?
x1
x2 = list_t_to_list_t(tpl)[0]
#?
x2
x3 = list_t_to_list_t(tpl_typed)[0]
#?
x3
x4 = list_t_to_list_t(collection)[0]
#?
x4
x5 = list_t_to_list_t(collection_typed)[0]
#?
x5
x6 = list_t_to_list_t(custom_generic)[0]
#?
x6
x7 = list_t_to_list_t(plain_instance)[0]
#?
x7
for a in list_t_to_list_t(12):
#?
a
# Test that simple parameters are handled
def list_type_t_to_list_t(the_list: List[Type[T]]) -> List[T]:
return [x() for x in the_list]
x0 = list_type_t_to_list_t("abc")[0]
#?
x0
x1 = list_type_t_to_list_t(foo)[0]
#?
x1
x2 = list_type_t_to_list_t(tpl)[0]
#?
x2
x3 = list_type_t_to_list_t(tpl_typed)[0]
#?
x3
x4 = list_type_t_to_list_t(collection)[0]
#?
x4
x5 = list_type_t_to_list_t(collection_typed)[0]
#?
x5
x6 = list_type_t_to_list_t(custom_generic)[0]
#?
x6
x7 = list_type_t_to_list_t(plain_instance)[0]
#?
x7
for a in list_type_t_to_list_t(12):
#?
a
x0 = list_type_t_to_list_t(["abc"])[0]
#?
x0
x1 = list_type_t_to_list_t([foo])[0]
#?
x1
x2 = list_type_t_to_list_t([tpl])[0]
#?
x2
x3 = list_type_t_to_list_t([tpl_typed])[0]
#?
x3
x4 = list_type_t_to_list_t([collection])[0]
#?
x4
x5 = list_type_t_to_list_t([collection_typed])[0]
#?
x5
x6 = list_type_t_to_list_t([custom_generic])[0]
#?
x6
x7 = list_type_t_to_list_t([plain_instance])[0]
#?
x7
for a in list_type_t_to_list_t([12]):
#?
a
def list_func_t_to_list_t(the_list: List[Callable[[T], T]]) -> List[T]:
# Not actually a viable signature, but should be enough to test our handling
# of the generic parameters.
pass
x0 = list_func_t_to_list_t("abc")[0]
#?
x0
x1 = list_func_t_to_list_t(foo)[0]
#?
x1
x2 = list_func_t_to_list_t(tpl)[0]
#?
x2
x3 = list_func_t_to_list_t(tpl_typed)[0]
#?
x3
x4 = list_func_t_to_list_t(collection)[0]
#?
x4
x5 = list_func_t_to_list_t(collection_typed)[0]
#?
x5
x6 = list_func_t_to_list_t(custom_generic)[0]
#?
x6
x7 = list_func_t_to_list_t(plain_instance)[0]
#?
x7
for a in list_func_t_to_list_t(12):
#?
a
x0 = list_func_t_to_list_t(["abc"])[0]
#?
x0
x2 = list_func_t_to_list_t([tpl])[0]
#?
x2
x3 = list_func_t_to_list_t([tpl_typed])[0]
#?
x3
x4 = list_func_t_to_list_t([collection])[0]
#?
x4
x5 = list_func_t_to_list_t([collection_typed])[0]
#?
x5
x6 = list_func_t_to_list_t([custom_generic])[0]
#?
x6
x7 = list_func_t_to_list_t([plain_instance])[0]
#?
x7
for a in list_func_t_to_list_t([12]):
#?
a
def tuple_t(tuple_in: Tuple[T]]) -> Sequence[T]:
return tuple_in
x0 = list_t_to_list_t("abc")[0]
#?
x0
x1 = list_t_to_list_t(foo)[0]
#?
x1
x2 = list_t_to_list_t(tpl)[0]
#?
x2
x3 = list_t_to_list_t(tpl_typed)[0]
#?
x3
x4 = list_t_to_list_t(collection)[0]
#?
x4
x5 = list_t_to_list_t(collection_typed)[0]
#?
x5
x6 = list_t_to_list_t(custom_generic)[0]
#?
x6
x7 = list_t_to_list_t(plain_instance)[0]
#?
x7
for a in list_t_to_list_t(12):
#?
a
def tuple_t_elipsis(tuple_in: Tuple[T, ...]]) -> Sequence[T]:
return tuple_in
x0 = list_t_to_list_t("abc")[0]
#?
x0
x1 = list_t_to_list_t(foo)[0]
#?
x1
x2 = list_t_to_list_t(tpl)[0]
#?
x2
x3 = list_t_to_list_t(tpl_typed)[0]
#?
x3
x4 = list_t_to_list_t(collection)[0]
#?
x4
x5 = list_t_to_list_t(collection_typed)[0]
#?
x5
x6 = list_t_to_list_t(custom_generic)[0]
#?
x6
x7 = list_t_to_list_t(plain_instance)[0]
#?
x7
for a in list_t_to_list_t(12):
#?
a
def list_tuple_t_to_tuple_list_t(the_list: List[Tuple[T]]) -> Tuple[List[T], ...]:
return tuple(list(x) for x in the_list)
for b in list_tuple_t_to_tuple_list_t(list_of_ints):
#?
b[0]
def list_tuple_t_elipsis_to_tuple_list_t(the_list: List[Tuple[T, ...]]) -> Tuple[List[T], ...]:
return tuple(list(x) for x in the_list)
for b in list_tuple_t_to_tuple_list_t(list_of_ints):
#?
b[0]
| PlainClass |
python | run-llama__llama_index | llama-index-integrations/readers/llama-index-readers-web/tests/test_zenrows_web.py | {
"start": 1084,
"end": 13031
} | class ____:
"""Test cases for ZenRowsWebReader."""
def test_init_with_api_key(self, api_key):
"""Test initialization with valid API key."""
reader = ZenRowsWebReader(api_key=api_key)
assert reader.api_key == api_key
assert reader.js_render is False
assert reader.premium_proxy is False
def test_init_without_api_key(self):
"""Test initialization without API key raises error."""
with pytest.raises(ValueError, match="ZenRows API key is required"):
ZenRowsWebReader(api_key="")
def test_init_with_custom_params(self, api_key):
"""Test initialization with custom parameters."""
custom_headers = {"User-Agent": "TestAgent"}
reader = ZenRowsWebReader(
api_key=api_key,
js_render=True,
premium_proxy=True,
proxy_country="US",
custom_headers=custom_headers,
wait=5000,
response_type="markdown",
)
assert reader.js_render is True
assert reader.premium_proxy is True
assert reader.proxy_country == "US"
assert reader.custom_headers == custom_headers
assert reader.wait == 5000
assert reader.response_type == "markdown"
@patch("requests.get")
def test_load_data_basic(self, mock_get, api_key, test_url, mock_html_response):
"""Test basic load_data functionality."""
# Mock response
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = mock_html_response
mock_response.content = mock_html_response.encode()
mock_response.headers = {
"Content-Type": "text/html",
"X-Request-Cost": "1.0",
"X-Request-Id": "test_request_123",
}
mock_response.raise_for_status.return_value = None
mock_get.return_value = mock_response
reader = ZenRowsWebReader(api_key=api_key)
documents = reader.load_data(test_url)
assert len(documents) == 1
assert isinstance(documents[0], Document)
assert documents[0].text == mock_html_response
assert documents[0].metadata["source_url"] == test_url
assert documents[0].metadata["request_cost"] == 1.0
assert documents[0].metadata["request_id"] == "test_request_123"
@patch("requests.get")
def test_load_data_multiple_urls(self, mock_get, api_key, mock_html_response):
"""Test load_data with multiple URLs."""
urls = ["https://example1.com", "https://example2.com"]
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = mock_html_response
mock_response.content = mock_html_response.encode()
mock_response.headers = {"Content-Type": "text/html"}
mock_response.raise_for_status.return_value = None
mock_get.return_value = mock_response
reader = ZenRowsWebReader(api_key=api_key)
documents = reader.load_data(urls)
assert len(documents) == 2
assert all(isinstance(doc, Document) for doc in documents)
assert documents[0].metadata["source_url"] == urls[0]
assert documents[1].metadata["source_url"] == urls[1]
@patch("requests.get")
def test_load_data_with_custom_headers(
self, mock_get, api_key, test_url, mock_html_response
):
"""Test load_data with custom headers."""
custom_headers = {"User-Agent": "TestAgent", "Authorization": "Bearer token"}
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = mock_html_response
mock_response.content = mock_html_response.encode()
mock_response.headers = {"Content-Type": "text/html"}
mock_response.raise_for_status.return_value = None
mock_get.return_value = mock_response
# Test instance-level custom headers
reader = ZenRowsWebReader(api_key=api_key, custom_headers=custom_headers)
documents = reader.load_data(test_url)
# Verify request was made with custom headers
mock_get.assert_called_once()
call_args = mock_get.call_args
assert call_args[1]["headers"] == custom_headers
# Test per-request custom headers via extra_params
mock_get.reset_mock()
reader2 = ZenRowsWebReader(api_key=api_key)
per_request_headers = {"User-Agent": "PerRequestAgent"}
documents = reader2.load_data(
test_url, extra_params={"custom_headers": per_request_headers}
)
call_args = mock_get.call_args
assert call_args[1]["headers"] == per_request_headers
@patch("requests.get")
def test_load_data_with_js_render(
self, mock_get, api_key, test_url, mock_html_response
):
"""Test load_data with JavaScript rendering enabled."""
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = mock_html_response
mock_response.content = mock_html_response.encode()
mock_response.headers = {"Content-Type": "text/html"}
mock_response.raise_for_status.return_value = None
mock_get.return_value = mock_response
reader = ZenRowsWebReader(
api_key=api_key, js_render=True, wait=3000, wait_for=".content"
)
documents = reader.load_data(test_url)
# Verify the request parameters include JS rendering options
call_args = mock_get.call_args
params = call_args[1]["params"]
assert params["js_render"] is True
assert params["wait"] == 3000
assert params["wait_for"] == ".content"
@patch("requests.get")
def test_load_data_with_premium_proxy(
self, mock_get, api_key, test_url, mock_html_response
):
"""Test load_data with premium proxy and geo-location."""
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = mock_html_response
mock_response.content = mock_html_response.encode()
mock_response.headers = {"Content-Type": "text/html"}
mock_response.raise_for_status.return_value = None
mock_get.return_value = mock_response
reader = ZenRowsWebReader(
api_key=api_key, premium_proxy=True, proxy_country="GB"
)
documents = reader.load_data(test_url)
# Verify the request parameters include proxy options
call_args = mock_get.call_args
params = call_args[1]["params"]
assert params["premium_proxy"] is True
assert params["proxy_country"] == "GB"
@patch("requests.get")
def test_load_data_error_handling(self, mock_get, api_key, test_url):
"""Test error handling in load_data."""
# Mock a failed request
mock_get.side_effect = requests.exceptions.RequestException("Connection failed")
reader = ZenRowsWebReader(api_key=api_key)
documents = reader.load_data(test_url)
# Should return error document instead of raising exception
assert len(documents) == 1
assert "Error scraping" in documents[0].text
assert documents[0].metadata["status"] == "failed"
assert documents[0].metadata["source_url"] == test_url
@patch("requests.get")
def test_load_data_with_extra_params(
self, mock_get, api_key, test_url, mock_html_response
):
"""Test load_data with extra parameters."""
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = mock_html_response
mock_response.content = mock_html_response.encode()
mock_response.headers = {"Content-Type": "text/html"}
mock_response.raise_for_status.return_value = None
mock_get.return_value = mock_response
reader = ZenRowsWebReader(api_key=api_key)
extra_params = {
"css_extractor": '{"title": "h1", "content": "p"}',
"autoparse": True,
"block_resources": "images,fonts",
}
documents = reader.load_data(test_url, extra_params=extra_params)
# Verify extra parameters were included in the request
call_args = mock_get.call_args
params = call_args[1]["params"]
assert params["css_extractor"] == '{"title": "h1", "content": "p"}'
assert params["autoparse"] is True
assert params["block_resources"] == "images,fonts"
def test_css_extractor_validation(self, api_key):
"""Test CSS extractor validation."""
# Valid JSON should work
reader = ZenRowsWebReader(
api_key=api_key, css_extractor='{"title": "h1", "content": "p"}'
)
assert reader.css_extractor == '{"title": "h1", "content": "p"}'
# Invalid JSON should raise error
with pytest.raises(ValueError, match="css_extractor must be valid JSON"):
ZenRowsWebReader(api_key=api_key, css_extractor="invalid json")
def test_proxy_country_validation(self, api_key):
"""Test proxy country validation."""
# Valid two-letter country code should work
reader = ZenRowsWebReader(api_key=api_key, proxy_country="US")
assert reader.proxy_country == "US"
# Invalid country code should raise error
with pytest.raises(
ValueError, match="proxy_country must be a two-letter country code"
):
ZenRowsWebReader(api_key=api_key, proxy_country="USA")
def test_class_name(self, api_key):
"""Test class name method."""
reader = ZenRowsWebReader(api_key=api_key)
assert reader.class_name() == "ZenRowsWebReader"
@patch("requests.get")
def test_metadata_extraction(self, mock_get, api_key, test_url, mock_html_response):
"""Test metadata extraction from response headers."""
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = mock_html_response
mock_response.content = mock_html_response.encode()
mock_response.headers = {
"Content-Type": "text/html",
"X-Request-Cost": "2.5",
"X-Request-Id": "req_123456",
"Zr-Final-Url": "https://example.com/final",
"Concurrency-Remaining": "10",
"Concurrency-Limit": "100",
}
mock_response.raise_for_status.return_value = None
mock_get.return_value = mock_response
reader = ZenRowsWebReader(api_key=api_key, js_render=True)
documents = reader.load_data(test_url)
metadata = documents[0].metadata
assert metadata["request_cost"] == 2.5
assert metadata["request_id"] == "req_123456"
assert metadata["final_url"] == "https://example.com/final"
assert metadata["concurrency_remaining"] == 10
assert metadata["concurrency_limit"] == 100
assert metadata["status_code"] == 200
assert metadata["content_type"] == "text/html"
assert metadata["zenrows_config"]["js_render"] is True
@patch("requests.get")
def test_auto_js_render_enablement(
self, mock_get, api_key, test_url, mock_html_response
):
"""Test automatic JS render enablement for certain parameters."""
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = mock_html_response
mock_response.content = mock_html_response.encode()
mock_response.headers = {"Content-Type": "text/html"}
mock_response.raise_for_status.return_value = None
mock_get.return_value = mock_response
# Test with screenshot parameter (should auto-enable js_render)
reader = ZenRowsWebReader(api_key=api_key, screenshot="true")
documents = reader.load_data(test_url)
call_args = mock_get.call_args
params = call_args[1]["params"]
assert params["js_render"] is True # Should be auto-enabled
assert params["screenshot"] == "true"
| TestZenRowsWebReader |
python | kubernetes-client__python | kubernetes/client/models/v1_api_versions.py | {
"start": 383,
"end": 8894
} | class ____(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'kind': 'str',
'server_address_by_client_cid_rs': 'list[V1ServerAddressByClientCIDR]',
'versions': 'list[str]'
}
attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'server_address_by_client_cid_rs': 'serverAddressByClientCIDRs',
'versions': 'versions'
}
def __init__(self, api_version=None, kind=None, server_address_by_client_cid_rs=None, versions=None, local_vars_configuration=None): # noqa: E501
"""V1APIVersions - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._kind = None
self._server_address_by_client_cid_rs = None
self._versions = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if kind is not None:
self.kind = kind
self.server_address_by_client_cid_rs = server_address_by_client_cid_rs
self.versions = versions
@property
def api_version(self):
"""Gets the api_version of this V1APIVersions. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1APIVersions. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1APIVersions.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1APIVersions. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""Gets the kind of this V1APIVersions. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1APIVersions. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1APIVersions.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1APIVersions. # noqa: E501
:type: str
"""
self._kind = kind
@property
def server_address_by_client_cid_rs(self):
"""Gets the server_address_by_client_cid_rs of this V1APIVersions. # noqa: E501
a map of client CIDR to server address that is serving this group. This is to help clients reach servers in the most network-efficient way possible. Clients can use the appropriate server address as per the CIDR that they match. In case of multiple matches, clients should use the longest matching CIDR. The server returns only those CIDRs that it thinks that the client can match. For example: the master will return an internal IP CIDR only, if the client reaches the server using an internal IP. Server looks at X-Forwarded-For header or X-Real-Ip header or request.RemoteAddr (in that order) to get the client IP. # noqa: E501
:return: The server_address_by_client_cid_rs of this V1APIVersions. # noqa: E501
:rtype: list[V1ServerAddressByClientCIDR]
"""
return self._server_address_by_client_cid_rs
@server_address_by_client_cid_rs.setter
def server_address_by_client_cid_rs(self, server_address_by_client_cid_rs):
"""Sets the server_address_by_client_cid_rs of this V1APIVersions.
a map of client CIDR to server address that is serving this group. This is to help clients reach servers in the most network-efficient way possible. Clients can use the appropriate server address as per the CIDR that they match. In case of multiple matches, clients should use the longest matching CIDR. The server returns only those CIDRs that it thinks that the client can match. For example: the master will return an internal IP CIDR only, if the client reaches the server using an internal IP. Server looks at X-Forwarded-For header or X-Real-Ip header or request.RemoteAddr (in that order) to get the client IP. # noqa: E501
:param server_address_by_client_cid_rs: The server_address_by_client_cid_rs of this V1APIVersions. # noqa: E501
:type: list[V1ServerAddressByClientCIDR]
"""
if self.local_vars_configuration.client_side_validation and server_address_by_client_cid_rs is None: # noqa: E501
raise ValueError("Invalid value for `server_address_by_client_cid_rs`, must not be `None`") # noqa: E501
self._server_address_by_client_cid_rs = server_address_by_client_cid_rs
@property
def versions(self):
"""Gets the versions of this V1APIVersions. # noqa: E501
versions are the api versions that are available. # noqa: E501
:return: The versions of this V1APIVersions. # noqa: E501
:rtype: list[str]
"""
return self._versions
@versions.setter
def versions(self, versions):
"""Sets the versions of this V1APIVersions.
versions are the api versions that are available. # noqa: E501
:param versions: The versions of this V1APIVersions. # noqa: E501
:type: list[str]
"""
if self.local_vars_configuration.client_side_validation and versions is None: # noqa: E501
raise ValueError("Invalid value for `versions`, must not be `None`") # noqa: E501
self._versions = versions
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1APIVersions):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1APIVersions):
return True
return self.to_dict() != other.to_dict()
| V1APIVersions |
python | scipy__scipy | scipy/spatial/tests/test_kdtree.py | {
"start": 12814,
"end": 12974
} | class ____(_Test_random_ball_periodic):
def setup_method(self):
super().setup_method()
self.d = 2.
@KDTreeTest
| _Test_random_ball_far_periodic |
python | great-expectations__great_expectations | contrib/capitalone_dataprofiler_expectations/capitalone_dataprofiler_expectations/expectations/expect_profile_numeric_columns_percent_diff_less_than_threshold.py | {
"start": 952,
"end": 7670
} | class ____(
DataProfilerProfileMetricProvider
):
metric_name = "data_profiler.profile_numeric_columns_percent_diff_less_than_threshold"
value_keys = (
"profile_path",
"limit_check_report_keys",
"numerical_diff_statistics",
)
@metric_value(engine=PandasExecutionEngine)
def _pandas( # noqa: C901 - 22
cls,
execution_engine: PandasExecutionEngine,
metric_domain_kwargs: Dict,
metric_value_kwargs: Dict,
metrics: Dict[str, Any],
runtime_configuration: Dict,
):
profile_percent_diff = metrics.get("data_profiler.profile_percent_diff")
numeric_columns = metrics.get("data_profiler.profile_numeric_columns")
limit_check_report_keys = metric_value_kwargs["limit_check_report_keys"]
numerical_diff_statistics = metric_value_kwargs["numerical_diff_statistics"]
columns = list(profile_percent_diff["global_stats"]["profile_schema"][1].keys())
data_stats = profile_percent_diff["data_stats"]
requested_columns = {}
unavailable_stats = {}
# Adds columns if generic column key is provided
# Note: Copy is required for all metric arguments to ensure metric_value_id is identified correctly
limit_check_report_keys_copy = copy.deepcopy(limit_check_report_keys)
limit_check_report_keys_copy = replace_generic_operator_in_report_keys(
limit_check_report_keys_copy, numeric_columns
)
for col, stats in limit_check_report_keys_copy.items():
if col not in numeric_columns: # Makes sure column requested is numeric
requested_columns[col] = "Column is Non-Numeric"
continue
# adds stats if generic stat key is provided
numerical_diff_statistics_copy = copy.deepcopy(numerical_diff_statistics)
stats = replace_generic_operator_in_report_keys(stats, numerical_diff_statistics_copy)
if col not in columns: # Makes sure column exists within profile schema
requested_columns[col] = "Column requested was not found."
continue
col_data_stats = {}
for data_stat in data_stats:
if data_stat["column_name"] == col:
col_data_stats = data_stat["statistics"]
break
requested_columns[col] = {}
unavailable_stats[col] = {}
for stat, threshold in stats.items():
if stat not in col_data_stats:
requested_columns[col][stat] = "Statistic requested was not found."
continue
diff_val = col_data_stats[stat]
if diff_val == "ERR_divide_by_zero" or diff_val == "ERR_no_original_value":
unavailable_stats[col][stat] = diff_val
continue
if diff_val == "unchanged": # In the case there is no delta
diff_val = 0
below_threshold = is_value_less_than_threshold(diff_val, threshold)
if not below_threshold:
requested_columns[col][stat] = {
"threshold": threshold,
"value_found": diff_val,
}
else:
requested_columns[col][stat] = True
for column in list(unavailable_stats.keys()):
if unavailable_stats[column] == {}:
unavailable_stats.pop(column, None)
if unavailable_stats != {}:
div_by_zero_stats = []
no_original_value = []
for column, stats in unavailable_stats.items():
current_col = copy.deepcopy(limit_check_report_keys_copy[column])
for stat, val in stats.items():
if val == "ERR_divide_by_zero":
div_by_zero_stats.append(column + ": " + stat)
current_col.pop(stat, None)
elif val == "ERR_no_original_value":
no_original_value.append(column + ": " + stat)
current_col.pop(stat, None)
limit_check_report_keys_copy[column] = current_col
warning = "\nWARNING:\n"
if len(div_by_zero_stats) > 0:
warning += "Div By Zero ERROR:\nValue in profile report was 0 for the following column: stat\n"
for div_by_zero_stat in div_by_zero_stats:
warning += " " + div_by_zero_stat + "\n"
if len(no_original_value) > 0:
warning += "Value not Found ERROR:\nStatistic was not found in profile report for the following column: stat\n"
for no_original_value_string in no_original_value:
warning += " " + no_original_value_string + "\n"
warning += "\nTo avoid these errors, you should use the replace 'limit_check_report_keys' with the following:\n"
warning += r"" + json.dumps(limit_check_report_keys_copy, indent=2)
warning += "\n"
warnings.warn(warning)
return requested_columns
@classmethod
def _get_evaluation_dependencies(
cls,
metric: MetricConfiguration,
configuration: Optional[ExpectationConfiguration] = None,
execution_engine: Optional[ExecutionEngine] = None,
runtime_configuration: Optional[dict] = None,
):
"""
Returns a dictionary of given metric names and their corresponding configuration, specifying
the metric types and their respective domains"""
dependencies: dict = super()._get_evaluation_dependencies(
metric=metric,
configuration=configuration,
execution_engine=execution_engine,
runtime_configuration=runtime_configuration,
)
if (
metric.metric_name
== "data_profiler.profile_numeric_columns_percent_diff_less_than_threshold"
):
dependencies["data_profiler.profile_percent_diff"] = MetricConfiguration(
metric_name="data_profiler.profile_percent_diff",
metric_domain_kwargs=metric.metric_domain_kwargs,
metric_value_kwargs=metric.metric_value_kwargs,
)
dependencies["data_profiler.profile_numeric_columns"] = MetricConfiguration(
metric_name="data_profiler.profile_numeric_columns",
metric_domain_kwargs=metric.metric_domain_kwargs,
metric_value_kwargs=metric.metric_value_kwargs,
)
return dependencies
| DataProfilerProfileNumericColumnsPercentDiffLessThanThreshold |
python | huggingface__transformers | src/transformers/models/olmo/modular_olmo.py | {
"start": 4923,
"end": 7311
} | class ____(LlamaAttention):
def forward(
self,
hidden_states: torch.Tensor,
position_embeddings: tuple[torch.Tensor, torch.Tensor],
attention_mask: Optional[torch.Tensor],
past_key_values: Optional[Cache] = None,
cache_position: Optional[torch.LongTensor] = None,
position_ids: Optional[torch.LongTensor] = None,
**kwargs,
) -> tuple[torch.Tensor, Optional[torch.Tensor]]:
input_shape = hidden_states.shape[:-1]
hidden_shape = (*input_shape, -1, self.head_dim)
query_states = self.q_proj(hidden_states)
key_states = self.k_proj(hidden_states)
value_states = self.v_proj(hidden_states)
if self.config.clip_qkv is not None:
query_states.clamp_(min=-self.config.clip_qkv, max=self.config.clip_qkv)
key_states.clamp_(min=-self.config.clip_qkv, max=self.config.clip_qkv)
value_states.clamp_(min=-self.config.clip_qkv, max=self.config.clip_qkv)
query_states = query_states.view(hidden_shape).transpose(1, 2)
key_states = key_states.view(hidden_shape).transpose(1, 2)
value_states = value_states.view(hidden_shape).transpose(1, 2)
cos, sin = position_embeddings
query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)
if past_key_values is not None:
# sin and cos are specific to RoPE models; cache_position needed for the static cache
cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)
attention_interface: Callable = eager_attention_forward
if self.config._attn_implementation != "eager":
attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
attn_output, attn_weights = attention_interface(
self,
query_states,
key_states,
value_states,
attention_mask,
dropout=0.0 if not self.training else self.attention_dropout,
scaling=self.scaling,
**kwargs,
)
attn_output = attn_output.reshape(*input_shape, -1).contiguous()
attn_output = self.o_proj(attn_output)
return attn_output, attn_weights
| OlmoAttention |
python | huggingface__transformers | tests/utils/test_modeling_utils.py | {
"start": 130968,
"end": 132425
} | class ____(TestCasePlus):
def test_disjoint(self):
main = torch.zeros(10)
a = main[:5]
b = main[5:]
state_dict = {"a": a, "b": b}
shared_names, disjoint_names = _find_disjoint([{"a", "b"}], state_dict)
self.assertEqual(shared_names, [])
self.assertEqual(disjoint_names, ["a", "b"])
a = main[::2]
b = main[1::2]
state_dict = {"a": a, "b": b}
shared_names, disjoint_names = _find_disjoint([{"a", "b"}], state_dict)
self.assertEqual(shared_names, [{"a", "b"}])
self.assertEqual(disjoint_names, [])
def test_identical(self):
a = torch.zeros(10)
b = a
state_dict = {"a": a, "b": b}
shared_names, identical_names = _find_identical([{"a", "b"}], state_dict)
self.assertEqual(shared_names, [])
self.assertEqual(identical_names, [{"a", "b"}])
b = a[:5]
state_dict = {"a": a, "b": b}
shared_names, identical_names = _find_identical([{"a", "b"}], state_dict)
self.assertEqual(shared_names, [{"a", "b"}])
self.assertEqual(identical_names, [])
@require_torch
@unittest.skip(
"These tests are currently failing and need to be fixed, but not sure we want to support this/not sure its even used! Fix this line:https://github.com/huggingface/transformers/blob/b750e6b9eeed5fb9adc2f8c7adb46639c8e41963/src/transformers/core_model_loading.py#L512"
)
| TestTensorSharing |
python | great-expectations__great_expectations | great_expectations/expectations/row_conditions.py | {
"start": 423,
"end": 659
} | class ____(ValueError):
"""Raised when unable to determine the Condition type from a dict."""
def __init__(self, value: Any):
super().__init__(f"Unable to determine Condition type from dict: {value}")
| ConditionParserError |
python | ansible__ansible | test/integration/targets/module_defaults/collections/ansible_collections/testns/othercoll/plugins/action/other_echoaction.py | {
"start": 135,
"end": 172
} | class ____(BaseAM):
pass
| ActionModule |
python | pytransitions__transitions | tests/test_codestyle.py | {
"start": 157,
"end": 1666
} | class ____(unittest.TestCase):
def test_conformance(self):
"""Test that we conform to PEP-8."""
style = pycodestyle.StyleGuide(quiet=False, ignore=['E501', 'W605', 'W503'])
if exists('transitions'): # when run from root directory (e.g. tox)
style.input_dir('transitions')
style.input_dir('tests')
else: # when run from test directory (e.g. pycharm)
style.input_dir('../transitions')
style.input_dir('.')
result = style.check_files()
self.assertEqual(result.total_errors, 0,
"Found code style errors (and warnings).")
@unittest.skipIf(mypy is None, "mypy not found")
def test_mypy_package(self):
call = ['mypy', '--config-file', 'mypy.ini', '--strict', 'transitions', 'tests/test_imports.py']
# when run from root directory (e.g. tox) else when run from test directory (e.g. pycharm)
project_root = '.' if exists('transitions') else '..'
subprocess.check_call(call, cwd=project_root)
@unittest.skipIf(mypy is None, "mypy not found")
def test_mypy_tests(self):
call = ['mypy', 'tests',
'--disable-error-code', 'attr-defined',
'--disable-error-code', 'no-untyped-def']
# when run from root directory (e.g. tox) else when run from test directory (e.g. pycharm)
project_root = '.' if exists('transitions') else '..'
subprocess.check_call(call, cwd=project_root)
| TestCodeFormat |
python | PrefectHQ__prefect | src/integrations/prefect-github/prefect_github/schemas/graphql_schema.py | {
"start": 489246,
"end": 489547
} | class ____(sgqlc.types.Type):
"""
See source code for more info.
"""
__schema__ = graphql_schema
__field_names__ = ("downloads_total_count",)
downloads_total_count = sgqlc.types.Field(
sgqlc.types.non_null(Int), graphql_name="downloadsTotalCount"
)
| PackageStatistics |
python | pandas-dev__pandas | pandas/core/arrays/arrow/accessors.py | {
"start": 6551,
"end": 14748
} | class ____(ArrowAccessor):
"""
Accessor object for structured data properties of the Series values.
Parameters
----------
data : Series
Series containing Arrow struct data.
"""
def __init__(self, data=None) -> None:
super().__init__(
data,
validation_msg=(
"Can only use the '.struct' accessor with 'struct[pyarrow]' "
"dtype, not {dtype}."
),
)
def _is_valid_pyarrow_dtype(self, pyarrow_dtype) -> bool:
return pa.types.is_struct(pyarrow_dtype)
@property
def dtypes(self) -> Series:
"""
Return the dtype object of each child field of the struct.
Returns
-------
pandas.Series
The data type of each child field.
See Also
--------
Series.dtype: Return the dtype object of the underlying data.
Examples
--------
>>> import pyarrow as pa
>>> s = pd.Series(
... [
... {"version": 1, "project": "pandas"},
... {"version": 2, "project": "pandas"},
... {"version": 1, "project": "numpy"},
... ],
... dtype=pd.ArrowDtype(
... pa.struct([("version", pa.int64()), ("project", pa.string())])
... ),
... )
>>> s.struct.dtypes
version int64[pyarrow]
project string[pyarrow]
dtype: object
"""
from pandas import (
Index,
Series,
)
pa_type = self._data.dtype.pyarrow_dtype
types = [ArrowDtype(struct.type) for struct in pa_type]
names = [struct.name for struct in pa_type]
return Series(types, index=Index(names))
def field(
self,
name_or_index: list[str]
| list[bytes]
| list[int]
| pc.Expression
| bytes
| str
| int,
) -> Series:
"""
Extract a child field of a struct as a Series.
Parameters
----------
name_or_index : str | bytes | int | expression | list
Name or index of the child field to extract.
For list-like inputs, this will index into a nested
struct.
Returns
-------
pandas.Series
The data corresponding to the selected child field.
See Also
--------
Series.struct.explode : Return all child fields as a DataFrame.
Notes
-----
The name of the resulting Series will be set using the following
rules:
- For string, bytes, or integer `name_or_index` (or a list of these, for
a nested selection), the Series name is set to the selected
field's name.
- For a :class:`pyarrow.compute.Expression`, this is set to
the string form of the expression.
- For list-like `name_or_index`, the name will be set to the
name of the final field selected.
Examples
--------
>>> import pyarrow as pa
>>> s = pd.Series(
... [
... {"version": 1, "project": "pandas"},
... {"version": 2, "project": "pandas"},
... {"version": 1, "project": "numpy"},
... ],
... dtype=pd.ArrowDtype(
... pa.struct([("version", pa.int64()), ("project", pa.string())])
... ),
... )
Extract by field name.
>>> s.struct.field("project")
0 pandas
1 pandas
2 numpy
Name: project, dtype: string[pyarrow]
Extract by field index.
>>> s.struct.field(0)
0 1
1 2
2 1
Name: version, dtype: int64[pyarrow]
Or an expression
>>> import pyarrow.compute as pc
>>> s.struct.field(pc.field("project"))
0 pandas
1 pandas
2 numpy
Name: project, dtype: string[pyarrow]
For nested struct types, you can pass a list of values to index
multiple levels:
>>> version_type = pa.struct(
... [
... ("major", pa.int64()),
... ("minor", pa.int64()),
... ]
... )
>>> s = pd.Series(
... [
... {"version": {"major": 1, "minor": 5}, "project": "pandas"},
... {"version": {"major": 2, "minor": 1}, "project": "pandas"},
... {"version": {"major": 1, "minor": 26}, "project": "numpy"},
... ],
... dtype=pd.ArrowDtype(
... pa.struct([("version", version_type), ("project", pa.string())])
... ),
... )
>>> s.struct.field(["version", "minor"])
0 5
1 1
2 26
Name: minor, dtype: int64[pyarrow]
>>> s.struct.field([0, 0])
0 1
1 2
2 1
Name: major, dtype: int64[pyarrow]
"""
from pandas import Series
def get_name(
level_name_or_index: list[str]
| list[bytes]
| list[int]
| pc.Expression
| bytes
| str
| int,
data: pa.ChunkedArray,
):
if isinstance(level_name_or_index, int):
name = data.type.field(level_name_or_index).name
elif isinstance(level_name_or_index, (str, bytes)):
name = level_name_or_index
elif isinstance(level_name_or_index, pc.Expression):
name = str(level_name_or_index)
elif is_list_like(level_name_or_index):
# For nested input like [2, 1, 2]
# iteratively get the struct and field name. The last
# one is used for the name of the index.
level_name_or_index = list(reversed(level_name_or_index))
selected = data
while level_name_or_index:
# we need the cast, otherwise mypy complains about
# getting ints, bytes, or str here, which isn't possible.
level_name_or_index = cast(list, level_name_or_index)
name_or_index = level_name_or_index.pop()
name = get_name(name_or_index, selected)
selected = selected.type.field(selected.type.get_field_index(name))
name = selected.name
else:
raise ValueError(
"name_or_index must be an int, str, bytes, "
"pyarrow.compute.Expression, or list of those"
)
return name
pa_arr = self._data.array._pa_array
name = get_name(name_or_index, pa_arr)
field_arr = pc.struct_field(pa_arr, name_or_index)
return Series(
field_arr,
dtype=ArrowDtype(field_arr.type),
index=self._data.index,
name=name,
)
def explode(self) -> DataFrame:
"""
Extract all child fields of a struct as a DataFrame.
Returns
-------
pandas.DataFrame
The data corresponding to all child fields.
See Also
--------
Series.struct.field : Return a single child field as a Series.
Examples
--------
>>> import pyarrow as pa
>>> s = pd.Series(
... [
... {"version": 1, "project": "pandas"},
... {"version": 2, "project": "pandas"},
... {"version": 1, "project": "numpy"},
... ],
... dtype=pd.ArrowDtype(
... pa.struct([("version", pa.int64()), ("project", pa.string())])
... ),
... )
>>> s.struct.explode()
version project
0 1 pandas
1 2 pandas
2 1 numpy
"""
from pandas import concat
pa_type = self._pa_array.type
return concat(
[self.field(i) for i in range(pa_type.num_fields)], axis="columns"
)
| StructAccessor |
python | scrapy__scrapy | tests/mockserver/http_resources.py | {
"start": 1274,
"end": 1531
} | class ____(resource.Resource):
"""
A testing resource which renders itself as the value of the host header
from the request.
"""
def render(self, request):
return request.requestHeaders.getRawHeaders(b"host")[0]
| HostHeaderResource |
python | pytorch__pytorch | test/torch_np/numpy_tests/lib/test_index_tricks.py | {
"start": 1300,
"end": 8902
} | class ____(TestCase):
def test_basic(self):
assert_equal(np.unravel_index(2, (2, 2)), (1, 0))
# test that new shape argument works properly
assert_equal(np.unravel_index(indices=2, shape=(2, 2)), (1, 0))
# test that an invalid second keyword argument
# is properly handled, including the old name `dims`.
with assert_raises(TypeError):
np.unravel_index(indices=2, hape=(2, 2))
with assert_raises(TypeError):
np.unravel_index(2, hape=(2, 2))
with assert_raises(TypeError):
np.unravel_index(254, ims=(17, 94))
with assert_raises(TypeError):
np.unravel_index(254, dims=(17, 94))
assert_equal(np.ravel_multi_index((1, 0), (2, 2)), 2)
assert_equal(np.unravel_index(254, (17, 94)), (2, 66))
assert_equal(np.ravel_multi_index((2, 66), (17, 94)), 254)
assert_raises(ValueError, np.unravel_index, -1, (2, 2))
assert_raises(TypeError, np.unravel_index, 0.5, (2, 2))
assert_raises(ValueError, np.unravel_index, 4, (2, 2))
assert_raises(ValueError, np.ravel_multi_index, (-3, 1), (2, 2))
assert_raises(ValueError, np.ravel_multi_index, (2, 1), (2, 2))
assert_raises(ValueError, np.ravel_multi_index, (0, -3), (2, 2))
assert_raises(ValueError, np.ravel_multi_index, (0, 2), (2, 2))
assert_raises(TypeError, np.ravel_multi_index, (0.1, 0.0), (2, 2))
assert_equal(np.unravel_index((2 * 3 + 1) * 6 + 4, (4, 3, 6)), [2, 1, 4])
assert_equal(np.ravel_multi_index([2, 1, 4], (4, 3, 6)), (2 * 3 + 1) * 6 + 4)
arr = np.array([[3, 6, 6], [4, 5, 1]])
assert_equal(np.ravel_multi_index(arr, (7, 6)), [22, 41, 37])
assert_equal(np.ravel_multi_index(arr, (7, 6), order="F"), [31, 41, 13])
assert_equal(np.ravel_multi_index(arr, (4, 6), mode="clip"), [22, 23, 19])
assert_equal(
np.ravel_multi_index(arr, (4, 4), mode=("clip", "wrap")), [12, 13, 13]
)
assert_equal(np.ravel_multi_index((3, 1, 4, 1), (6, 7, 8, 9)), 1621)
assert_equal(
np.unravel_index(np.array([22, 41, 37]), (7, 6)), [[3, 6, 6], [4, 5, 1]]
)
assert_equal(
np.unravel_index(np.array([31, 41, 13]), (7, 6), order="F"),
[[3, 6, 6], [4, 5, 1]],
)
assert_equal(np.unravel_index(1621, (6, 7, 8, 9)), [3, 1, 4, 1])
def test_empty_indices(self):
msg1 = "indices must be integral: the provided empty sequence was"
msg2 = "only int indices permitted"
assert_raises_regex(TypeError, msg1, np.unravel_index, [], (10, 3, 5))
assert_raises_regex(TypeError, msg1, np.unravel_index, (), (10, 3, 5))
assert_raises_regex(TypeError, msg2, np.unravel_index, np.array([]), (10, 3, 5))
assert_equal(
np.unravel_index(np.array([], dtype=int), (10, 3, 5)), [[], [], []]
)
assert_raises_regex(TypeError, msg1, np.ravel_multi_index, ([], []), (10, 3))
assert_raises_regex(
TypeError, msg1, np.ravel_multi_index, ([], ["abc"]), (10, 3)
)
assert_raises_regex(
TypeError, msg2, np.ravel_multi_index, (np.array([]), np.array([])), (5, 3)
)
assert_equal(
np.ravel_multi_index(
(np.array([], dtype=int), np.array([], dtype=int)), (5, 3)
),
[],
)
assert_equal(np.ravel_multi_index(np.array([[], []], dtype=int), (5, 3)), [])
def test_big_indices(self):
# ravel_multi_index for big indices (issue #7546)
if np.intp == np.int64:
arr = ([1, 29], [3, 5], [3, 117], [19, 2], [2379, 1284], [2, 2], [0, 1])
assert_equal(
np.ravel_multi_index(arr, (41, 7, 120, 36, 2706, 8, 6)),
[5627771580, 117259570957],
)
# test unravel_index for big indices (issue #9538)
assert_raises(ValueError, np.unravel_index, 1, (2**32 - 1, 2**31 + 1))
# test overflow checking for too big array (issue #7546)
dummy_arr = ([0], [0])
half_max = np.iinfo(np.intp).max // 2
assert_equal(np.ravel_multi_index(dummy_arr, (half_max, 2)), [0])
assert_raises(ValueError, np.ravel_multi_index, dummy_arr, (half_max + 1, 2))
assert_equal(np.ravel_multi_index(dummy_arr, (half_max, 2), order="F"), [0])
assert_raises(
ValueError, np.ravel_multi_index, dummy_arr, (half_max + 1, 2), order="F"
)
def test_dtypes(self):
# Test with different data types
for dtype in [np.int16, np.uint16, np.int32, np.uint32, np.int64, np.uint64]:
coords = np.array([[1, 0, 1, 2, 3, 4], [1, 6, 1, 3, 2, 0]], dtype=dtype)
shape = (5, 8)
uncoords = 8 * coords[0] + coords[1]
assert_equal(np.ravel_multi_index(coords, shape), uncoords)
assert_equal(coords, np.unravel_index(uncoords, shape))
uncoords = coords[0] + 5 * coords[1]
assert_equal(np.ravel_multi_index(coords, shape, order="F"), uncoords)
assert_equal(coords, np.unravel_index(uncoords, shape, order="F"))
coords = np.array(
[[1, 0, 1, 2, 3, 4], [1, 6, 1, 3, 2, 0], [1, 3, 1, 0, 9, 5]],
dtype=dtype,
)
shape = (5, 8, 10)
uncoords = 10 * (8 * coords[0] + coords[1]) + coords[2]
assert_equal(np.ravel_multi_index(coords, shape), uncoords)
assert_equal(coords, np.unravel_index(uncoords, shape))
uncoords = coords[0] + 5 * (coords[1] + 8 * coords[2])
assert_equal(np.ravel_multi_index(coords, shape, order="F"), uncoords)
assert_equal(coords, np.unravel_index(uncoords, shape, order="F"))
def test_clipmodes(self):
# Test clipmodes
assert_equal(
np.ravel_multi_index([5, 1, -1, 2], (4, 3, 7, 12), mode="wrap"),
np.ravel_multi_index([1, 1, 6, 2], (4, 3, 7, 12)),
)
assert_equal(
np.ravel_multi_index(
[5, 1, -1, 2], (4, 3, 7, 12), mode=("wrap", "raise", "clip", "raise")
),
np.ravel_multi_index([1, 1, 0, 2], (4, 3, 7, 12)),
)
assert_raises(ValueError, np.ravel_multi_index, [5, 1, -1, 2], (4, 3, 7, 12))
def test_writeability(self):
# See gh-7269
x, y = np.unravel_index([1, 2, 3], (4, 5))
assert_(x.flags.writeable)
assert_(y.flags.writeable)
def test_0d(self):
# gh-580
x = np.unravel_index(0, ())
assert_equal(x, ())
assert_raises_regex(ValueError, "0d array", np.unravel_index, [0], ())
assert_raises_regex(ValueError, "out of bounds", np.unravel_index, [1], ())
@parametrize("mode", ["clip", "wrap", "raise"])
def test_empty_array_ravel(self, mode):
res = np.ravel_multi_index(
np.zeros((3, 0), dtype=np.intp), (2, 1, 0), mode=mode
)
assert res.shape == (0,)
with assert_raises(ValueError):
np.ravel_multi_index(np.zeros((3, 1), dtype=np.intp), (2, 1, 0), mode=mode)
def test_empty_array_unravel(self):
res = np.unravel_index(np.zeros(0, dtype=np.intp), (2, 1, 0))
# res is a tuple of three empty arrays
assert len(res) == 3
assert all(a.shape == (0,) for a in res)
with assert_raises(ValueError):
np.unravel_index([1], (2, 1, 0))
@xfail # (reason="mgrid not implemented")
@instantiate_parametrized_tests
| TestRavelUnravelIndex |
python | getsentry__sentry | src/sentry/apidocs/extensions.py | {
"start": 446,
"end": 1266
} | class ____(OpenApiAuthenticationExtension):
"""
Extension that adds what scopes are needed to access an endpoint to the
OpenAPI Schema.
"""
target_class = "sentry.api.authentication.UserAuthTokenAuthentication"
name = "auth_token"
def get_security_requirement(self, auto_schema: AutoSchema) -> dict[str, list[Any]]:
scopes = set()
for permission in auto_schema.view.get_permissions():
for s in permission.scope_map.get(auto_schema.method, []):
scopes.add(s)
scope_list = list(scopes)
scope_list.sort()
return {self.name: scope_list}
def get_security_definition(
self, auto_schema: AutoSchema
) -> dict[str, Any] | list[dict[str, Any]]:
return {"type": "http", "scheme": "bearer"}
| TokenAuthExtension |
python | walkccc__LeetCode | solutions/1275. Find Winner on a Tic Tac Toe Game/1275.py | {
"start": 0,
"end": 478
} | class ____:
def tictactoe(self, moves: list[list[int]]) -> str:
row = [[0] * 3 for _ in range(2)]
col = [[0] * 3 for _ in range(2)]
diag1 = [0] * 2
diag2 = [0] * 2
i = 0
for r, c in moves:
row[i][r] += 1
col[i][c] += 1
diag1[i] += r == c
diag2[i] += r + c == 2
if 3 in (row[i][r], col[i][c], diag1[i], diag2[i]):
return 'A' if i == 0 else 'B'
i ^= 1
return 'Draw' if len(moves) == 9 else 'Pending'
| Solution |
python | huggingface__transformers | src/transformers/models/mpt/modeling_mpt.py | {
"start": 6934,
"end": 8836
} | class ____(GradientCheckpointingLayer):
def __init__(self, config: MptConfig, layer_idx: Optional[int] = None):
super().__init__()
hidden_size = config.hidden_size
self.norm_1 = LayerNorm(hidden_size, eps=config.layer_norm_epsilon)
# backward compatibility with weights on the Hub
self.norm_1.bias = None
self.num_heads = config.n_heads
self.attn = MptAttention(config, layer_idx)
self.norm_2 = LayerNorm(hidden_size, eps=config.layer_norm_epsilon)
# backward compatibility with weights on the Hub
self.norm_2.bias = None
self.ffn = MptMLP(config)
self.dropout_rate = config.attn_config.attn_pdrop
self.resid_attn_dropout = nn.Dropout(self.dropout_rate)
def forward(
self,
hidden_states: torch.Tensor,
position_bias: torch.Tensor,
attention_mask: torch.Tensor,
layer_past: Optional[Cache] = None,
use_cache: bool = False,
output_attentions: bool = False,
cache_position: Optional[torch.Tensor] = None,
):
# hidden_states: [batch_size, seq_length, hidden_size]
# Layer norm at the beginning of the transformer layer.
layernorm_output = self.norm_1(hidden_states)
residual = hidden_states
# Self attention.
attn_outputs, attn_weights = self.attn(
layernorm_output,
position_bias=position_bias,
attention_mask=attention_mask,
past_key_values=layer_past,
cache_position=cache_position,
)
hidden_states = self.resid_attn_dropout(attn_outputs) + residual
layernorm_output = self.norm_2(hidden_states)
# Get residual
residual = hidden_states
# MLP.
output = self.ffn(layernorm_output, residual)
return output, attn_weights
@auto_docstring
| MptBlock |
python | mozilla__bleach | tests/test_callbacks.py | {
"start": 1114,
"end": 1846
} | class ____:
def test_empty(self):
attrs = {}
assert target_blank(attrs) == attrs
def test_mailto(self):
attrs = {(None, "href"): "mailto:joe@example.com"}
assert target_blank(attrs) == attrs
def test_add_target(self):
attrs = {(None, "href"): "http://example.com"}
assert target_blank(attrs) == {
(None, "href"): "http://example.com",
(None, "target"): "_blank",
}
def test_stomp_target(self):
attrs = {(None, "href"): "http://example.com", (None, "target"): "foo"}
assert target_blank(attrs) == {
(None, "href"): "http://example.com",
(None, "target"): "_blank",
}
| TestTargetBlankCallback |
python | numpy__numpy | benchmarks/benchmarks/bench_creation.py | {
"start": 1612,
"end": 2015
} | class ____(Benchmark):
""" Benchmark for creation functions
"""
params = [[16, 32, (16, 16), (64, 64)],
TYPES1]
param_names = ['shape', 'npdtypes']
timeout = 10
def setup(self, shape, npdtypes):
values = get_squares_()
self.xarg = values.get(npdtypes)[0]
def time_from_dlpack(self, shape, npdtypes):
np.from_dlpack(self.xarg)
| UfuncsFromDLP |
python | astropy__astropy | astropy/time/formats.py | {
"start": 64958,
"end": 66180
} | class ____(TimeISO):
"""
ISO 8601 compliant date-time format "YYYY-MM-DDTHH:MM:SS.sss...".
This is the same as TimeISO except for a "T" instead of space between
the date and time.
For example, 2000-01-01T00:00:00.000 is midnight on January 1, 2000.
The allowed subformats are:
- 'date_hms': date + hours, mins, secs (and optional fractional secs)
- 'date_hm': date + hours, mins
- 'date': date
"""
name = "isot"
subfmts = (
(
"date_hms",
"%Y-%m-%dT%H:%M:%S",
"{year:d}-{mon:02d}-{day:02d}T{hour:02d}:{min:02d}:{sec:02d}",
),
(
"date_hm",
"%Y-%m-%dT%H:%M",
"{year:d}-{mon:02d}-{day:02d}T{hour:02d}:{min:02d}",
),
("date", "%Y-%m-%d", "{year:d}-{mon:02d}-{day:02d}"),
)
# See TimeISO for explanation
fast_parser_pars = dict(
delims=(0, ord("-"), ord("-"), ord("T"), ord(":"), ord(":"), ord(".")),
starts=(0, 4, 7, 10, 13, 16, 19),
stops=(3, 6, 9, 12, 15, 18, -1),
# Break allowed *before*
# y m d h m s f
break_allowed=(0, 0, 0, 1, 0, 1, 1),
has_day_of_year=0,
)
| TimeISOT |
python | huggingface__transformers | src/transformers/models/speecht5/modeling_speecht5.py | {
"start": 42419,
"end": 43397
} | class ____(nn.Module):
def __init__(self, config, intermediate_size):
super().__init__()
self.intermediate_dropout = nn.Dropout(config.activation_dropout)
self.intermediate_dense = nn.Linear(config.hidden_size, intermediate_size)
if isinstance(config.hidden_act, str):
self.intermediate_act_fn = ACT2FN[config.hidden_act]
else:
self.intermediate_act_fn = config.hidden_act
self.output_dense = nn.Linear(intermediate_size, config.hidden_size)
self.output_dropout = nn.Dropout(config.hidden_dropout)
def forward(self, hidden_states):
hidden_states = self.intermediate_dense(hidden_states)
hidden_states = self.intermediate_act_fn(hidden_states)
hidden_states = self.intermediate_dropout(hidden_states)
hidden_states = self.output_dense(hidden_states)
hidden_states = self.output_dropout(hidden_states)
return hidden_states
| SpeechT5FeedForward |
python | dask__distributed | distributed/http/routing.py | {
"start": 1637,
"end": 2548
} | class ____(web.Application):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.applications = []
self.add_handlers(".*$", [(r"/sitemap.json", DirectoryHandler)])
def find_handler( # type: ignore[no-untyped-def]
self, request: tornado.httputil.HTTPServerRequest, **kwargs
):
handler = super().find_handler(request, **kwargs)
if handler and not issubclass(handler.handler_class, web.ErrorHandler):
return handler
else:
for app in self.applications:
handler = app.find_handler(request, **kwargs) or handler
if handler and not issubclass(handler.handler_class, web.ErrorHandler):
break
return handler
def add_application(self, application: web.Application) -> None:
self.applications.append(application)
| RoutingApplication |
python | pandas-dev__pandas | pandas/tests/reshape/concat/test_datetimes.py | {
"start": 9334,
"end": 17348
} | class ____:
def test_concat_tz_series(self):
# gh-11755: tz and no tz
x = Series(date_range("20151124 08:00", "20151124 09:00", freq="1h", tz="UTC"))
y = Series(date_range("2012-01-01", "2012-01-02"))
expected = Series([x[0], x[1], y[0], y[1]], dtype="object")
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
def test_concat_tz_series2(self):
# gh-11887: concat tz and object
x = Series(date_range("20151124 08:00", "20151124 09:00", freq="1h", tz="UTC"))
y = Series(["a", "b"])
expected = Series([x[0], x[1], y[0], y[1]], dtype="object")
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
def test_concat_tz_series3(self, unit, unit2):
# see gh-12217 and gh-12306
# Concatenating two UTC times
first = DataFrame([[datetime(2016, 1, 1)]], dtype=f"M8[{unit}]")
first[0] = first[0].dt.tz_localize("UTC")
second = DataFrame([[datetime(2016, 1, 2)]], dtype=f"M8[{unit2}]")
second[0] = second[0].dt.tz_localize("UTC")
result = concat([first, second])
exp_unit = tm.get_finest_unit(unit, unit2)
assert result[0].dtype == f"datetime64[{exp_unit}, UTC]"
def test_concat_tz_series4(self, unit, unit2):
# Concatenating two London times
first = DataFrame([[datetime(2016, 1, 1)]], dtype=f"M8[{unit}]")
first[0] = first[0].dt.tz_localize("Europe/London")
second = DataFrame([[datetime(2016, 1, 2)]], dtype=f"M8[{unit2}]")
second[0] = second[0].dt.tz_localize("Europe/London")
result = concat([first, second])
exp_unit = tm.get_finest_unit(unit, unit2)
assert result[0].dtype == f"datetime64[{exp_unit}, Europe/London]"
def test_concat_tz_series5(self, unit, unit2):
# Concatenating 2+1 London times
first = DataFrame(
[[datetime(2016, 1, 1)], [datetime(2016, 1, 2)]], dtype=f"M8[{unit}]"
)
first[0] = first[0].dt.tz_localize("Europe/London")
second = DataFrame([[datetime(2016, 1, 3)]], dtype=f"M8[{unit2}]")
second[0] = second[0].dt.tz_localize("Europe/London")
result = concat([first, second])
exp_unit = tm.get_finest_unit(unit, unit2)
assert result[0].dtype == f"datetime64[{exp_unit}, Europe/London]"
def test_concat_tz_series6(self, unit, unit2):
# Concatenating 1+2 London times
first = DataFrame([[datetime(2016, 1, 1)]], dtype=f"M8[{unit}]")
first[0] = first[0].dt.tz_localize("Europe/London")
second = DataFrame(
[[datetime(2016, 1, 2)], [datetime(2016, 1, 3)]], dtype=f"M8[{unit2}]"
)
second[0] = second[0].dt.tz_localize("Europe/London")
result = concat([first, second])
exp_unit = tm.get_finest_unit(unit, unit2)
assert result[0].dtype == f"datetime64[{exp_unit}, Europe/London]"
def test_concat_tz_series_tzlocal(self):
# see gh-13583
x = [
Timestamp("2011-01-01", tz=dateutil.tz.tzlocal()),
Timestamp("2011-02-01", tz=dateutil.tz.tzlocal()),
]
y = [
Timestamp("2012-01-01", tz=dateutil.tz.tzlocal()),
Timestamp("2012-02-01", tz=dateutil.tz.tzlocal()),
]
result = concat([Series(x), Series(y)], ignore_index=True)
tm.assert_series_equal(result, Series(x + y))
assert result.dtype == "datetime64[us, tzlocal()]"
def test_concat_tz_series_with_datetimelike(self):
# see gh-12620: tz and timedelta
x = [
Timestamp("2011-01-01", tz="US/Eastern"),
Timestamp("2011-02-01", tz="US/Eastern"),
]
y = [pd.Timedelta("1 day"), pd.Timedelta("2 day")]
result = concat([Series(x), Series(y)], ignore_index=True)
tm.assert_series_equal(result, Series(x + y, dtype="object"))
# tz and period
y = [pd.Period("2011-03", freq="M"), pd.Period("2011-04", freq="M")]
result = concat([Series(x), Series(y)], ignore_index=True)
tm.assert_series_equal(result, Series(x + y, dtype="object"))
def test_concat_tz_frame(self):
df2 = DataFrame(
{
"A": Timestamp("20130102", tz="US/Eastern"),
"B": Timestamp("20130603", tz="CET"),
},
index=range(5),
)
# concat
df3 = concat([df2.A.to_frame(), df2.B.to_frame()], axis=1)
tm.assert_frame_equal(df2, df3)
def test_concat_multiple_tzs(self):
# GH#12467
# combining datetime tz-aware and naive DataFrames
ts1 = Timestamp("2015-01-01", tz=None)
ts2 = Timestamp("2015-01-01", tz="UTC")
ts3 = Timestamp("2015-01-01", tz="EST")
df1 = DataFrame({"time": [ts1]})
df2 = DataFrame({"time": [ts2]})
df3 = DataFrame({"time": [ts3]})
results = concat([df1, df2]).reset_index(drop=True)
expected = DataFrame({"time": [ts1, ts2]}, dtype=object)
tm.assert_frame_equal(results, expected)
results = concat([df1, df3]).reset_index(drop=True)
expected = DataFrame({"time": [ts1, ts3]}, dtype=object)
tm.assert_frame_equal(results, expected)
results = concat([df2, df3]).reset_index(drop=True)
expected = DataFrame({"time": [ts2, ts3]})
tm.assert_frame_equal(results, expected)
def test_concat_multiindex_with_tz(self):
# GH 6606
df = DataFrame(
{
"dt": DatetimeIndex(
[
datetime(2014, 1, 1),
datetime(2014, 1, 2),
datetime(2014, 1, 3),
],
dtype="M8[ns, US/Pacific]",
),
"b": ["A", "B", "C"],
"c": [1, 2, 3],
"d": [4, 5, 6],
}
)
df = df.set_index(["dt", "b"])
exp_idx1 = DatetimeIndex(
["2014-01-01", "2014-01-02", "2014-01-03"] * 2,
dtype="M8[ns, US/Pacific]",
name="dt",
)
exp_idx2 = Index(["A", "B", "C"] * 2, name="b")
exp_idx = MultiIndex.from_arrays([exp_idx1, exp_idx2])
expected = DataFrame(
{"c": [1, 2, 3] * 2, "d": [4, 5, 6] * 2}, index=exp_idx, columns=["c", "d"]
)
result = concat([df, df])
tm.assert_frame_equal(result, expected)
def test_concat_tz_not_aligned(self):
# GH#22796
ts = pd.to_datetime([1, 2]).tz_localize("UTC")
a = DataFrame({"A": ts})
b = DataFrame({"A": ts, "B": ts})
result = concat([a, b], sort=True, ignore_index=True)
expected = DataFrame(
{"A": list(ts) + list(ts), "B": [pd.NaT, pd.NaT] + list(ts)}
)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"t1",
[
"2015-01-01",
pytest.param(
pd.NaT,
marks=pytest.mark.xfail(
reason="GH23037 incorrect dtype when concatenating"
),
),
],
)
def test_concat_tz_NaT(self, t1):
# GH#22796
# Concatenating tz-aware multicolumn DataFrames
ts1 = Timestamp(t1, tz="UTC")
ts2 = Timestamp("2015-01-01", tz="UTC")
ts3 = Timestamp("2015-01-01", tz="UTC")
df1 = DataFrame([[ts1, ts2]])
df2 = DataFrame([[ts3]])
result = concat([df1, df2])
expected = DataFrame([[ts1, ts2], [ts3, pd.NaT]], index=[0, 0])
tm.assert_frame_equal(result, expected)
def test_concat_tz_with_empty(self):
# GH 9188
result = concat(
[DataFrame(date_range("2000", periods=1, tz="UTC")), DataFrame()]
)
expected = DataFrame(date_range("2000", periods=1, tz="UTC"))
tm.assert_frame_equal(result, expected)
| TestTimezoneConcat |
python | django__django | tests/gis_tests/geoapp/models.py | {
"start": 252,
"end": 349
} | class ____(NamedModel):
mpoly = models.MultiPolygonField() # SRID, by default, is 4326
| Country |
python | sphinx-doc__sphinx | sphinx/ext/autodoc/_property_types.py | {
"start": 2943,
"end": 4191
} | class ____(_ItemProperties):
obj_type: Literal['class', 'exception']
bases: Sequence[tuple[str, ...]] | None
_obj___name__: str | None
_obj___qualname__: str | None
_obj_bases: tuple[str, ...]
_obj_is_new_type: bool
_obj_is_typevar: bool
_signature_method_name: str = ''
@property
def doc_as_attr(self) -> bool:
# if the class is documented under another name, document it
# as data/attribute
if self._obj___name__ is None:
return True
return self.parts[-1] != self._obj___name__
@property
def canonical_full_name(self) -> str | None:
modname = self._obj___module__
if modname is None:
modname = self.module_name
qualname = self._obj___qualname__
if qualname is None:
qualname = self._obj___name__
if not modname or not qualname or '<locals>' in qualname:
# No valid qualname found if the object is defined as locals
return None
return f'{modname}.{qualname}'
@property
def _groupwise_order_key(self) -> int:
return 10 if self.obj_type == 'exception' else 20
@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
| _ClassDefProperties |
python | jazzband__django-model-utils | tests/models.py | {
"start": 10740,
"end": 10991
} | class ____(SoftDeletableModel):
"""
Test model with additional manager for full access to model
instances.
"""
name = models.CharField(max_length=20)
all_objects: ClassVar[Manager[SoftDeletable]] = models.Manager()
| SoftDeletable |
python | tensorflow__tensorflow | tensorflow/python/autograph/converters/break_statements.py | {
"start": 1101,
"end": 1294
} | class ____(object):
def __init__(self):
self.used = False
self.control_var_name = None
def __repr__(self):
return 'used: %s, var: %s' % (self.used, self.control_var_name)
| _Break |
python | getsentry__sentry | tests/sentry/integrations/api/endpoints/test_external_team_details.py | {
"start": 117,
"end": 2580
} | class ____(APITestCase):
endpoint = "sentry-api-0-external-team-details"
method = "put"
def setUp(self) -> None:
super().setUp()
self.login_as(self.user)
self.external_team = self.create_external_team(
self.team, external_name="@getsentry/ecosystem"
)
def test_basic_delete(self) -> None:
self.get_success_response(
self.organization.slug, self.team.slug, self.external_team.id, method="delete"
)
assert not ExternalActor.objects.filter(id=str(self.external_team.id)).exists()
def test_basic_update(self) -> None:
with self.feature({"organizations:integrations-codeowners": True}):
data = {"externalName": "@getsentry/growth"}
response = self.get_success_response(
self.organization.slug, self.team.slug, self.external_team.id, **data
)
assert response.data["id"] == str(self.external_team.id)
assert response.data["externalName"] == "@getsentry/growth"
def test_ignore_camelcase_teamid(self) -> None:
other_team = self.create_team(organization=self.organization)
data = {
"externalName": "@getsentry/growth",
"teamId": other_team.id,
}
with self.feature({"organizations:integrations-codeowners": True}):
self.get_success_response(
self.organization.slug, self.team.slug, self.external_team.id, **data
)
assert not ExternalActor.objects.filter(team_id=other_team.id).exists()
def test_invalid_provider_update(self) -> None:
data = {"provider": "git"}
with self.feature({"organizations:integrations-codeowners": True}):
response = self.get_error_response(
self.organization.slug,
self.team.slug,
self.external_team.id,
status_code=400,
**data,
)
assert response.data == {"provider": ['"git" is not a valid choice.']}
def test_delete_another_orgs_external_team(self) -> None:
invalid_user = self.create_user()
invalid_organization = self.create_organization(owner=invalid_user)
self.login_as(user=invalid_user)
resp = self.get_error_response(
invalid_organization.slug, self.team.slug, self.external_team.id, method="delete"
)
assert resp.status_code == 404
| ExternalTeamDetailsTest |
python | pypa__warehouse | tests/conftest.py | {
"start": 18446,
"end": 21830
} | class ____:
def __init__(self):
self.queries = []
self.recording = False
def __enter__(self):
self.start()
def __exit__(self, type, value, traceback):
self.stop()
def record(self, conn, cursor, statement, *args):
if self.recording:
self.queries.append(statement)
def start(self):
self.recording = True
def stop(self):
self.recording = False
def clear(self):
self.queries = []
@pytest.fixture
def query_recorder(app_config):
recorder = QueryRecorder()
engine = app_config.registry["sqlalchemy.engine"]
event.listen(engine, "before_cursor_execute", recorder.record)
try:
yield recorder
finally:
event.remove(engine, "before_cursor_execute", recorder.record)
recorder.clear()
@pytest.fixture
def db_request(pyramid_request, db_session, tm):
pyramid_request.db = db_session
pyramid_request.tm = tm
pyramid_request.flags = admin.flags.Flags(pyramid_request)
pyramid_request.banned = admin.bans.Bans(pyramid_request)
pyramid_request.organization_access = True
pyramid_request.ip_address = IpAddressFactory.create(
ip_address=pyramid_request.remote_addr,
hashed_ip_address=pyramid_request.remote_addr_hashed,
)
return pyramid_request
@pytest.fixture
def _enable_all_oidc_providers(webtest):
flags = (
AdminFlagValue.DISALLOW_ACTIVESTATE_OIDC,
AdminFlagValue.DISALLOW_GITLAB_OIDC,
AdminFlagValue.DISALLOW_GITHUB_OIDC,
AdminFlagValue.DISALLOW_GOOGLE_OIDC,
)
original_flag_values = {}
db_sess = webtest.extra_environ["warehouse.db_session"]
for flag in flags:
flag_db = db_sess.get(AdminFlag, flag.value)
original_flag_values[flag] = flag_db.enabled
flag_db.enabled = False
yield
for flag in flags:
flag_db = db_sess.get(AdminFlag, flag.value)
flag_db.enabled = original_flag_values[flag]
@pytest.fixture
def _enable_organizations(db_request):
flag = db_request.db.get(AdminFlag, AdminFlagValue.DISABLE_ORGANIZATIONS.value)
flag.enabled = False
yield
flag.enabled = True
@pytest.fixture
def send_email(pyramid_request, monkeypatch):
send_email_stub = pretend.stub(
delay=pretend.call_recorder(lambda *args, **kwargs: None)
)
pyramid_request.task = pretend.call_recorder(
lambda *args, **kwargs: send_email_stub
)
pyramid_request.registry.settings = {"mail.sender": "noreply@example.com"}
monkeypatch.setattr(warehouse.email, "send_email", send_email_stub)
return send_email_stub
@pytest.fixture
def make_email_renderers(pyramid_config):
def _make_email_renderers(
name,
subject="Email Subject",
body="Email Body",
html="Email HTML Body",
):
subject_renderer = pyramid_config.testing_add_renderer(
f"email/{name}/subject.txt"
)
subject_renderer.string_response = subject
body_renderer = pyramid_config.testing_add_renderer(f"email/{name}/body.txt")
body_renderer.string_response = body
html_renderer = pyramid_config.testing_add_renderer(f"email/{name}/body.html")
html_renderer.string_response = html
return subject_renderer, body_renderer, html_renderer
return _make_email_renderers
| QueryRecorder |
python | plotly__plotly.py | plotly/graph_objs/choroplethmap/marker/_line.py | {
"start": 233,
"end": 5303
} | class ____(_BaseTraceHierarchyType):
_parent_path_str = "choroplethmap.marker"
_path_str = "choroplethmap.marker.line"
_valid_props = {"color", "colorsrc", "width", "widthsrc"}
@property
def color(self):
"""
Sets the marker.line color. It accepts either a specific color
or an array of numbers that are mapped to the colorscale
relative to the max and min values of the array or relative to
`marker.line.cmin` and `marker.line.cmax` if set.
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color: see https://plotly.com/python/css-colors/ for a list
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
@property
def colorsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `color`.
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
@property
def width(self):
"""
Sets the width (in px) of the lines bounding the marker points.
The 'width' property is a number and may be specified as:
- An int or float in the interval [0, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["width"]
@width.setter
def width(self, val):
self["width"] = val
@property
def widthsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `width`.
The 'widthsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["widthsrc"]
@widthsrc.setter
def widthsrc(self, val):
self["widthsrc"] = val
@property
def _prop_descriptions(self):
return """\
color
Sets the marker.line color. It accepts either a
specific color or an array of numbers that are mapped
to the colorscale relative to the max and min values of
the array or relative to `marker.line.cmin` and
`marker.line.cmax` if set.
colorsrc
Sets the source reference on Chart Studio Cloud for
`color`.
width
Sets the width (in px) of the lines bounding the marker
points.
widthsrc
Sets the source reference on Chart Studio Cloud for
`width`.
"""
def __init__(
self, arg=None, color=None, colorsrc=None, width=None, widthsrc=None, **kwargs
):
"""
Construct a new Line object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.choroplethmap.marker.Line`
color
Sets the marker.line color. It accepts either a
specific color or an array of numbers that are mapped
to the colorscale relative to the max and min values of
the array or relative to `marker.line.cmin` and
`marker.line.cmax` if set.
colorsrc
Sets the source reference on Chart Studio Cloud for
`color`.
width
Sets the width (in px) of the lines bounding the marker
points.
widthsrc
Sets the source reference on Chart Studio Cloud for
`width`.
Returns
-------
Line
"""
super().__init__("line")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError("""\
The first argument to the plotly.graph_objs.choroplethmap.marker.Line
constructor must be a dict or
an instance of :class:`plotly.graph_objs.choroplethmap.marker.Line`""")
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
self._set_property("color", arg, color)
self._set_property("colorsrc", arg, colorsrc)
self._set_property("width", arg, width)
self._set_property("widthsrc", arg, widthsrc)
self._process_kwargs(**dict(arg, **kwargs))
self._skip_invalid = False
| Line |
python | lepture__authlib | authlib/integrations/base_client/sync_app.py | {
"start": 1776,
"end": 2560
} | class ____:
def _get_requested_token(self, request):
if self._fetch_token and request:
return self._fetch_token(request)
def _send_token_request(self, session, method, url, token, kwargs):
request = kwargs.pop("request", None)
withhold_token = kwargs.get("withhold_token")
if self.api_base_url and not url.startswith(("https://", "http://")):
url = urlparse.urljoin(self.api_base_url, url)
if withhold_token:
return session.request(method, url, **kwargs)
if token is None:
token = self._get_requested_token(request)
if token is None:
raise MissingTokenError()
session.token = token
return session.request(method, url, **kwargs)
| _RequestMixin |
python | apache__airflow | providers/microsoft/azure/src/airflow/providers/microsoft/azure/log/wasb_task_handler.py | {
"start": 1516,
"end": 7047
} | class ____(LoggingMixin): # noqa: D101
remote_base: str
base_log_folder: Path = attrs.field(converter=Path)
delete_local_copy: bool
wasb_container: str
processors = ()
def upload(self, path: str | os.PathLike, ti: RuntimeTI):
"""Upload the given log path to the remote storage."""
path = Path(path)
if path.is_absolute():
local_loc = path
remote_loc = os.path.join(self.remote_base, path.relative_to(self.base_log_folder))
else:
local_loc = self.base_log_folder.joinpath(path)
remote_loc = os.path.join(self.remote_base, path)
if local_loc.is_file():
# read log and remove old logs to get just the latest additions
log = local_loc.read_text()
has_uploaded = self.write(log, remote_loc)
if has_uploaded and self.delete_local_copy:
shutil.rmtree(os.path.dirname(local_loc))
@cached_property
def hook(self):
"""Return WasbHook."""
remote_conn_id = conf.get("logging", "REMOTE_LOG_CONN_ID")
try:
from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
return WasbHook(remote_conn_id)
except Exception:
self.log.exception(
"Could not create a WasbHook with connection id '%s'. "
"Do you have apache-airflow[azure] installed? "
"Does connection the connection exist, and is it "
"configured properly?",
remote_conn_id,
)
return None
def read(self, relative_path, ti: RuntimeTI) -> tuple[LogSourceInfo, LogMessages | None]:
messages = []
logs = []
# TODO: fix this - "relative path" i.e currently REMOTE_BASE_LOG_FOLDER should start with "wasb"
# unlike others with shceme in URL itself to identify the correct handler.
# This puts limitations on ways users can name the base_path.
prefix = os.path.join(self.remote_base, relative_path)
blob_names = []
try:
blob_names = self.hook.get_blobs_list(container_name=self.wasb_container, prefix=prefix)
except HttpResponseError as e:
messages.append(f"tried listing blobs with prefix={prefix} and container={self.wasb_container}")
messages.append(f"could not list blobs {e}")
self.log.exception("can't list blobs")
if blob_names:
uris = [f"https://{self.wasb_container}.blob.core.windows.net/{b}" for b in blob_names]
if AIRFLOW_V_3_0_PLUS:
messages = uris
else:
messages.extend(["Found remote logs:", *[f" * {x}" for x in sorted(uris)]])
else:
return messages, None
for name in sorted(blob_names):
remote_log = ""
try:
remote_log = self.hook.read_file(self.wasb_container, name)
if remote_log:
logs.append(remote_log)
except Exception as e:
messages.append(
f"Unable to read remote blob '{name}' in container '{self.wasb_container}'\n{e}"
)
self.log.exception("Could not read blob")
return messages, logs
def wasb_log_exists(self, remote_log_location: str) -> bool:
"""
Check if remote_log_location exists in remote storage.
:param remote_log_location: log's location in remote storage
:return: True if location exists else False
"""
try:
return self.hook.check_for_blob(self.wasb_container, remote_log_location)
except Exception as e:
self.log.debug('Exception when trying to check remote location: "%s"', e)
return False
def wasb_read(self, remote_log_location: str, return_error: bool = False):
"""
Return the log found at the remote_log_location. Returns '' if no logs are found or there is an error.
:param remote_log_location: the log's location in remote storage
:param return_error: if True, returns a string error message if an
error occurs. Otherwise returns '' when an error occurs.
"""
try:
return self.hook.read_file(self.wasb_container, remote_log_location)
except Exception:
msg = f"Could not read logs from {remote_log_location}"
self.log.exception(msg)
# return error if needed
if return_error:
return msg
return ""
def write(self, log: str, remote_log_location: str, append: bool = True) -> bool:
"""
Write the log to the remote_log_location. Fails silently if no hook was created.
:param log: the log to write to the remote_log_location
:param remote_log_location: the log's location in remote storage
:param append: if False, any existing log file is overwritten. If True,
the new log is appended to any existing logs.
"""
if append and self.wasb_log_exists(remote_log_location):
old_log = self.wasb_read(remote_log_location)
log = f"{old_log}\n{log}" if old_log else log
try:
self.hook.load_string(log, self.wasb_container, remote_log_location, overwrite=True)
except Exception:
self.log.exception("Could not write logs to %s", remote_log_location)
return False
return True
| WasbRemoteLogIO |
python | plotly__plotly.py | plotly/graph_objs/contour/_colorbar.py | {
"start": 233,
"end": 61470
} | class ____(_BaseTraceHierarchyType):
_parent_path_str = "contour"
_path_str = "contour.colorbar"
_valid_props = {
"bgcolor",
"bordercolor",
"borderwidth",
"dtick",
"exponentformat",
"labelalias",
"len",
"lenmode",
"minexponent",
"nticks",
"orientation",
"outlinecolor",
"outlinewidth",
"separatethousands",
"showexponent",
"showticklabels",
"showtickprefix",
"showticksuffix",
"thickness",
"thicknessmode",
"tick0",
"tickangle",
"tickcolor",
"tickfont",
"tickformat",
"tickformatstopdefaults",
"tickformatstops",
"ticklabeloverflow",
"ticklabelposition",
"ticklabelstep",
"ticklen",
"tickmode",
"tickprefix",
"ticks",
"ticksuffix",
"ticktext",
"ticktextsrc",
"tickvals",
"tickvalssrc",
"tickwidth",
"title",
"x",
"xanchor",
"xpad",
"xref",
"y",
"yanchor",
"ypad",
"yref",
}
@property
def bgcolor(self):
"""
Sets the color of padded area.
The 'bgcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color: see https://plotly.com/python/css-colors/ for a list
Returns
-------
str
"""
return self["bgcolor"]
@bgcolor.setter
def bgcolor(self, val):
self["bgcolor"] = val
@property
def bordercolor(self):
"""
Sets the axis line color.
The 'bordercolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color: see https://plotly.com/python/css-colors/ for a list
Returns
-------
str
"""
return self["bordercolor"]
@bordercolor.setter
def bordercolor(self, val):
self["bordercolor"] = val
@property
def borderwidth(self):
"""
Sets the width (in px) or the border enclosing this color bar.
The 'borderwidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["borderwidth"]
@borderwidth.setter
def borderwidth(self, val):
self["borderwidth"] = val
@property
def dtick(self):
"""
Sets the step in-between ticks on this axis. Use with `tick0`.
Must be a positive number, or special strings available to
"log" and "date" axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick number. For
example, to set a tick mark at 1, 10, 100, 1000, ... set dtick
to 1. To set tick marks at 1, 100, 10000, ... set dtick to 2.
To set tick marks at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special values;
"L<f>", where `f` is a positive number, gives ticks linearly
spaced in value (but not position). For example `tick0` = 0.1,
`dtick` = "L0.5" will put ticks at 0.1, 0.6, 1.1, 1.6 etc. To
show powers of 10 plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is ignored for "D1" and
"D2". If the axis `type` is "date", then you must convert the
time to milliseconds. For example, to set the interval between
ticks to one day, set `dtick` to 86400000.0. "date" also has
special values "M<n>" gives ticks spaced by a number of months.
`n` must be a positive integer. To set ticks on the 15th of
every third month, set `tick0` to "2000-01-15" and `dtick` to
"M3". To set ticks every 4 years, set `dtick` to "M48"
The 'dtick' property accepts values of any type
Returns
-------
Any
"""
return self["dtick"]
@dtick.setter
def dtick(self, val):
self["dtick"] = val
@property
def exponentformat(self):
"""
Determines a formatting rule for the tick exponents. For
example, consider the number 1,000,000,000. If "none", it
appears as 1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If "SI", 1G. If
"B", 1B. "SI" uses prefixes from "femto" f (10^-15) to "tera" T
(10^12). *SI extended* covers instead the full SI range from
"quecto" q (10^-30) to "quetta" Q (10^30). If "SI" or *SI
extended* is used and the exponent is beyond the above ranges,
the formatting rule will automatically be switched to the power
notation.
The 'exponentformat' property is an enumeration that may be specified as:
- One of the following enumeration values:
['none', 'e', 'E', 'power', 'SI', 'B', 'SI extended']
Returns
-------
Any
"""
return self["exponentformat"]
@exponentformat.setter
def exponentformat(self, val):
self["exponentformat"] = val
@property
def labelalias(self):
"""
Replacement text for specific tick or hover labels. For example
using {US: 'USA', CA: 'Canada'} changes US to USA and CA to
Canada. The labels we would have shown must match the keys
exactly, after adding any tickprefix or ticksuffix. For
negative numbers the minus sign symbol used (U+2212) is wider
than the regular ascii dash. That means you need to use −1
instead of -1. labelalias can be used with any axis type, and
both keys (if needed) and values (if desired) can include html-
like tags or MathJax.
The 'labelalias' property accepts values of any type
Returns
-------
Any
"""
return self["labelalias"]
@labelalias.setter
def labelalias(self, val):
self["labelalias"] = val
@property
def len(self):
"""
Sets the length of the color bar This measure excludes the
padding of both ends. That is, the color bar length is this
length minus the padding on both ends.
The 'len' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["len"]
@len.setter
def len(self, val):
self["len"] = val
@property
def lenmode(self):
"""
Determines whether this color bar's length (i.e. the measure in
the color variation direction) is set in units of plot
"fraction" or in *pixels. Use `len` to set the value.
The 'lenmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['fraction', 'pixels']
Returns
-------
Any
"""
return self["lenmode"]
@lenmode.setter
def lenmode(self, val):
self["lenmode"] = val
@property
def minexponent(self):
"""
Hide SI prefix for 10^n if |n| is below this number. This only
has an effect when `tickformat` is "SI" or "B".
The 'minexponent' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["minexponent"]
@minexponent.setter
def minexponent(self, val):
self["minexponent"] = val
@property
def nticks(self):
"""
Specifies the maximum number of ticks for the particular axis.
The actual number of ticks will be chosen automatically to be
less than or equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
The 'nticks' property is a integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [0, 9223372036854775807]
Returns
-------
int
"""
return self["nticks"]
@nticks.setter
def nticks(self, val):
self["nticks"] = val
@property
def orientation(self):
"""
Sets the orientation of the colorbar.
The 'orientation' property is an enumeration that may be specified as:
- One of the following enumeration values:
['h', 'v']
Returns
-------
Any
"""
return self["orientation"]
@orientation.setter
def orientation(self, val):
self["orientation"] = val
@property
def outlinecolor(self):
"""
Sets the axis line color.
The 'outlinecolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color: see https://plotly.com/python/css-colors/ for a list
Returns
-------
str
"""
return self["outlinecolor"]
@outlinecolor.setter
def outlinecolor(self, val):
self["outlinecolor"] = val
@property
def outlinewidth(self):
"""
Sets the width (in px) of the axis line.
The 'outlinewidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["outlinewidth"]
@outlinewidth.setter
def outlinewidth(self, val):
self["outlinewidth"] = val
@property
def separatethousands(self):
"""
If "true", even 4-digit integers are separated
The 'separatethousands' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["separatethousands"]
@separatethousands.setter
def separatethousands(self, val):
self["separatethousands"] = val
@property
def showexponent(self):
"""
If "all", all exponents are shown besides their significands.
If "first", only the exponent of the first tick is shown. If
"last", only the exponent of the last tick is shown. If "none",
no exponents appear.
The 'showexponent' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showexponent"]
@showexponent.setter
def showexponent(self, val):
self["showexponent"] = val
@property
def showticklabels(self):
"""
Determines whether or not the tick labels are drawn.
The 'showticklabels' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showticklabels"]
@showticklabels.setter
def showticklabels(self, val):
self["showticklabels"] = val
@property
def showtickprefix(self):
"""
If "all", all tick labels are displayed with a prefix. If
"first", only the first tick is displayed with a prefix. If
"last", only the last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
The 'showtickprefix' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showtickprefix"]
@showtickprefix.setter
def showtickprefix(self, val):
self["showtickprefix"] = val
@property
def showticksuffix(self):
"""
Same as `showtickprefix` but for tick suffixes.
The 'showticksuffix' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showticksuffix"]
@showticksuffix.setter
def showticksuffix(self, val):
self["showticksuffix"] = val
@property
def thickness(self):
"""
Sets the thickness of the color bar This measure excludes the
size of the padding, ticks and labels.
The 'thickness' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["thickness"]
@thickness.setter
def thickness(self, val):
self["thickness"] = val
@property
def thicknessmode(self):
"""
Determines whether this color bar's thickness (i.e. the measure
in the constant color direction) is set in units of plot
"fraction" or in "pixels". Use `thickness` to set the value.
The 'thicknessmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['fraction', 'pixels']
Returns
-------
Any
"""
return self["thicknessmode"]
@thicknessmode.setter
def thicknessmode(self, val):
self["thicknessmode"] = val
@property
def tick0(self):
"""
Sets the placement of the first tick on this axis. Use with
`dtick`. If the axis `type` is "log", then you must take the
log of your starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when `dtick`=*L<f>* (see
`dtick` for more info). If the axis `type` is "date", it should
be a date string, like date data. If the axis `type` is
"category", it should be a number, using the scale where each
category is assigned a serial number from zero in the order it
appears.
The 'tick0' property accepts values of any type
Returns
-------
Any
"""
return self["tick0"]
@tick0.setter
def tick0(self, val):
self["tick0"] = val
@property
def tickangle(self):
"""
Sets the angle of the tick labels with respect to the
horizontal. For example, a `tickangle` of -90 draws the tick
labels vertically.
The 'tickangle' property is a angle (in degrees) that may be
specified as a number between -180 and 180.
Numeric values outside this range are converted to the equivalent value
(e.g. 270 is converted to -90).
Returns
-------
int|float
"""
return self["tickangle"]
@tickangle.setter
def tickangle(self, val):
self["tickangle"] = val
@property
def tickcolor(self):
"""
Sets the tick color.
The 'tickcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color: see https://plotly.com/python/css-colors/ for a list
Returns
-------
str
"""
return self["tickcolor"]
@tickcolor.setter
def tickcolor(self, val):
self["tickcolor"] = val
@property
def tickfont(self):
"""
Sets the color bar's tick label font
The 'tickfont' property is an instance of Tickfont
that may be specified as:
- An instance of :class:`plotly.graph_objs.contour.colorbar.Tickfont`
- A dict of string/value properties that will be passed
to the Tickfont constructor
Returns
-------
plotly.graph_objs.contour.colorbar.Tickfont
"""
return self["tickfont"]
@tickfont.setter
def tickfont(self, val):
self["tickfont"] = val
@property
def tickformat(self):
"""
Sets the tick label formatting rule using d3 formatting mini-
languages which are very similar to those in Python. For
numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format. And for
dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to d3's date
formatter: "%h" for half of the year as a decimal number as
well as "%{n}f" for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with tickformat
"%H~%M~%S.%2f" would display "09~15~23.46"
The 'tickformat' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["tickformat"]
@tickformat.setter
def tickformat(self, val):
self["tickformat"] = val
@property
def tickformatstops(self):
"""
The 'tickformatstops' property is a tuple of instances of
Tickformatstop that may be specified as:
- A list or tuple of instances of plotly.graph_objs.contour.colorbar.Tickformatstop
- A list or tuple of dicts of string/value properties that
will be passed to the Tickformatstop constructor
Returns
-------
tuple[plotly.graph_objs.contour.colorbar.Tickformatstop]
"""
return self["tickformatstops"]
@tickformatstops.setter
def tickformatstops(self, val):
self["tickformatstops"] = val
@property
def tickformatstopdefaults(self):
"""
When used in a template (as
layout.template.data.contour.colorbar.tickformatstopdefaults),
sets the default property values to use for elements of
contour.colorbar.tickformatstops
The 'tickformatstopdefaults' property is an instance of Tickformatstop
that may be specified as:
- An instance of :class:`plotly.graph_objs.contour.colorbar.Tickformatstop`
- A dict of string/value properties that will be passed
to the Tickformatstop constructor
Returns
-------
plotly.graph_objs.contour.colorbar.Tickformatstop
"""
return self["tickformatstopdefaults"]
@tickformatstopdefaults.setter
def tickformatstopdefaults(self, val):
self["tickformatstopdefaults"] = val
@property
def ticklabeloverflow(self):
"""
Determines how we handle tick labels that would overflow either
the graph div or the domain of the axis. The default value for
inside tick labels is *hide past domain*. In other cases the
default is *hide past div*.
The 'ticklabeloverflow' property is an enumeration that may be specified as:
- One of the following enumeration values:
['allow', 'hide past div', 'hide past domain']
Returns
-------
Any
"""
return self["ticklabeloverflow"]
@ticklabeloverflow.setter
def ticklabeloverflow(self, val):
self["ticklabeloverflow"] = val
@property
def ticklabelposition(self):
"""
Determines where tick labels are drawn relative to the ticks.
Left and right options are used when `orientation` is "h", top
and bottom when `orientation` is "v".
The 'ticklabelposition' property is an enumeration that may be specified as:
- One of the following enumeration values:
['outside', 'inside', 'outside top', 'inside top',
'outside left', 'inside left', 'outside right', 'inside
right', 'outside bottom', 'inside bottom']
Returns
-------
Any
"""
return self["ticklabelposition"]
@ticklabelposition.setter
def ticklabelposition(self, val):
self["ticklabelposition"] = val
@property
def ticklabelstep(self):
"""
Sets the spacing between tick labels as compared to the spacing
between ticks. A value of 1 (default) means each tick gets a
label. A value of 2 means shows every 2nd label. A larger value
n means only every nth tick is labeled. `tick0` determines
which labels are shown. Not implemented for axes with `type`
"log" or "multicategory", or when `tickmode` is "array".
The 'ticklabelstep' property is a integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [1, 9223372036854775807]
Returns
-------
int
"""
return self["ticklabelstep"]
@ticklabelstep.setter
def ticklabelstep(self, val):
self["ticklabelstep"] = val
@property
def ticklen(self):
"""
Sets the tick length (in px).
The 'ticklen' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["ticklen"]
@ticklen.setter
def ticklen(self, val):
self["ticklen"] = val
@property
def tickmode(self):
"""
Sets the tick mode for this axis. If "auto", the number of
ticks is set via `nticks`. If "linear", the placement of the
ticks is determined by a starting position `tick0` and a tick
step `dtick` ("linear" is the default value if `tick0` and
`dtick` are provided). If "array", the placement of the ticks
is set via `tickvals` and the tick text is `ticktext`. ("array"
is the default value if `tickvals` is provided).
The 'tickmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['auto', 'linear', 'array']
Returns
-------
Any
"""
return self["tickmode"]
@tickmode.setter
def tickmode(self, val):
self["tickmode"] = val
@property
def tickprefix(self):
"""
Sets a tick label prefix.
The 'tickprefix' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["tickprefix"]
@tickprefix.setter
def tickprefix(self, val):
self["tickprefix"] = val
@property
def ticks(self):
"""
Determines whether ticks are drawn or not. If "", this axis'
ticks are not drawn. If "outside" ("inside"), this axis' are
drawn outside (inside) the axis lines.
The 'ticks' property is an enumeration that may be specified as:
- One of the following enumeration values:
['outside', 'inside', '']
Returns
-------
Any
"""
return self["ticks"]
@ticks.setter
def ticks(self, val):
self["ticks"] = val
@property
def ticksuffix(self):
"""
Sets a tick label suffix.
The 'ticksuffix' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["ticksuffix"]
@ticksuffix.setter
def ticksuffix(self, val):
self["ticksuffix"] = val
@property
def ticktext(self):
"""
Sets the text displayed at the ticks position via `tickvals`.
Only has an effect if `tickmode` is set to "array". Used with
`tickvals`.
The 'ticktext' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["ticktext"]
@ticktext.setter
def ticktext(self, val):
self["ticktext"] = val
@property
def ticktextsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `ticktext`.
The 'ticktextsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["ticktextsrc"]
@ticktextsrc.setter
def ticktextsrc(self, val):
self["ticktextsrc"] = val
@property
def tickvals(self):
"""
Sets the values at which ticks on this axis appear. Only has an
effect if `tickmode` is set to "array". Used with `ticktext`.
The 'tickvals' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["tickvals"]
@tickvals.setter
def tickvals(self, val):
self["tickvals"] = val
@property
def tickvalssrc(self):
"""
Sets the source reference on Chart Studio Cloud for `tickvals`.
The 'tickvalssrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["tickvalssrc"]
@tickvalssrc.setter
def tickvalssrc(self, val):
self["tickvalssrc"] = val
@property
def tickwidth(self):
"""
Sets the tick width (in px).
The 'tickwidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["tickwidth"]
@tickwidth.setter
def tickwidth(self, val):
self["tickwidth"] = val
@property
def title(self):
"""
The 'title' property is an instance of Title
that may be specified as:
- An instance of :class:`plotly.graph_objs.contour.colorbar.Title`
- A dict of string/value properties that will be passed
to the Title constructor
Returns
-------
plotly.graph_objs.contour.colorbar.Title
"""
return self["title"]
@title.setter
def title(self, val):
self["title"] = val
@property
def x(self):
"""
Sets the x position with respect to `xref` of the color bar (in
plot fraction). When `xref` is "paper", defaults to 1.02 when
`orientation` is "v" and 0.5 when `orientation` is "h". When
`xref` is "container", defaults to 1 when `orientation` is "v"
and 0.5 when `orientation` is "h". Must be between 0 and 1 if
`xref` is "container" and between "-2" and 3 if `xref` is
"paper".
The 'x' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["x"]
@x.setter
def x(self, val):
self["x"] = val
@property
def xanchor(self):
"""
Sets this color bar's horizontal position anchor. This anchor
binds the `x` position to the "left", "center" or "right" of
the color bar. Defaults to "left" when `orientation` is "v" and
"center" when `orientation` is "h".
The 'xanchor' property is an enumeration that may be specified as:
- One of the following enumeration values:
['left', 'center', 'right']
Returns
-------
Any
"""
return self["xanchor"]
@xanchor.setter
def xanchor(self, val):
self["xanchor"] = val
@property
def xpad(self):
"""
Sets the amount of padding (in px) along the x direction.
The 'xpad' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["xpad"]
@xpad.setter
def xpad(self, val):
self["xpad"] = val
@property
def xref(self):
"""
Sets the container `x` refers to. "container" spans the entire
`width` of the plot. "paper" refers to the width of the
plotting area only.
The 'xref' property is an enumeration that may be specified as:
- One of the following enumeration values:
['container', 'paper']
Returns
-------
Any
"""
return self["xref"]
@xref.setter
def xref(self, val):
self["xref"] = val
@property
def y(self):
"""
Sets the y position with respect to `yref` of the color bar (in
plot fraction). When `yref` is "paper", defaults to 0.5 when
`orientation` is "v" and 1.02 when `orientation` is "h". When
`yref` is "container", defaults to 0.5 when `orientation` is
"v" and 1 when `orientation` is "h". Must be between 0 and 1 if
`yref` is "container" and between "-2" and 3 if `yref` is
"paper".
The 'y' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["y"]
@y.setter
def y(self, val):
self["y"] = val
@property
def yanchor(self):
"""
Sets this color bar's vertical position anchor This anchor
binds the `y` position to the "top", "middle" or "bottom" of
the color bar. Defaults to "middle" when `orientation` is "v"
and "bottom" when `orientation` is "h".
The 'yanchor' property is an enumeration that may be specified as:
- One of the following enumeration values:
['top', 'middle', 'bottom']
Returns
-------
Any
"""
return self["yanchor"]
@yanchor.setter
def yanchor(self, val):
self["yanchor"] = val
@property
def ypad(self):
"""
Sets the amount of padding (in px) along the y direction.
The 'ypad' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["ypad"]
@ypad.setter
def ypad(self, val):
self["ypad"] = val
@property
def yref(self):
"""
Sets the container `y` refers to. "container" spans the entire
`height` of the plot. "paper" refers to the height of the
plotting area only.
The 'yref' property is an enumeration that may be specified as:
- One of the following enumeration values:
['container', 'paper']
Returns
-------
Any
"""
return self["yref"]
@yref.setter
def yref(self, val):
self["yref"] = val
    @property
    def _prop_descriptions(self):
        # Internal: plain-text description of every colorbar property.
        # Consumed by the plotly base-class machinery (e.g. to build the
        # __init__ docstring and validation error messages), so the
        # string content must stay exactly in sync with the codegen
        # output — do not edit by hand.
        return """\
        bgcolor
            Sets the color of padded area.
        bordercolor
            Sets the axis line color.
        borderwidth
            Sets the width (in px) or the border enclosing this
            color bar.
        dtick
            Sets the step in-between ticks on this axis. Use with
            `tick0`. Must be a positive number, or special strings
            available to "log" and "date" axes. If the axis `type`
            is "log", then ticks are set every 10^(n*dtick) where n
            is the tick number. For example, to set a tick mark at
            1, 10, 100, 1000, ... set dtick to 1. To set tick marks
            at 1, 100, 10000, ... set dtick to 2. To set tick marks
            at 1, 5, 25, 125, 625, 3125, ... set dtick to
            log_10(5), or 0.69897000433. "log" has several special
            values; "L<f>", where `f` is a positive number, gives
            ticks linearly spaced in value (but not position). For
            example `tick0` = 0.1, `dtick` = "L0.5" will put ticks
            at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10 plus
            small digits between, use "D1" (all digits) or "D2"
            (only 2 and 5). `tick0` is ignored for "D1" and "D2".
            If the axis `type` is "date", then you must convert the
            time to milliseconds. For example, to set the interval
            between ticks to one day, set `dtick` to 86400000.0.
            "date" also has special values "M<n>" gives ticks
            spaced by a number of months. `n` must be a positive
            integer. To set ticks on the 15th of every third month,
            set `tick0` to "2000-01-15" and `dtick` to "M3". To set
            ticks every 4 years, set `dtick` to "M48"
        exponentformat
            Determines a formatting rule for the tick exponents.
            For example, consider the number 1,000,000,000. If
            "none", it appears as 1,000,000,000. If "e", 1e+9. If
            "E", 1E+9. If "power", 1x10^9 (with 9 in a super
            script). If "SI", 1G. If "B", 1B. "SI" uses prefixes
            from "femto" f (10^-15) to "tera" T (10^12). *SI
            extended* covers instead the full SI range from
            "quecto" q (10^-30) to "quetta" Q (10^30). If "SI" or
            *SI extended* is used and the exponent is beyond the
            above ranges, the formatting rule will automatically be
            switched to the power notation.
        labelalias
            Replacement text for specific tick or hover labels. For
            example using {US: 'USA', CA: 'Canada'} changes US to
            USA and CA to Canada. The labels we would have shown
            must match the keys exactly, after adding any
            tickprefix or ticksuffix. For negative numbers the
            minus sign symbol used (U+2212) is wider than the
            regular ascii dash. That means you need to use −1
            instead of -1. labelalias can be used with any axis
            type, and both keys (if needed) and values (if desired)
            can include html-like tags or MathJax.
        len
            Sets the length of the color bar This measure excludes
            the padding of both ends. That is, the color bar length
            is this length minus the padding on both ends.
        lenmode
            Determines whether this color bar's length (i.e. the
            measure in the color variation direction) is set in
            units of plot "fraction" or in *pixels. Use `len` to
            set the value.
        minexponent
            Hide SI prefix for 10^n if |n| is below this number.
            This only has an effect when `tickformat` is "SI" or
            "B".
        nticks
            Specifies the maximum number of ticks for the
            particular axis. The actual number of ticks will be
            chosen automatically to be less than or equal to
            `nticks`. Has an effect only if `tickmode` is set to
            "auto".
        orientation
            Sets the orientation of the colorbar.
        outlinecolor
            Sets the axis line color.
        outlinewidth
            Sets the width (in px) of the axis line.
        separatethousands
            If "true", even 4-digit integers are separated
        showexponent
            If "all", all exponents are shown besides their
            significands. If "first", only the exponent of the
            first tick is shown. If "last", only the exponent of
            the last tick is shown. If "none", no exponents appear.
        showticklabels
            Determines whether or not the tick labels are drawn.
        showtickprefix
            If "all", all tick labels are displayed with a prefix.
            If "first", only the first tick is displayed with a
            prefix. If "last", only the last tick is displayed with
            a suffix. If "none", tick prefixes are hidden.
        showticksuffix
            Same as `showtickprefix` but for tick suffixes.
        thickness
            Sets the thickness of the color bar This measure
            excludes the size of the padding, ticks and labels.
        thicknessmode
            Determines whether this color bar's thickness (i.e. the
            measure in the constant color direction) is set in
            units of plot "fraction" or in "pixels". Use
            `thickness` to set the value.
        tick0
            Sets the placement of the first tick on this axis. Use
            with `dtick`. If the axis `type` is "log", then you
            must take the log of your starting tick (e.g. to set
            the starting tick to 100, set the `tick0` to 2) except
            when `dtick`=*L<f>* (see `dtick` for more info). If the
            axis `type` is "date", it should be a date string, like
            date data. If the axis `type` is "category", it should
            be a number, using the scale where each category is
            assigned a serial number from zero in the order it
            appears.
        tickangle
            Sets the angle of the tick labels with respect to the
            horizontal. For example, a `tickangle` of -90 draws the
            tick labels vertically.
        tickcolor
            Sets the tick color.
        tickfont
            Sets the color bar's tick label font
        tickformat
            Sets the tick label formatting rule using d3 formatting
            mini-languages which are very similar to those in
            Python. For numbers, see:
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
            And for dates see: https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format. We add two items to
            d3's date formatter: "%h" for half of the year as a
            decimal number as well as "%{n}f" for fractional
            seconds with n digits. For example, *2016-10-13
            09:15:23.456* with tickformat "%H~%M~%S.%2f" would
            display "09~15~23.46"
        tickformatstops
            A tuple of :class:`plotly.graph_objects.contour.colorba
            r.Tickformatstop` instances or dicts with compatible
            properties
        tickformatstopdefaults
            When used in a template (as layout.template.data.contou
            r.colorbar.tickformatstopdefaults), sets the default
            property values to use for elements of
            contour.colorbar.tickformatstops
        ticklabeloverflow
            Determines how we handle tick labels that would
            overflow either the graph div or the domain of the
            axis. The default value for inside tick labels is *hide
            past domain*. In other cases the default is *hide past
            div*.
        ticklabelposition
            Determines where tick labels are drawn relative to the
            ticks. Left and right options are used when
            `orientation` is "h", top and bottom when `orientation`
            is "v".
        ticklabelstep
            Sets the spacing between tick labels as compared to the
            spacing between ticks. A value of 1 (default) means
            each tick gets a label. A value of 2 means shows every
            2nd label. A larger value n means only every nth tick
            is labeled. `tick0` determines which labels are shown.
            Not implemented for axes with `type` "log" or
            "multicategory", or when `tickmode` is "array".
        ticklen
            Sets the tick length (in px).
        tickmode
            Sets the tick mode for this axis. If "auto", the number
            of ticks is set via `nticks`. If "linear", the
            placement of the ticks is determined by a starting
            position `tick0` and a tick step `dtick` ("linear" is
            the default value if `tick0` and `dtick` are provided).
            If "array", the placement of the ticks is set via
            `tickvals` and the tick text is `ticktext`. ("array" is
            the default value if `tickvals` is provided).
        tickprefix
            Sets a tick label prefix.
        ticks
            Determines whether ticks are drawn or not. If "", this
            axis' ticks are not drawn. If "outside" ("inside"),
            this axis' are drawn outside (inside) the axis lines.
        ticksuffix
            Sets a tick label suffix.
        ticktext
            Sets the text displayed at the ticks position via
            `tickvals`. Only has an effect if `tickmode` is set to
            "array". Used with `tickvals`.
        ticktextsrc
            Sets the source reference on Chart Studio Cloud for
            `ticktext`.
        tickvals
            Sets the values at which ticks on this axis appear.
            Only has an effect if `tickmode` is set to "array".
            Used with `ticktext`.
        tickvalssrc
            Sets the source reference on Chart Studio Cloud for
            `tickvals`.
        tickwidth
            Sets the tick width (in px).
        title
            :class:`plotly.graph_objects.contour.colorbar.Title`
            instance or dict with compatible properties
        x
            Sets the x position with respect to `xref` of the color
            bar (in plot fraction). When `xref` is "paper",
            defaults to 1.02 when `orientation` is "v" and 0.5 when
            `orientation` is "h". When `xref` is "container",
            defaults to 1 when `orientation` is "v" and 0.5 when
            `orientation` is "h". Must be between 0 and 1 if `xref`
            is "container" and between "-2" and 3 if `xref` is
            "paper".
        xanchor
            Sets this color bar's horizontal position anchor. This
            anchor binds the `x` position to the "left", "center"
            or "right" of the color bar. Defaults to "left" when
            `orientation` is "v" and "center" when `orientation` is
            "h".
        xpad
            Sets the amount of padding (in px) along the x
            direction.
        xref
            Sets the container `x` refers to. "container" spans the
            entire `width` of the plot. "paper" refers to the width
            of the plotting area only.
        y
            Sets the y position with respect to `yref` of the color
            bar (in plot fraction). When `yref` is "paper",
            defaults to 0.5 when `orientation` is "v" and 1.02 when
            `orientation` is "h". When `yref` is "container",
            defaults to 0.5 when `orientation` is "v" and 1 when
            `orientation` is "h". Must be between 0 and 1 if `yref`
            is "container" and between "-2" and 3 if `yref` is
            "paper".
        yanchor
            Sets this color bar's vertical position anchor This
            anchor binds the `y` position to the "top", "middle" or
            "bottom" of the color bar. Defaults to "middle" when
            `orientation` is "v" and "bottom" when `orientation` is
            "h".
        ypad
            Sets the amount of padding (in px) along the y
            direction.
        yref
            Sets the container `y` refers to. "container" spans the
            entire `height` of the plot. "paper" refers to the
            height of the plotting area only.
        """
def __init__(
self,
arg=None,
bgcolor=None,
bordercolor=None,
borderwidth=None,
dtick=None,
exponentformat=None,
labelalias=None,
len=None,
lenmode=None,
minexponent=None,
nticks=None,
orientation=None,
outlinecolor=None,
outlinewidth=None,
separatethousands=None,
showexponent=None,
showticklabels=None,
showtickprefix=None,
showticksuffix=None,
thickness=None,
thicknessmode=None,
tick0=None,
tickangle=None,
tickcolor=None,
tickfont=None,
tickformat=None,
tickformatstops=None,
tickformatstopdefaults=None,
ticklabeloverflow=None,
ticklabelposition=None,
ticklabelstep=None,
ticklen=None,
tickmode=None,
tickprefix=None,
ticks=None,
ticksuffix=None,
ticktext=None,
ticktextsrc=None,
tickvals=None,
tickvalssrc=None,
tickwidth=None,
title=None,
x=None,
xanchor=None,
xpad=None,
xref=None,
y=None,
yanchor=None,
ypad=None,
yref=None,
**kwargs,
):
"""
Construct a new ColorBar object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.contour.ColorBar`
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) or the border enclosing this
color bar.
dtick
Sets the step in-between ticks on this axis. Use with
`tick0`. Must be a positive number, or special strings
available to "log" and "date" axes. If the axis `type`
is "log", then ticks are set every 10^(n*dtick) where n
is the tick number. For example, to set a tick mark at
1, 10, 100, 1000, ... set dtick to 1. To set tick marks
at 1, 100, 10000, ... set dtick to 2. To set tick marks
at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special
values; "L<f>", where `f` is a positive number, gives
ticks linearly spaced in value (but not position). For
example `tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10 plus
small digits between, use "D1" (all digits) or "D2"
(only 2 and 5). `tick0` is ignored for "D1" and "D2".
If the axis `type` is "date", then you must convert the
time to milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to 86400000.0.
"date" also has special values "M<n>" gives ticks
spaced by a number of months. `n` must be a positive
integer. To set ticks on the 15th of every third month,
set `tick0` to "2000-01-15" and `dtick` to "M3". To set
ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick exponents.
For example, consider the number 1,000,000,000. If
"none", it appears as 1,000,000,000. If "e", 1e+9. If
"E", 1E+9. If "power", 1x10^9 (with 9 in a super
script). If "SI", 1G. If "B", 1B. "SI" uses prefixes
from "femto" f (10^-15) to "tera" T (10^12). *SI
extended* covers instead the full SI range from
"quecto" q (10^-30) to "quetta" Q (10^30). If "SI" or
*SI extended* is used and the exponent is beyond the
above ranges, the formatting rule will automatically be
switched to the power notation.
labelalias
Replacement text for specific tick or hover labels. For
example using {US: 'USA', CA: 'Canada'} changes US to
USA and CA to Canada. The labels we would have shown
must match the keys exactly, after adding any
tickprefix or ticksuffix. For negative numbers the
minus sign symbol used (U+2212) is wider than the
regular ascii dash. That means you need to use −1
instead of -1. labelalias can be used with any axis
type, and both keys (if needed) and values (if desired)
can include html-like tags or MathJax.
len
Sets the length of the color bar This measure excludes
the padding of both ends. That is, the color bar length
is this length minus the padding on both ends.
lenmode
Determines whether this color bar's length (i.e. the
measure in the color variation direction) is set in
units of plot "fraction" or in *pixels. Use `len` to
set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this number.
This only has an effect when `tickformat` is "SI" or
"B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks will be
chosen automatically to be less than or equal to
`nticks`. Has an effect only if `tickmode` is set to
"auto".
orientation
Sets the orientation of the colorbar.
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of the
first tick is shown. If "last", only the exponent of
the last tick is shown. If "none", no exponents appear.
showticklabels
Determines whether or not the tick labels are drawn.
showtickprefix
If "all", all tick labels are displayed with a prefix.
If "first", only the first tick is displayed with a
prefix. If "last", only the last tick is displayed with
a suffix. If "none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar This measure
excludes the size of the padding, ticks and labels.
thicknessmode
Determines whether this color bar's thickness (i.e. the
measure in the constant color direction) is set in
units of plot "fraction" or in "pixels". Use
`thickness` to set the value.
tick0
Sets the placement of the first tick on this axis. Use
with `dtick`. If the axis `type` is "log", then you
must take the log of your starting tick (e.g. to set
the starting tick to 100, set the `tick0` to 2) except
when `dtick`=*L<f>* (see `dtick` for more info). If the
axis `type` is "date", it should be a date string, like
date data. If the axis `type` is "category", it should
be a number, using the scale where each category is
assigned a serial number from zero in the order it
appears.
tickangle
Sets the angle of the tick labels with respect to the
horizontal. For example, a `tickangle` of -90 draws the
tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3 formatting
mini-languages which are very similar to those in
Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
And for dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to
d3's date formatter: "%h" for half of the year as a
decimal number as well as "%{n}f" for fractional
seconds with n digits. For example, *2016-10-13
09:15:23.456* with tickformat "%H~%M~%S.%2f" would
display "09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.contour.colorba
r.Tickformatstop` instances or dicts with compatible
properties
tickformatstopdefaults
When used in a template (as layout.template.data.contou
r.colorbar.tickformatstopdefaults), sets the default
property values to use for elements of
contour.colorbar.tickformatstops
ticklabeloverflow
Determines how we handle tick labels that would
overflow either the graph div or the domain of the
axis. The default value for inside tick labels is *hide
past domain*. In other cases the default is *hide past
div*.
ticklabelposition
Determines where tick labels are drawn relative to the
ticks. Left and right options are used when
`orientation` is "h", top and bottom when `orientation`
is "v".
ticklabelstep
Sets the spacing between tick labels as compared to the
spacing between ticks. A value of 1 (default) means
each tick gets a label. A value of 2 means shows every
2nd label. A larger value n means only every nth tick
is labeled. `tick0` determines which labels are shown.
Not implemented for axes with `type` "log" or
"multicategory", or when `tickmode` is "array".
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto", the number
of ticks is set via `nticks`. If "linear", the
placement of the ticks is determined by a starting
position `tick0` and a tick step `dtick` ("linear" is
the default value if `tick0` and `dtick` are provided).
If "array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`. ("array" is
the default value if `tickvals` is provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If "", this
axis' ticks are not drawn. If "outside" ("inside"),
this axis' are drawn outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position via
`tickvals`. Only has an effect if `tickmode` is set to
"array". Used with `tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud for
`ticktext`.
tickvals
Sets the values at which ticks on this axis appear.
Only has an effect if `tickmode` is set to "array".
Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud for
`tickvals`.
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.contour.colorbar.Title`
instance or dict with compatible properties
x
Sets the x position with respect to `xref` of the color
bar (in plot fraction). When `xref` is "paper",
defaults to 1.02 when `orientation` is "v" and 0.5 when
`orientation` is "h". When `xref` is "container",
defaults to 1 when `orientation` is "v" and 0.5 when
`orientation` is "h". Must be between 0 and 1 if `xref`
is "container" and between "-2" and 3 if `xref` is
"paper".
xanchor
Sets this color bar's horizontal position anchor. This
anchor binds the `x` position to the "left", "center"
or "right" of the color bar. Defaults to "left" when
`orientation` is "v" and "center" when `orientation` is
"h".
xpad
Sets the amount of padding (in px) along the x
direction.
xref
Sets the container `x` refers to. "container" spans the
entire `width` of the plot. "paper" refers to the width
of the plotting area only.
y
Sets the y position with respect to `yref` of the color
bar (in plot fraction). When `yref` is "paper",
defaults to 0.5 when `orientation` is "v" and 1.02 when
`orientation` is "h". When `yref` is "container",
defaults to 0.5 when `orientation` is "v" and 1 when
`orientation` is "h". Must be between 0 and 1 if `yref`
is "container" and between "-2" and 3 if `yref` is
"paper".
yanchor
Sets this color bar's vertical position anchor This
anchor binds the `y` position to the "top", "middle" or
"bottom" of the color bar. Defaults to "middle" when
`orientation` is "v" and "bottom" when `orientation` is
"h".
ypad
Sets the amount of padding (in px) along the y
direction.
yref
Sets the container `y` refers to. "container" spans the
entire `height` of the plot. "paper" refers to the
height of the plotting area only.
Returns
-------
ColorBar
"""
super().__init__("colorbar")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError("""\
The first argument to the plotly.graph_objs.contour.ColorBar
constructor must be a dict or
an instance of :class:`plotly.graph_objs.contour.ColorBar`""")
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
self._set_property("bgcolor", arg, bgcolor)
self._set_property("bordercolor", arg, bordercolor)
self._set_property("borderwidth", arg, borderwidth)
self._set_property("dtick", arg, dtick)
self._set_property("exponentformat", arg, exponentformat)
self._set_property("labelalias", arg, labelalias)
self._set_property("len", arg, len)
self._set_property("lenmode", arg, lenmode)
self._set_property("minexponent", arg, minexponent)
self._set_property("nticks", arg, nticks)
self._set_property("orientation", arg, orientation)
self._set_property("outlinecolor", arg, outlinecolor)
self._set_property("outlinewidth", arg, outlinewidth)
self._set_property("separatethousands", arg, separatethousands)
self._set_property("showexponent", arg, showexponent)
self._set_property("showticklabels", arg, showticklabels)
self._set_property("showtickprefix", arg, showtickprefix)
self._set_property("showticksuffix", arg, showticksuffix)
self._set_property("thickness", arg, thickness)
self._set_property("thicknessmode", arg, thicknessmode)
self._set_property("tick0", arg, tick0)
self._set_property("tickangle", arg, tickangle)
self._set_property("tickcolor", arg, tickcolor)
self._set_property("tickfont", arg, tickfont)
self._set_property("tickformat", arg, tickformat)
self._set_property("tickformatstops", arg, tickformatstops)
self._set_property("tickformatstopdefaults", arg, tickformatstopdefaults)
self._set_property("ticklabeloverflow", arg, ticklabeloverflow)
self._set_property("ticklabelposition", arg, ticklabelposition)
self._set_property("ticklabelstep", arg, ticklabelstep)
self._set_property("ticklen", arg, ticklen)
self._set_property("tickmode", arg, tickmode)
self._set_property("tickprefix", arg, tickprefix)
self._set_property("ticks", arg, ticks)
self._set_property("ticksuffix", arg, ticksuffix)
self._set_property("ticktext", arg, ticktext)
self._set_property("ticktextsrc", arg, ticktextsrc)
self._set_property("tickvals", arg, tickvals)
self._set_property("tickvalssrc", arg, tickvalssrc)
self._set_property("tickwidth", arg, tickwidth)
self._set_property("title", arg, title)
self._set_property("x", arg, x)
self._set_property("xanchor", arg, xanchor)
self._set_property("xpad", arg, xpad)
self._set_property("xref", arg, xref)
self._set_property("y", arg, y)
self._set_property("yanchor", arg, yanchor)
self._set_property("ypad", arg, ypad)
self._set_property("yref", arg, yref)
self._process_kwargs(**dict(arg, **kwargs))
self._skip_invalid = False
| ColorBar |
python | sympy__sympy | sympy/physics/biomechanics/curve.py | {
"start": 10350,
"end": 17874
} | class ____(CharacteristicCurveFunction):
r"""Inverse tendon force-length curve based on De Groote et al., 2016 [1]_.
Explanation
===========
Gives the normalized tendon length that produces a specific normalized
tendon force.
The function is defined by the equation:
${fl^T}^{-1} = frac{\log{\frac{fl^T + c_2}{c_0}}}{c_3} + c_1$
with constant values of $c_0 = 0.2$, $c_1 = 0.995$, $c_2 = 0.25$, and
$c_3 = 33.93669377311689$. This function is the exact analytical inverse
of the related tendon force-length curve ``TendonForceLengthDeGroote2016``.
While it is possible to change the constant values, these were carefully
selected in the original publication to give the characteristic curve
specific and required properties. For example, the function produces no
force when the tendon is in an unstrained state. It also produces a force
of 1 normalized unit when the tendon is under a 5% strain.
Examples
========
The preferred way to instantiate :class:`TendonForceLengthInverseDeGroote2016` is
using the :meth:`~.with_defaults` constructor because this will automatically
populate the constants within the characteristic curve equation with the
floating point values from the original publication. This constructor takes
a single argument corresponding to normalized tendon force-length, which is
equal to the tendon force. We'll create a :class:`~.Symbol` called ``fl_T`` to
represent this.
>>> from sympy import Symbol
>>> from sympy.physics.biomechanics import TendonForceLengthInverseDeGroote2016
>>> fl_T = Symbol('fl_T')
>>> l_T_tilde = TendonForceLengthInverseDeGroote2016.with_defaults(fl_T)
>>> l_T_tilde
TendonForceLengthInverseDeGroote2016(fl_T, 0.2, 0.995, 0.25,
33.93669377311689)
It's also possible to populate the four constants with your own values too.
>>> from sympy import symbols
>>> c0, c1, c2, c3 = symbols('c0 c1 c2 c3')
>>> l_T_tilde = TendonForceLengthInverseDeGroote2016(fl_T, c0, c1, c2, c3)
>>> l_T_tilde
TendonForceLengthInverseDeGroote2016(fl_T, c0, c1, c2, c3)
To inspect the actual symbolic expression that this function represents,
we can call the :meth:`~.doit` method on an instance. We'll use the keyword
argument ``evaluate=False`` as this will keep the expression in its
canonical form and won't simplify any constants.
>>> l_T_tilde.doit(evaluate=False)
c1 + log((c2 + fl_T)/c0)/c3
The function can also be differentiated. We'll differentiate with respect
to l_T using the ``diff`` method on an instance with the single positional
argument ``l_T``.
>>> l_T_tilde.diff(fl_T)
1/(c3*(c2 + fl_T))
References
==========
.. [1] De Groote, F., Kinney, A. L., Rao, A. V., & Fregly, B. J., Evaluation
of direct collocation optimal control problem formulations for
solving the muscle redundancy problem, Annals of biomedical
engineering, 44(10), (2016) pp. 2922-2936
"""
@classmethod
def with_defaults(cls, fl_T):
r"""Recommended constructor that will use the published constants.
Explanation
===========
Returns a new instance of the inverse tendon force-length function
using the four constant values specified in the original publication.
These have the values:
$c_0 = 0.2$
$c_1 = 0.995$
$c_2 = 0.25$
$c_3 = 33.93669377311689$
Parameters
==========
fl_T : Any (sympifiable)
Normalized tendon force as a function of tendon length.
"""
c0 = Float('0.2')
c1 = Float('0.995')
c2 = Float('0.25')
c3 = Float('33.93669377311689')
return cls(fl_T, c0, c1, c2, c3)
@classmethod
def eval(cls, fl_T, c0, c1, c2, c3):
"""Evaluation of basic inputs.
Parameters
==========
fl_T : Any (sympifiable)
Normalized tendon force as a function of tendon length.
c0 : Any (sympifiable)
The first constant in the characteristic equation. The published
value is ``0.2``.
c1 : Any (sympifiable)
The second constant in the characteristic equation. The published
value is ``0.995``.
c2 : Any (sympifiable)
The third constant in the characteristic equation. The published
value is ``0.25``.
c3 : Any (sympifiable)
The fourth constant in the characteristic equation. The published
value is ``33.93669377311689``.
"""
pass
def _eval_evalf(self, prec):
"""Evaluate the expression numerically using ``evalf``."""
return self.doit(deep=False, evaluate=False)._eval_evalf(prec)
def doit(self, deep=True, evaluate=True, **hints):
"""Evaluate the expression defining the function.
Parameters
==========
deep : bool
Whether ``doit`` should be recursively called. Default is ``True``.
evaluate : bool.
Whether the SymPy expression should be evaluated as it is
constructed. If ``False``, then no constant folding will be
conducted which will leave the expression in a more numerically-
stable for values of ``l_T_tilde`` that correspond to a sensible
operating range for a musculotendon. Default is ``True``.
**kwargs : dict[str, Any]
Additional keyword argument pairs to be recursively passed to
``doit``.
"""
fl_T, *constants = self.args
if deep:
hints['evaluate'] = evaluate
fl_T = fl_T.doit(deep=deep, **hints)
c0, c1, c2, c3 = [c.doit(deep=deep, **hints) for c in constants]
else:
c0, c1, c2, c3 = constants
if evaluate:
return log((fl_T + c2)/c0)/c3 + c1
return log(UnevaluatedExpr((fl_T + c2)/c0))/c3 + c1
def fdiff(self, argindex=1):
"""Derivative of the function with respect to a single argument.
Parameters
==========
argindex : int
The index of the function's arguments with respect to which the
derivative should be taken. Argument indexes start at ``1``.
Default is ``1``.
"""
fl_T, c0, c1, c2, c3 = self.args
if argindex == 1:
return 1/(c3*(fl_T + c2))
elif argindex == 2:
return -1/(c0*c3)
elif argindex == 3:
return Integer(1)
elif argindex == 4:
return 1/(c3*(fl_T + c2))
elif argindex == 5:
return -log(UnevaluatedExpr((fl_T + c2)/c0))/c3**2
raise ArgumentIndexError(self, argindex)
def inverse(self, argindex=1):
"""Inverse function.
Parameters
==========
argindex : int
Value to start indexing the arguments at. Default is ``1``.
"""
return TendonForceLengthDeGroote2016
def _latex(self, printer):
"""Print a LaTeX representation of the function defining the curve.
Parameters
==========
printer : Printer
The printer to be used to print the LaTeX string representation.
"""
fl_T = self.args[0]
_fl_T = printer._print(fl_T)
return r'\left( \operatorname{fl}^T \right)^{-1} \left( %s \right)' % _fl_T
| TendonForceLengthInverseDeGroote2016 |
python | apache__airflow | providers/amazon/tests/unit/amazon/aws/sensors/test_comprehend.py | {
"start": 3575,
"end": 7024
} | class ____:
SENSOR = ComprehendCreateDocumentClassifierCompletedSensor
DOCUMENT_CLASSIFIER_ARN = (
"arn:aws:comprehend:us-east-1:123456789012:document-classifier/insurance-classifier/version/v1"
)
EVALUATION_METRICS = {
"EvaluationMetrics": {
"Accuracy": 1,
"Precision": 1,
"Recall": 1,
"F1Score": 1,
"MicroPrecision": 1,
"MicroRecall": 1,
"MicroF1Score": 1,
"HammingLoss": 0,
}
}
def setup_method(self):
self.default_op_kwargs = dict(
task_id="test_create_document_classifier_sensor",
document_classifier_arn=self.DOCUMENT_CLASSIFIER_ARN,
fail_on_warnings=False,
poke_interval=5,
max_retries=1,
)
self.sensor = self.SENSOR(**self.default_op_kwargs, aws_conn_id=None)
def test_base_aws_op_attributes(self):
op = self.SENSOR(**self.default_op_kwargs)
assert op.hook.aws_conn_id == "aws_default"
assert op.hook._region_name is None
assert op.hook._verify is None
assert op.hook._config is None
op = self.SENSOR(
**self.default_op_kwargs,
aws_conn_id="aws-test-custom-conn",
region_name="eu-west-1",
verify=False,
botocore_config={"read_timeout": 42},
)
assert op.hook.aws_conn_id == "aws-test-custom-conn"
assert op.hook._region_name == "eu-west-1"
assert op.hook._verify is False
assert op.hook._config is not None
assert op.hook._config.read_timeout == 42
@pytest.mark.parametrize(
("state", "message", "output"),
[
pytest.param("TRAINED", "", "s3://test-output", id="training succeeded"),
pytest.param(
"TRAINED_WITH_WARNING",
"Unable to parse some documents. See details in the output S3 location",
"s3://test-output",
id="trained with warning",
),
],
)
@mock.patch.object(ComprehendHook, "conn")
def test_poke_success_state(self, mock_conn, state, message, output):
mock_conn.describe_document_classifier.return_value = {
"DocumentClassifierProperties": {
"Status": state,
"Message": message,
"OutputDataConfig": {"S3Uri": output},
"ClassifierMetadata": self.EVALUATION_METRICS,
}
}
assert self.sensor.poke({}) is True
@pytest.mark.parametrize("state", SENSOR.INTERMEDIATE_STATES)
@mock.patch.object(ComprehendHook, "conn")
def test_intermediate_state(self, mock_conn, state):
mock_conn.describe_document_classifier.return_value = {
"DocumentClassifierProperties": {"Status": state}
}
assert self.sensor.poke({}) is False
@pytest.mark.parametrize("state", SENSOR.FAILURE_STATES)
@mock.patch.object(ComprehendHook, "conn")
def test_poke_failure_states(self, mock_conn, state):
mock_conn.describe_document_classifier.return_value = {
"DocumentClassifierProperties": {"Status": state}
}
sensor = self.SENSOR(**self.default_op_kwargs, aws_conn_id=None)
with pytest.raises(AirflowException, match=sensor.FAILURE_MESSAGE):
sensor.poke({})
| TestComprehendCreateDocumentClassifierCompletedSensor |
python | PyCQA__pylint | tests/functional/u/use/use_implicit_booleaness_not_comparison.py | {
"start": 2005,
"end": 2063
} | class ____:
def __init__(self):
self.a = 2
| NoBool |
python | weaviate__weaviate-python-client | weaviate/collections/batch/base.py | {
"start": 5764,
"end": 5834
} | class ____:
requests_per_minute: int
@dataclass
| _RateLimitedBatching |
python | spack__spack | lib/spack/spack/reporters/base.py | {
"start": 156,
"end": 649
} | class ____:
"""Base class for report writers."""
def build_report(self, filename: str, specs: List[Dict[str, Any]]):
raise NotImplementedError("must be implemented by derived classes")
def test_report(self, filename: str, specs: List[Dict[str, Any]]):
raise NotImplementedError("must be implemented by derived classes")
def concretization_report(self, filename: str, msg: str):
raise NotImplementedError("must be implemented by derived classes")
| Reporter |
python | jschneier__django-storages | storages/backends/apache_libcloud.py | {
"start": 667,
"end": 6086
} | class ____(Storage):
"""Django storage derived class using apache libcloud to operate
on supported providers"""
def __init__(self, provider_name=None, option=None):
if provider_name is None:
provider_name = getattr(settings, "DEFAULT_LIBCLOUD_PROVIDER", "default")
self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name)
if not self.provider:
raise ImproperlyConfigured(
"LIBCLOUD_PROVIDERS %s not defined or invalid" % provider_name
)
extra_kwargs = {}
if "region" in self.provider:
extra_kwargs["region"] = self.provider["region"]
# Used by the GoogleStorageDriver
if "project" in self.provider:
extra_kwargs["project"] = self.provider["project"]
try:
provider_type = self.provider["type"]
if isinstance(provider_type, str):
module_path, tag = provider_type.rsplit(".", 1)
if module_path != "libcloud.storage.types.Provider":
raise ValueError("Invalid module path")
provider_type = getattr(Provider, tag)
Driver = get_driver(provider_type)
self.driver = Driver(
self.provider["user"], self.provider["key"], **extra_kwargs
)
except Exception as e:
raise ImproperlyConfigured(
"Unable to create libcloud driver type %s: %s"
% (self.provider.get("type"), e)
)
self.bucket = self.provider["bucket"] # Limit to one container
def _get_bucket(self):
"""Helper to get bucket object (libcloud container)"""
return self.driver.get_container(self.bucket)
def _get_object(self, name):
"""Get object by its name. ObjectDoesNotExistError will be raised if object not
found"""
return self.driver.get_object(self.bucket, clean_name(name))
def delete(self, name):
"""Delete object on remote"""
try:
obj = self._get_object(name)
return self.driver.delete_object(obj)
except ObjectDoesNotExistError:
pass
def exists(self, name):
try:
_ = self._get_object(name)
except ObjectDoesNotExistError:
return False
return True
def listdir(self, path="/"):
"""Lists the contents of the specified path,
returning a 2-tuple of lists; the first item being
directories, the second item being files.
"""
container = self._get_bucket()
objects = self.driver.list_container_objects(container)
path = clean_name(path)
if not path.endswith("/"):
path = "%s/" % path
files = []
dirs = []
# TOFIX: better algorithm to filter correctly
# (and not depend on google-storage empty folder naming)
for o in objects:
if path == "/":
if o.name.count("/") == 0:
files.append(o.name)
elif o.name.count("/") == 1:
dir_name = o.name[: o.name.index("/")]
if dir_name not in dirs:
dirs.append(dir_name)
elif o.name.startswith(path):
if o.name.count("/") <= path.count("/"):
# TOFIX : special case for google storage with empty dir
if o.name.endswith("_$folder$"):
name = o.name[:-9]
name = name[len(path) :]
dirs.append(name)
else:
name = o.name[len(path) :]
files.append(name)
return (dirs, files)
def size(self, name):
obj = self._get_object(name)
return obj.size if obj else -1
def url(self, name):
provider_type = self.provider["type"].lower()
obj = self._get_object(name)
if not obj:
return None
try:
url = self.driver.get_object_cdn_url(obj)
except NotImplementedError as e:
object_path = "{}/{}".format(self.bucket, obj.name)
if "s3" in provider_type:
base_url = "https://%s" % self.driver.connection.host
url = urljoin(base_url, object_path)
elif "google" in provider_type:
url = urljoin("https://storage.googleapis.com", object_path)
elif "azure" in provider_type:
base_url = "https://%s.blob.core.windows.net" % self.provider["user"]
url = urljoin(base_url, object_path)
elif "backblaze" in provider_type:
url = urljoin("api.backblaze.com/b2api/v1/", object_path)
else:
raise e
return url
def _open(self, name, mode="rb"):
remote_file = LibCloudFile(name, self, mode=mode)
return remote_file
def _read(self, name):
try:
obj = self._get_object(name)
except ObjectDoesNotExistError as e:
raise FileNotFoundError(str(e))
# TOFIX : we should be able to read chunk by chunk
return next(self.driver.download_object_as_stream(obj, obj.size))
def _save(self, name, file):
self.driver.upload_object_via_stream(iter(file), self._get_bucket(), name)
return name
| LibCloudStorage |
python | google__flatbuffers | tests/monster_test_generated.py | {
"start": 1693,
"end": 2252
} | class ____(object):
NONE = 0
M1 = 1
M2 = 2
M3 = 3
def AnyAmbiguousAliasesCreator(unionType, table):
from flatbuffers.table import Table
if not isinstance(table, Table):
return None
if unionType == AnyAmbiguousAliases.M1:
return MonsterT.InitFromBuf(table.Bytes, table.Pos)
if unionType == AnyAmbiguousAliases.M2:
return MonsterT.InitFromBuf(table.Bytes, table.Pos)
if unionType == AnyAmbiguousAliases.M3:
return MonsterT.InitFromBuf(table.Bytes, table.Pos)
return None
| AnyAmbiguousAliases |
python | Pylons__pyramid | src/pyramid/predicates.py | {
"start": 4312,
"end": 4572
} | class ____:
def __init__(self, val, config):
self.val = val
def text(self):
return f'request_type = {self.val}'
phash = text
def __call__(self, context, request):
return self.val.providedBy(request)
| RequestTypePredicate |
python | xlwings__xlwings | xlwings/_xlwindows.py | {
"start": 58142,
"end": 58844
} | class ____(Collection, base_classes.Tables):
_wrap = Table
def add(
self,
source_type=None,
source=None,
link_source=None,
has_headers=None,
destination=None,
table_style_name=None,
name=None,
):
table = Table(
xl=self.xl.Add(
SourceType=ListObjectSourceType.xlSrcRange,
Source=source.api,
LinkSource=link_source,
XlListObjectHasHeaders=True,
Destination=destination,
TableStyleName=table_style_name,
)
)
if name is not None:
table.name = name
return table
| Tables |
python | jazzband__django-polymorphic | src/polymorphic/tests/migrations/0001_initial.py | {
"start": 200,
"end": 52957
} | class ____(migrations.Migration):
initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Base',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field_b', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldType, models.Model),
),
migrations.CreateModel(
name='BlogBase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, models.Model),
),
migrations.CreateModel(
name='CustomPkBase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('b', models.CharField(max_length=1)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, models.Model),
),
migrations.CreateModel(
name='Enhance_Base',
fields=[
('base_id', models.AutoField(primary_key=True, serialize=False)),
('field_b', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, models.Model),
),
migrations.CreateModel(
name='Enhance_Plain',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field_p', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='InitTestModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('bar', models.CharField(max_length=300)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldType, models.Model),
),
migrations.CreateModel(
name='InlineModelA',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='InlineParent',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='Model2A',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldType, models.Model),
),
migrations.CreateModel(
name='ModelArticle',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sales_points', models.IntegerField()),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='ModelExtraA',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, models.Model),
),
migrations.CreateModel(
name='ModelExtraExternal',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('topic', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='ModelShow1_plain',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='MROBase1',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldType, models.Model),
),
migrations.CreateModel(
name='MROBase3',
fields=[
('base_3_id', models.AutoField(primary_key=True, serialize=False)),
],
),
migrations.CreateModel(
name='MultiTableBase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='One2OneRelatingModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
('one2one', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='tests.model2a')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='PlainA',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='PlainParentModelWithManager',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.CreateModel(
name='ProxyBase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('some_data', models.CharField(max_length=128)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='RelationBase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field_base', models.CharField(max_length=30)),
('fk', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='relationbase_set', to='tests.relationbase')),
('m2m', models.ManyToManyField(to='tests.relationbase')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, models.Model),
),
migrations.CreateModel(
name='SubclassSelectorAbstractBaseModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('base_field', models.CharField(default='test_bf', max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='Top',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='UUIDPlainA',
fields=[
('uuid_primary_key', models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
('field1', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='UUIDProject',
fields=[
('uuid_primary_key', models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
('topic', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, models.Model),
),
migrations.CreateModel(
name='BlogA',
fields=[
('blogbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.blogbase')),
('info', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.blogbase',),
),
migrations.CreateModel(
name='BlogB',
fields=[
('blogbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.blogbase')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.blogbase',),
),
migrations.CreateModel(
name='CustomPkInherit',
fields=[
('custompkbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, to='tests.custompkbase')),
('custom_id', models.AutoField(primary_key=True, serialize=False)),
('i', models.CharField(max_length=1)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.custompkbase',),
),
migrations.CreateModel(
name='Enhance_Inherit',
fields=[
('enhance_plain_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, to='tests.enhance_plain')),
('enhance_base_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.enhance_base')),
('field_i', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.enhance_base', 'tests.enhance_plain'),
),
migrations.CreateModel(
name='InitTestModelSubclass',
fields=[
('inittestmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.inittestmodel')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.inittestmodel',),
),
migrations.CreateModel(
name='InlineModelB',
fields=[
('inlinemodela_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.inlinemodela')),
('field2', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.inlinemodela',),
),
migrations.CreateModel(
name='Middle',
fields=[
('top_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.top')),
('description', models.TextField()),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.top',),
),
migrations.CreateModel(
name='Model2B',
fields=[
('model2a_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.model2a')),
('field2', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.model2a',),
),
migrations.CreateModel(
name='ModelComponent',
fields=[
('modelarticle_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.modelarticle')),
('name', models.CharField(max_length=300)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.modelarticle',),
),
migrations.CreateModel(
name='ModelExtraB',
fields=[
('modelextraa_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.modelextraa')),
('field2', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.modelextraa',),
),
migrations.CreateModel(
name='ModelPackage',
fields=[
('modelarticle_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.modelarticle')),
('name', models.CharField(max_length=300)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.modelarticle',),
),
migrations.CreateModel(
name='ModelShow2_plain',
fields=[
('modelshow1_plain_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.modelshow1_plain')),
('field2', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.modelshow1_plain',),
),
migrations.CreateModel(
name='ModelWithMyManager',
fields=[
('model2a_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.model2a')),
('field4', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, 'tests.model2a'),
),
migrations.CreateModel(
name='ModelWithMyManager2',
fields=[
('model2a_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.model2a')),
('field4', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, 'tests.model2a'),
),
migrations.CreateModel(
name='ModelWithMyManagerDefault',
fields=[
('model2a_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.model2a')),
('field4', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, 'tests.model2a'),
managers=[
('my_objects', django.db.models.manager.Manager()),
('objects', django.db.models.manager.Manager()),
],
),
migrations.CreateModel(
name='ModelWithMyManagerNoDefault',
fields=[
('model2a_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.model2a')),
('field4', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, 'tests.model2a'),
),
migrations.CreateModel(
name='ModelX',
fields=[
('base_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.base')),
('field_x', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.base',),
),
migrations.CreateModel(
name='ModelY',
fields=[
('base_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.base')),
('field_y', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.base',),
),
migrations.CreateModel(
name='MROBase2',
fields=[
('mrobase1_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.mrobase1')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.mrobase1',),
),
migrations.CreateModel(
name='MultiTableDerived',
fields=[
('multitablebase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.multitablebase')),
('field2', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.multitablebase',),
),
migrations.CreateModel(
name='NonProxyChild',
fields=[
('proxybase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.proxybase')),
('name', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.proxybase',),
),
migrations.CreateModel(
name='One2OneRelatingModelDerived',
fields=[
('one2onerelatingmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.one2onerelatingmodel')),
('field2', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.one2onerelatingmodel',),
),
migrations.CreateModel(
name='PlainB',
fields=[
('plaina_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.plaina')),
('field2', models.CharField(max_length=30)),
],
bases=('tests.plaina',),
),
migrations.CreateModel(
name='RelationA',
fields=[
('relationbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.relationbase')),
('field_a', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.relationbase',),
),
migrations.CreateModel(
name='RelationB',
fields=[
('relationbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.relationbase')),
('field_b', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.relationbase',),
),
migrations.CreateModel(
name='SubclassSelectorAbstractConcreteModel',
fields=[
('subclassselectorabstractbasemodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.subclassselectorabstractbasemodel')),
('abstract_field', models.CharField(default='test_af', max_length=30)),
('concrete_field', models.CharField(default='test_cf', max_length=30)),
],
options={
'abstract': False,
},
bases=('tests.subclassselectorabstractbasemodel',),
),
migrations.CreateModel(
name='TestParentLinkAndRelatedName',
fields=[
('superclass', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='related_name_subclass', serialize=False, to='tests.modelshow1_plain')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.modelshow1_plain',),
),
migrations.CreateModel(
name='UUIDArtProject',
fields=[
('uuidproject_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.uuidproject')),
('artist', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.uuidproject',),
),
migrations.CreateModel(
name='UUIDPlainB',
fields=[
('uuidplaina_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.uuidplaina')),
('field2', models.CharField(max_length=30)),
],
bases=('tests.uuidplaina',),
),
migrations.CreateModel(
name='UUIDResearchProject',
fields=[
('uuidproject_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.uuidproject')),
('supervisor', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.uuidproject',),
),
migrations.CreateModel(
name='SwappedModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='SubclassSelectorProxyBaseModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('base_field', models.CharField(default='test_bf', max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='RelatingModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('many2many', models.ManyToManyField(to='tests.model2a')),
],
),
migrations.CreateModel(
name='RelatedNameClash',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldType, models.Model),
),
migrations.CreateModel(
name='ProxiedBase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, models.Model),
),
migrations.CreateModel(
name='PlainChildModelWithManager',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('fk', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='childmodel_set', to='tests.plainparentmodelwithmanager')),
],
),
migrations.CreateModel(
name='ParentModelWithManager',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='NonPolymorphicParent',
fields=[
('group_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='auth.group')),
('test', models.CharField(default='test_non_polymorphic_parent', max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('auth.group', models.Model),
),
migrations.CreateModel(
name='ModelUnderRelParent',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
('_private', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='ModelUnderRelChild',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('_private2', models.CharField(max_length=30)),
('parent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='children', to='tests.modelunderrelparent')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='ModelShow3',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
('m2m', models.ManyToManyField(to='tests.modelshow3')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, models.Model),
),
migrations.CreateModel(
name='ModelShow2',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
('m2m', models.ManyToManyField(to='tests.modelshow2')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldContent, models.Model),
),
migrations.CreateModel(
name='ModelShow1',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
('m2m', models.ManyToManyField(to='tests.modelshow1')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldType, models.Model),
),
migrations.CreateModel(
name='ModelOrderLine',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('articles', models.ManyToManyField(related_name='orderline', to='tests.modelarticle')),
],
),
migrations.CreateModel(
name='ModelFieldNameTest',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('modelfieldnametest', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldType, models.Model),
),
migrations.AddField(
model_name='inlinemodela',
name='parent',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inline_children', to='tests.inlineparent'),
),
migrations.AddField(
model_name='inlinemodela',
name='polymorphic_ctype',
field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype'),
),
migrations.CreateModel(
name='Duck',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='DateModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField()),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='ChildModelWithManager',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field1', models.CharField(max_length=30)),
('fk', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='childmodel_set', to='tests.parentmodelwithmanager')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='BlogEntry_limit_choices_to',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=30)),
('blog', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tests.blogbase')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, models.Model),
),
migrations.CreateModel(
name='ArtProject',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('topic', models.CharField(max_length=30)),
('artist', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='SwappableModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
],
options={
'swappable': 'POLYMORPHIC_TEST_SWAPPABLE',
},
),
migrations.CreateModel(
name='ProxyChild',
fields=[
],
options={
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('tests.proxybase',),
),
migrations.CreateModel(
name='ProxyModelBase',
fields=[
],
options={
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('tests.proxiedbase',),
),
migrations.CreateModel(
name='RedheadDuck',
fields=[
],
options={
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('tests.duck',),
),
migrations.CreateModel(
name='RubberDuck',
fields=[
],
options={
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('tests.duck',),
),
migrations.CreateModel(
name='SubclassSelectorProxyModel',
fields=[
],
options={
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('tests.subclassselectorproxybasemodel',),
),
migrations.CreateModel(
name='Bottom',
fields=[
('middle_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.middle')),
('author', models.CharField(max_length=50)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.middle',),
),
migrations.CreateModel(
name='Model2C',
fields=[
('model2b_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.model2b')),
('field3', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.model2b',),
),
migrations.CreateModel(
name='ModelExtraC',
fields=[
('modelextrab_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.modelextrab')),
('field3', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.modelextrab',),
),
migrations.CreateModel(
name='MRODerived',
fields=[
('mrobase3_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, to='tests.mrobase3')),
('mrobase2_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.mrobase2')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.mrobase2', 'tests.mrobase3'),
),
migrations.CreateModel(
name='PlainC',
fields=[
('plainb_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.plainb')),
('field3', models.CharField(max_length=30)),
],
bases=('tests.plainb',),
),
migrations.CreateModel(
name='ProxyModelA',
fields=[
('proxiedbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.proxiedbase')),
('field1', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.proxymodelbase',),
),
migrations.CreateModel(
name='ProxyModelB',
fields=[
('proxiedbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.proxiedbase')),
('field2', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.proxymodelbase',),
),
migrations.CreateModel(
name='RelationBC',
fields=[
('relationb_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.relationb')),
('field_c', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.relationb',),
),
migrations.CreateModel(
name='SubclassSelectorProxyConcreteModel',
fields=[
('subclassselectorproxybasemodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.subclassselectorproxybasemodel')),
('concrete_field', models.CharField(default='test_cf', max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.subclassselectorproxymodel',),
),
migrations.CreateModel(
name='UUIDPlainC',
fields=[
('uuidplainb_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.uuidplainb')),
('field3', models.CharField(max_length=30)),
],
bases=('tests.uuidplainb',),
),
migrations.CreateModel(
name='BlogEntry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=30)),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
('blog', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tests.bloga')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=(polymorphic.showfields.ShowFieldTypeAndContent, models.Model),
),
migrations.CreateModel(
name='Model2D',
fields=[
('model2c_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='tests.model2c')),
('field4', models.CharField(max_length=30)),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('tests.model2c',),
),
]
| Migration |
python | pypa__twine | twine/commands/__init__.py | {
"start": 1838,
"end": 3017
} | class ____(NamedTuple):
"""Represents structured user inputs."""
dists: List[str]
signatures: Dict[str, str]
attestations_by_dist: Dict[str, List[str]]
def _split_inputs(
inputs: List[str],
) -> Inputs:
"""
Split the unstructured list of input files provided by the user into groups.
Three groups are returned: upload files (i.e. dists), signatures, and attestations.
Upload files are returned as a linear list, signatures are returned as a
dict of ``basename -> path``, and attestations are returned as a dict of
``dist-path -> [attestation-path]``.
"""
signatures = {os.path.basename(i): i for i in fnmatch.filter(inputs, "*.asc")}
attestations = fnmatch.filter(inputs, "*.*.attestation")
dists = [
dist
for dist in inputs
if dist not in (set(signatures.values()) | set(attestations))
]
attestations_by_dist = {}
for dist in dists:
dist_basename = os.path.basename(dist)
attestations_by_dist[dist] = [
a for a in attestations if os.path.basename(a).startswith(dist_basename)
]
return Inputs(dists, signatures, attestations_by_dist)
| Inputs |
python | django__django | tests/admin_views/admin.py | {
"start": 18408,
"end": 18782
} | class ____(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses defer(), to test
verbose_name display in messages shown after adding/editing ShortMessage
instances.
For testing fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).defer("timestamp")
| ShortMessageAdmin |
python | readthedocs__readthedocs.org | readthedocs/projects/backends/views.py | {
"start": 310,
"end": 450
} | class ____(SettingsOverrideObject):
_default_class = private.ImportWizardView
_override_setting = "PROJECT_IMPORT_VIEW"
| ImportWizardView |
python | numpy__numpy | benchmarks/benchmarks/bench_ma.py | {
"start": 2435,
"end": 3405
} | class ____(Benchmark):
param_names = ['mode', 'n']
params = [
['ndarray', 'unmasked',
'ndarray+masked', 'unmasked+masked',
'masked'],
[2, 100, 2000]
]
def setup(self, mode, n):
# avoid np.zeros's lazy allocation that cause page faults during benchmark.
# np.fill will cause pagefaults to happen during setup.
normal = np.full((n, n), 0, int)
unmasked = np.ma.zeros((n, n), int)
masked = np.ma.array(normal, mask=True)
mode_parts = mode.split('+')
base = mode_parts[0]
promote = 'masked' in mode_parts[1:]
if base == 'ndarray':
args = 10 * (normal,)
elif base == 'unmasked':
args = 10 * (unmasked,)
else:
args = 10 * (masked,)
if promote:
args = args[:-1] + (masked,)
self.args = args
def time_it(self, mode, n):
np.ma.concatenate(self.args)
| Concatenate |
python | jmcnamara__XlsxWriter | xlsxwriter/test/comparison/test_print_options01.py | {
"start": 315,
"end": 1190
} | class ____(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename("print_options01.xlsx")
self.ignore_files = [
"xl/printerSettings/printerSettings1.bin",
"xl/worksheets/_rels/sheet1.xml.rels",
]
self.ignore_elements = {
"[Content_Types].xml": ['<Default Extension="bin"'],
"xl/worksheets/sheet1.xml": ["<pageMargins", "<pageSetup"],
}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with print options."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.hide_gridlines(0)
worksheet.write("A1", "Foo")
workbook.close()
self.assertExcelEqual()
| TestCompareXLSXFiles |
python | kamyu104__LeetCode-Solutions | Python/number-of-closed-islands.py | {
"start": 33,
"end": 960
} | class ____(object):
def closedIsland(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
directions = [(0, 1), (1, 0), (0, -1), (-1, 0)]
def fill(grid, i, j):
if not (0 <= i < len(grid) and
0 <= j < len(grid[0]) and
grid[i][j] == 0):
return False
grid[i][j] = 1
for dx, dy in directions:
fill(grid, i+dx, j+dy)
return True
for j in xrange(len(grid[0])):
fill(grid, 0, j)
fill(grid, len(grid)-1, j)
for i in xrange(1, len(grid)):
fill(grid, i, 0)
fill(grid, i, len(grid[0])-1)
result = 0
for i in xrange(1, len(grid)-1):
for j in xrange(1, len(grid[0])-1):
if fill(grid, i, j):
result += 1
return result
| Solution |
python | falconry__falcon | tests/test_headers.py | {
"start": 6788,
"end": 7048
} | class ____:
def __init__(self, filename):
self.filename = filename
def on_get(self, req, resp):
resp.text = 'Hello, World!\n'
resp.content_type = falcon.MEDIA_TEXT
resp.downloadable_as = self.filename
| DownloadableResource |
python | gevent__gevent | src/greentest/3.10/test_smtpd.py | {
"start": 10964,
"end": 31216
} | class ____(unittest.TestCase):
def setUp(self):
smtpd.socket = asyncore.socket = mock_socket
self.old_debugstream = smtpd.DEBUGSTREAM
self.debug = smtpd.DEBUGSTREAM = io.StringIO()
self.server = DummyServer((socket_helper.HOST, 0), ('b', 0),
decode_data=True)
conn, addr = self.server.accept()
self.channel = smtpd.SMTPChannel(self.server, conn, addr,
decode_data=True)
def tearDown(self):
asyncore.close_all()
asyncore.socket = smtpd.socket = socket
smtpd.DEBUGSTREAM = self.old_debugstream
def write_line(self, line):
self.channel.socket.queue_recv(line)
self.channel.handle_read()
def test_broken_connect(self):
self.assertRaises(
DummyDispatcherBroken, BrokenDummyServer,
(socket_helper.HOST, 0), ('b', 0), decode_data=True)
def test_decode_data_and_enable_SMTPUTF8_raises(self):
self.assertRaises(
ValueError, smtpd.SMTPChannel,
self.server, self.channel.conn, self.channel.addr,
enable_SMTPUTF8=True, decode_data=True)
def test_server_accept(self):
self.server.handle_accept()
def test_missing_data(self):
self.write_line(b'')
self.assertEqual(self.channel.socket.last,
b'500 Error: bad syntax\r\n')
def test_EHLO(self):
self.write_line(b'EHLO example')
self.assertEqual(self.channel.socket.last, b'250 HELP\r\n')
def test_EHLO_bad_syntax(self):
self.write_line(b'EHLO')
self.assertEqual(self.channel.socket.last,
b'501 Syntax: EHLO hostname\r\n')
def test_EHLO_duplicate(self):
self.write_line(b'EHLO example')
self.write_line(b'EHLO example')
self.assertEqual(self.channel.socket.last,
b'503 Duplicate HELO/EHLO\r\n')
def test_EHLO_HELO_duplicate(self):
self.write_line(b'EHLO example')
self.write_line(b'HELO example')
self.assertEqual(self.channel.socket.last,
b'503 Duplicate HELO/EHLO\r\n')
def test_HELO(self):
name = smtpd.socket.getfqdn()
self.write_line(b'HELO example')
self.assertEqual(self.channel.socket.last,
'250 {}\r\n'.format(name).encode('ascii'))
def test_HELO_EHLO_duplicate(self):
self.write_line(b'HELO example')
self.write_line(b'EHLO example')
self.assertEqual(self.channel.socket.last,
b'503 Duplicate HELO/EHLO\r\n')
def test_HELP(self):
self.write_line(b'HELP')
self.assertEqual(self.channel.socket.last,
b'250 Supported commands: EHLO HELO MAIL RCPT ' + \
b'DATA RSET NOOP QUIT VRFY\r\n')
def test_HELP_command(self):
self.write_line(b'HELP MAIL')
self.assertEqual(self.channel.socket.last,
b'250 Syntax: MAIL FROM: <address>\r\n')
def test_HELP_command_unknown(self):
self.write_line(b'HELP SPAM')
self.assertEqual(self.channel.socket.last,
b'501 Supported commands: EHLO HELO MAIL RCPT ' + \
b'DATA RSET NOOP QUIT VRFY\r\n')
def test_HELO_bad_syntax(self):
self.write_line(b'HELO')
self.assertEqual(self.channel.socket.last,
b'501 Syntax: HELO hostname\r\n')
def test_HELO_duplicate(self):
self.write_line(b'HELO example')
self.write_line(b'HELO example')
self.assertEqual(self.channel.socket.last,
b'503 Duplicate HELO/EHLO\r\n')
def test_HELO_parameter_rejected_when_extensions_not_enabled(self):
self.extended_smtp = False
self.write_line(b'HELO example')
self.write_line(b'MAIL from:<foo@example.com> SIZE=1234')
self.assertEqual(self.channel.socket.last,
b'501 Syntax: MAIL FROM: <address>\r\n')
def test_MAIL_allows_space_after_colon(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL from: <foo@example.com>')
self.assertEqual(self.channel.socket.last,
b'250 OK\r\n')
def test_extended_MAIL_allows_space_after_colon(self):
self.write_line(b'EHLO example')
self.write_line(b'MAIL from: <foo@example.com> size=20')
self.assertEqual(self.channel.socket.last,
b'250 OK\r\n')
def test_NOOP(self):
self.write_line(b'NOOP')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
def test_HELO_NOOP(self):
self.write_line(b'HELO example')
self.write_line(b'NOOP')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
def test_NOOP_bad_syntax(self):
self.write_line(b'NOOP hi')
self.assertEqual(self.channel.socket.last,
b'501 Syntax: NOOP\r\n')
def test_QUIT(self):
self.write_line(b'QUIT')
self.assertEqual(self.channel.socket.last, b'221 Bye\r\n')
def test_HELO_QUIT(self):
self.write_line(b'HELO example')
self.write_line(b'QUIT')
self.assertEqual(self.channel.socket.last, b'221 Bye\r\n')
def test_QUIT_arg_ignored(self):
self.write_line(b'QUIT bye bye')
self.assertEqual(self.channel.socket.last, b'221 Bye\r\n')
def test_bad_state(self):
self.channel.smtp_state = 'BAD STATE'
self.write_line(b'HELO example')
self.assertEqual(self.channel.socket.last,
b'451 Internal confusion\r\n')
def test_command_too_long(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL from: ' +
b'a' * self.channel.command_size_limit +
b'@example')
self.assertEqual(self.channel.socket.last,
b'500 Error: line too long\r\n')
def test_MAIL_command_limit_extended_with_SIZE(self):
self.write_line(b'EHLO example')
fill_len = self.channel.command_size_limit - len('MAIL from:<@example>')
self.write_line(b'MAIL from:<' +
b'a' * fill_len +
b'@example> SIZE=1234')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
self.write_line(b'MAIL from:<' +
b'a' * (fill_len + 26) +
b'@example> SIZE=1234')
self.assertEqual(self.channel.socket.last,
b'500 Error: line too long\r\n')
def test_MAIL_command_rejects_SMTPUTF8_by_default(self):
self.write_line(b'EHLO example')
self.write_line(
b'MAIL from: <naive@example.com> BODY=8BITMIME SMTPUTF8')
self.assertEqual(self.channel.socket.last[0:1], b'5')
def test_data_longer_than_default_data_size_limit(self):
# Hack the default so we don't have to generate so much data.
self.channel.data_size_limit = 1048
self.write_line(b'HELO example')
self.write_line(b'MAIL From:eggs@example')
self.write_line(b'RCPT To:spam@example')
self.write_line(b'DATA')
self.write_line(b'A' * self.channel.data_size_limit +
b'A\r\n.')
self.assertEqual(self.channel.socket.last,
b'552 Error: Too much mail data\r\n')
def test_MAIL_size_parameter(self):
self.write_line(b'EHLO example')
self.write_line(b'MAIL FROM:<eggs@example> SIZE=512')
self.assertEqual(self.channel.socket.last,
b'250 OK\r\n')
def test_MAIL_invalid_size_parameter(self):
self.write_line(b'EHLO example')
self.write_line(b'MAIL FROM:<eggs@example> SIZE=invalid')
self.assertEqual(self.channel.socket.last,
b'501 Syntax: MAIL FROM: <address> [SP <mail-parameters>]\r\n')
def test_MAIL_RCPT_unknown_parameters(self):
self.write_line(b'EHLO example')
self.write_line(b'MAIL FROM:<eggs@example> ham=green')
self.assertEqual(self.channel.socket.last,
b'555 MAIL FROM parameters not recognized or not implemented\r\n')
self.write_line(b'MAIL FROM:<eggs@example>')
self.write_line(b'RCPT TO:<eggs@example> ham=green')
self.assertEqual(self.channel.socket.last,
b'555 RCPT TO parameters not recognized or not implemented\r\n')
def test_MAIL_size_parameter_larger_than_default_data_size_limit(self):
self.channel.data_size_limit = 1048
self.write_line(b'EHLO example')
self.write_line(b'MAIL FROM:<eggs@example> SIZE=2096')
self.assertEqual(self.channel.socket.last,
b'552 Error: message size exceeds fixed maximum message size\r\n')
def test_need_MAIL(self):
self.write_line(b'HELO example')
self.write_line(b'RCPT to:spam@example')
self.assertEqual(self.channel.socket.last,
b'503 Error: need MAIL command\r\n')
def test_MAIL_syntax_HELO(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL from eggs@example')
self.assertEqual(self.channel.socket.last,
b'501 Syntax: MAIL FROM: <address>\r\n')
def test_MAIL_syntax_EHLO(self):
self.write_line(b'EHLO example')
self.write_line(b'MAIL from eggs@example')
self.assertEqual(self.channel.socket.last,
b'501 Syntax: MAIL FROM: <address> [SP <mail-parameters>]\r\n')
def test_MAIL_missing_address(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL from:')
self.assertEqual(self.channel.socket.last,
b'501 Syntax: MAIL FROM: <address>\r\n')
def test_MAIL_chevrons(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL from:<eggs@example>')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
def test_MAIL_empty_chevrons(self):
self.write_line(b'EHLO example')
self.write_line(b'MAIL from:<>')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
def test_MAIL_quoted_localpart(self):
self.write_line(b'EHLO example')
self.write_line(b'MAIL from: <"Fred Blogs"@example.com>')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
self.assertEqual(self.channel.mailfrom, '"Fred Blogs"@example.com')
def test_MAIL_quoted_localpart_no_angles(self):
self.write_line(b'EHLO example')
self.write_line(b'MAIL from: "Fred Blogs"@example.com')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
self.assertEqual(self.channel.mailfrom, '"Fred Blogs"@example.com')
def test_MAIL_quoted_localpart_with_size(self):
self.write_line(b'EHLO example')
self.write_line(b'MAIL from: <"Fred Blogs"@example.com> SIZE=1000')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
self.assertEqual(self.channel.mailfrom, '"Fred Blogs"@example.com')
def test_MAIL_quoted_localpart_with_size_no_angles(self):
self.write_line(b'EHLO example')
self.write_line(b'MAIL from: "Fred Blogs"@example.com SIZE=1000')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
self.assertEqual(self.channel.mailfrom, '"Fred Blogs"@example.com')
def test_nested_MAIL(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL from:eggs@example')
self.write_line(b'MAIL from:spam@example')
self.assertEqual(self.channel.socket.last,
b'503 Error: nested MAIL command\r\n')
def test_VRFY(self):
self.write_line(b'VRFY eggs@example')
self.assertEqual(self.channel.socket.last,
b'252 Cannot VRFY user, but will accept message and attempt ' + \
b'delivery\r\n')
def test_VRFY_syntax(self):
self.write_line(b'VRFY')
self.assertEqual(self.channel.socket.last,
b'501 Syntax: VRFY <address>\r\n')
def test_EXPN_not_implemented(self):
self.write_line(b'EXPN')
self.assertEqual(self.channel.socket.last,
b'502 EXPN not implemented\r\n')
def test_no_HELO_MAIL(self):
self.write_line(b'MAIL from:<foo@example.com>')
self.assertEqual(self.channel.socket.last,
b'503 Error: send HELO first\r\n')
def test_need_RCPT(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL From:eggs@example')
self.write_line(b'DATA')
self.assertEqual(self.channel.socket.last,
b'503 Error: need RCPT command\r\n')
def test_RCPT_syntax_HELO(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL From: eggs@example')
self.write_line(b'RCPT to eggs@example')
self.assertEqual(self.channel.socket.last,
b'501 Syntax: RCPT TO: <address>\r\n')
def test_RCPT_syntax_EHLO(self):
self.write_line(b'EHLO example')
self.write_line(b'MAIL From: eggs@example')
self.write_line(b'RCPT to eggs@example')
self.assertEqual(self.channel.socket.last,
b'501 Syntax: RCPT TO: <address> [SP <mail-parameters>]\r\n')
def test_RCPT_lowercase_to_OK(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL From: eggs@example')
self.write_line(b'RCPT to: <eggs@example>')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
def test_no_HELO_RCPT(self):
self.write_line(b'RCPT to eggs@example')
self.assertEqual(self.channel.socket.last,
b'503 Error: send HELO first\r\n')
def test_data_dialog(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL From:eggs@example')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
self.write_line(b'RCPT To:spam@example')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
self.write_line(b'DATA')
self.assertEqual(self.channel.socket.last,
b'354 End data with <CR><LF>.<CR><LF>\r\n')
self.write_line(b'data\r\nmore\r\n.')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
self.assertEqual(self.server.messages,
[(('peer-address', 'peer-port'),
'eggs@example',
['spam@example'],
'data\nmore')])
def test_DATA_syntax(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL From:eggs@example')
self.write_line(b'RCPT To:spam@example')
self.write_line(b'DATA spam')
self.assertEqual(self.channel.socket.last, b'501 Syntax: DATA\r\n')
def test_no_HELO_DATA(self):
self.write_line(b'DATA spam')
self.assertEqual(self.channel.socket.last,
b'503 Error: send HELO first\r\n')
def test_data_transparency_section_4_5_2(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL From:eggs@example')
self.write_line(b'RCPT To:spam@example')
self.write_line(b'DATA')
self.write_line(b'..\r\n.\r\n')
self.assertEqual(self.channel.received_data, '.')
def test_multiple_RCPT(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL From:eggs@example')
self.write_line(b'RCPT To:spam@example')
self.write_line(b'RCPT To:ham@example')
self.write_line(b'DATA')
self.write_line(b'data\r\n.')
self.assertEqual(self.server.messages,
[(('peer-address', 'peer-port'),
'eggs@example',
['spam@example','ham@example'],
'data')])
def test_manual_status(self):
# checks that the Channel is able to return a custom status message
self.write_line(b'HELO example')
self.write_line(b'MAIL From:eggs@example')
self.write_line(b'RCPT To:spam@example')
self.write_line(b'DATA')
self.write_line(b'return status\r\n.')
self.assertEqual(self.channel.socket.last, b'250 Okish\r\n')
def test_RSET(self):
self.write_line(b'HELO example')
self.write_line(b'MAIL From:eggs@example')
self.write_line(b'RCPT To:spam@example')
self.write_line(b'RSET')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
self.write_line(b'MAIL From:foo@example')
self.write_line(b'RCPT To:eggs@example')
self.write_line(b'DATA')
self.write_line(b'data\r\n.')
self.assertEqual(self.server.messages,
[(('peer-address', 'peer-port'),
'foo@example',
['eggs@example'],
'data')])
def test_HELO_RSET(self):
self.write_line(b'HELO example')
self.write_line(b'RSET')
self.assertEqual(self.channel.socket.last, b'250 OK\r\n')
def test_RSET_syntax(self):
self.write_line(b'RSET hi')
self.assertEqual(self.channel.socket.last, b'501 Syntax: RSET\r\n')
def test_unknown_command(self):
self.write_line(b'UNKNOWN_CMD')
self.assertEqual(self.channel.socket.last,
b'500 Error: command "UNKNOWN_CMD" not ' + \
b'recognized\r\n')
def test_attribute_deprecations(self):
with warnings_helper.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__server
with warnings_helper.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__server = 'spam'
with warnings_helper.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__line
with warnings_helper.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__line = 'spam'
with warnings_helper.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__state
with warnings_helper.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__state = 'spam'
with warnings_helper.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__greeting
with warnings_helper.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__greeting = 'spam'
with warnings_helper.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__mailfrom
with warnings_helper.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__mailfrom = 'spam'
with warnings_helper.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__rcpttos
with warnings_helper.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__rcpttos = 'spam'
with warnings_helper.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__data
with warnings_helper.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__data = 'spam'
with warnings_helper.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__fqdn
with warnings_helper.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__fqdn = 'spam'
with warnings_helper.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__peer
with warnings_helper.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__peer = 'spam'
with warnings_helper.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__conn
with warnings_helper.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__conn = 'spam'
with warnings_helper.check_warnings(('', DeprecationWarning)):
spam = self.channel._SMTPChannel__addr
with warnings_helper.check_warnings(('', DeprecationWarning)):
self.channel._SMTPChannel__addr = 'spam'
@unittest.skipUnless(socket_helper.IPV6_ENABLED, "IPv6 not enabled")
| SMTPDChannelTest |
python | apache__airflow | providers/fab/tests/unit/fab/www/views/test_views_custom_user_views.py | {
"start": 6454,
"end": 13172
} | class ____:
@pytest.fixture(autouse=True)
def app_context(self, app):
self.app = app
self.session = app.appbuilder.session
self.security_manager = app.appbuilder.sm
self.interface = app.session_interface
self.model = self.interface.sql_session_model
self.serializer = self.interface.serializer
with app.app_context():
self.session.execute(delete(self.model))
self.session.commit()
self.session.flush()
self.user_1 = create_user(
app,
username="user_to_delete_1",
role_name="user_to_delete",
)
self.user_2 = create_user(
app,
username="user_to_delete_2",
role_name="user_to_delete",
)
self.session.commit()
self.session.flush()
yield
delete_user(app, "user_to_delete_1")
delete_user(app, "user_to_delete_2")
def create_user_db_session(self, session_id: str, time_delta: timedelta, user_id: int):
self.session.add(
self.model(
session_id=session_id,
data=self.serializer.encode({"_user_id": user_id}),
expiry=datetime.now() + time_delta,
)
)
@pytest.mark.parametrize(
("time_delta", "user_sessions_deleted"),
[
pytest.param(timedelta(days=-1), True, id="Both expired"),
pytest.param(timedelta(hours=1), True, id="Both fresh"),
pytest.param(timedelta(days=1), True, id="Both future"),
],
)
def test_reset_user_sessions_delete(self, time_delta: timedelta, user_sessions_deleted: bool):
self.create_user_db_session("session_id_1", time_delta, self.user_1.id)
self.create_user_db_session("session_id_2", time_delta, self.user_2.id)
self.session.commit()
self.session.flush()
assert self.session.scalar(select(func.count()).select_from(self.model)) == 2
assert self.get_session_by_id("session_id_1") is not None
assert self.get_session_by_id("session_id_2") is not None
with self.app.app_context():
self.security_manager.reset_password(self.user_1.id, "new_password")
self.session.commit()
self.session.flush()
if user_sessions_deleted:
assert self.session.scalar(select(func.count()).select_from(self.model)) == 1
assert self.get_session_by_id("session_id_1") is None
else:
assert self.session.scalar(select(func.count()).select_from(self.model)) == 2
assert self.get_session_by_id("session_id_1") is not None
def get_session_by_id(self, session_id: str):
return self.session.scalar(select(self.model).where(self.model.session_id == session_id))
@mock.patch("airflow.providers.fab.auth_manager.security_manager.override.flash")
@mock.patch(
"airflow.providers.fab.auth_manager.security_manager.override.has_request_context", return_value=True
)
@mock.patch(
"airflow.providers.fab.auth_manager.security_manager.override.MAX_NUM_DATABASE_USER_SESSIONS", 1
)
def test_refuse_delete(self, _mock_has_context, flash_mock):
self.create_user_db_session("session_id_1", timedelta(days=1), self.user_1.id)
self.create_user_db_session("session_id_2", timedelta(days=1), self.user_2.id)
self.session.commit()
self.session.flush()
assert self.session.scalar(select(func.count()).select_from(self.model)) == 2
assert self.get_session_by_id("session_id_1") is not None
assert self.get_session_by_id("session_id_2") is not None
with self.app.app_context():
self.security_manager.reset_password(self.user_1.id, "new_password")
assert flash_mock.called
assert (
"The old sessions for user user_to_delete_1 have <b>NOT</b> been deleted!"
in flash_mock.call_args[0][0]
)
assert self.session.scalar(select(func.count()).select_from(self.model)) == 2
assert self.get_session_by_id("session_id_1") is not None
assert self.get_session_by_id("session_id_2") is not None
@mock.patch("airflow.providers.fab.auth_manager.security_manager.override.flash")
@mock.patch(
"airflow.providers.fab.auth_manager.security_manager.override.has_request_context", return_value=True
)
def test_warn_securecookie(self, _mock_has_context, flash_mock):
self.app.session_interface = SecureCookieSessionInterface()
with self.app.app_context():
self.security_manager.reset_password(self.user_1.id, "new_password")
assert flash_mock.called
assert (
"Since you are using `securecookie` session backend mechanism, we cannot"
in flash_mock.call_args[0][0]
)
@mock.patch("airflow.providers.fab.auth_manager.security_manager.override.log")
@mock.patch(
"airflow.providers.fab.auth_manager.security_manager.override.MAX_NUM_DATABASE_USER_SESSIONS", 1
)
def test_refuse_delete_cli(self, log_mock):
self.create_user_db_session("session_id_1", timedelta(days=1), self.user_1.id)
self.create_user_db_session("session_id_2", timedelta(days=1), self.user_2.id)
self.session.commit()
self.session.flush()
assert self.session.scalar(select(func.count()).select_from(self.model)) == 2
assert self.get_session_by_id("session_id_1") is not None
assert self.get_session_by_id("session_id_2") is not None
with self.app.app_context():
self.security_manager.reset_password(self.user_1.id, "new_password")
assert log_mock.warning.called
assert (
"The old sessions for user user_to_delete_1 have *NOT* been deleted!\n"
in log_mock.warning.call_args[0][0]
)
assert self.session.scalar(select(func.count()).select_from(self.model)) == 2
assert self.get_session_by_id("session_id_1") is not None
assert self.get_session_by_id("session_id_2") is not None
@mock.patch("airflow.providers.fab.auth_manager.security_manager.override.log")
def test_warn_securecookie_cli(self, log_mock):
self.app.session_interface = SecureCookieSessionInterface()
with self.app.app_context():
self.security_manager.reset_password(self.user_1.id, "new_password")
assert log_mock.warning.called
assert (
"Since you are using `securecookie` session backend mechanism, we cannot"
in log_mock.warning.call_args[0][0]
)
| TestResetUserSessions |
python | pytorch__pytorch | test/quantization/core/test_quantized_op.py | {
"start": 389770,
"end": 393011
} | class ____(TestCase):
"""Tests the element-wise equality ops."""
@given(A=hu.tensor(shapes=((3, 4, 5),),
qparams=hu.qparams()),
B=hu.tensor(shapes=((5,), (1, 5), (1, 1, 5), (4, 5), (3, 4, 5)),
qparams=hu.qparams()))
def test_compare_tensor_tensor(self, A, B):
A, (scale_a, zero_point_a, dtype_a) = A
B, (scale_b, zero_point_b, dtype_b) = B
tA = torch.from_numpy(A)
tB = torch.from_numpy(B)
qA = torch.quantize_per_tensor(tA, scale=scale_a, zero_point=zero_point_a,
dtype=dtype_a)
qB = torch.quantize_per_tensor(tB, scale=scale_b, zero_point=zero_point_b,
dtype=dtype_b)
dqA = qA.dequantize()
dqB = qB.dequantize()
ops_under_test = ('__eq__', '__ne__', '__ge__', '__le__', '__gt__',
'__lt__', 'eq', 'ne', 'ge', 'le', 'gt', 'lt')
for op in ops_under_test:
result_ref = getattr(dqA, op)(dqB)
result = getattr(qA, op)(qB)
self.assertEqual(result_ref, result,
msg=f"'tensor.{op}(tensor)'' failed")
# Reversed broadcasting.
result_ref = getattr(dqB, op)(dqA)
result = getattr(qB, op)(qA)
self.assertEqual(result_ref, result,
msg=f"'tensor.{op}(tensor)'' failed")
@given(A=hu.tensor(shapes=((3, 4, 5),),
qparams=hu.qparams()),
b=hu.floats(allow_infinity=False, allow_nan=False))
def test_compare_tensor_scalar(self, A, b):
A, (scale_a, zero_point_a, dtype_a) = A
tA = torch.from_numpy(A)
qA = torch.quantize_per_tensor(tA, scale=scale_a, zero_point=zero_point_a,
dtype=dtype_a)
dqA = qA.dequantize()
ops_under_test_reversible = ('__eq__', '__ne__', '__ge__', '__le__',
'__gt__', '__lt__')
ops_under_test_nonreversible = ('eq', 'ne', 'ge', 'le', 'gt', 'lt')
for op in ops_under_test_reversible:
result_ref = getattr(dqA, op)(b)
result = getattr(qA, op)(b)
note(f"result_ref 1: {result_ref}")
note(f"result 1: {result}")
self.assertEqual(result_ref, result,
msg=f"'tensor.{op}(scalar)'' failed")
# Reversed broadcasting.
result_ref = getattr(b, op)(dqA)
result = getattr(b, op)(qA)
note(f"result_ref 2: {result_ref}")
note(f"result 2: {result}")
self.assertEqual(result_ref, result,
msg=f"'scalar.{op}(tensor)'' failed")
for op in ops_under_test_nonreversible:
result_ref = getattr(dqA, op)(b)
result = getattr(qA, op)(b)
note(f"result_ref 3: {result_ref}")
note(f"result 3: {result}")
self.assertEqual(result_ref, result,
msg=f"'tensor.{op}(scalar)'' failed")
"""Tests the correctness of the quantized::embedding_bag_(byte|4bit|2bit)_prepack_with_rowwise_min_max ops."""
| TestComparatorOps |
python | mwaskom__seaborn | tests/test_rcmod.py | {
"start": 7345,
"end": 8010
} | class ____(RCParamFixtures):
def test_set_palette(self):
rcmod.set_palette("deep")
assert utils.get_color_cycle() == palettes.color_palette("deep", 10)
rcmod.set_palette("pastel6")
assert utils.get_color_cycle() == palettes.color_palette("pastel6", 6)
rcmod.set_palette("dark", 4)
assert utils.get_color_cycle() == palettes.color_palette("dark", 4)
rcmod.set_palette("Set2", color_codes=True)
assert utils.get_color_cycle() == palettes.color_palette("Set2", 8)
assert mpl.colors.same_color(
mpl.rcParams["patch.facecolor"], palettes.color_palette()[0]
)
| TestPalette |
python | tiangolo__fastapi | docs_src/sql_databases/tutorial002_an.py | {
"start": 539,
"end": 2639
} | class ____(HeroBase):
name: Union[str, None] = None
age: Union[int, None] = None
secret_name: Union[str, None] = None
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
connect_args = {"check_same_thread": False}
engine = create_engine(sqlite_url, connect_args=connect_args)
def create_db_and_tables():
SQLModel.metadata.create_all(engine)
def get_session():
with Session(engine) as session:
yield session
SessionDep = Annotated[Session, Depends(get_session)]
app = FastAPI()
@app.on_event("startup")
def on_startup():
create_db_and_tables()
@app.post("/heroes/", response_model=HeroPublic)
def create_hero(hero: HeroCreate, session: SessionDep):
db_hero = Hero.model_validate(hero)
session.add(db_hero)
session.commit()
session.refresh(db_hero)
return db_hero
@app.get("/heroes/", response_model=List[HeroPublic])
def read_heroes(
session: SessionDep,
offset: int = 0,
limit: Annotated[int, Query(le=100)] = 100,
):
heroes = session.exec(select(Hero).offset(offset).limit(limit)).all()
return heroes
@app.get("/heroes/{hero_id}", response_model=HeroPublic)
def read_hero(hero_id: int, session: SessionDep):
hero = session.get(Hero, hero_id)
if not hero:
raise HTTPException(status_code=404, detail="Hero not found")
return hero
@app.patch("/heroes/{hero_id}", response_model=HeroPublic)
def update_hero(hero_id: int, hero: HeroUpdate, session: SessionDep):
hero_db = session.get(Hero, hero_id)
if not hero_db:
raise HTTPException(status_code=404, detail="Hero not found")
hero_data = hero.model_dump(exclude_unset=True)
hero_db.sqlmodel_update(hero_data)
session.add(hero_db)
session.commit()
session.refresh(hero_db)
return hero_db
@app.delete("/heroes/{hero_id}")
def delete_hero(hero_id: int, session: SessionDep):
hero = session.get(Hero, hero_id)
if not hero:
raise HTTPException(status_code=404, detail="Hero not found")
session.delete(hero)
session.commit()
return {"ok": True}
| HeroUpdate |
python | wandb__wandb | wandb/automations/_generated/get_automations_by_entity.py | {
"start": 648,
"end": 943
} | class ____(GQLResult):
node: Optional[ProjectTriggersFields]
GetAutomationsByEntity.model_rebuild()
GetAutomationsByEntityScope.model_rebuild()
GetAutomationsByEntityScopeProjects.model_rebuild()
GetAutomationsByEntityScopeProjectsEdges.model_rebuild()
| GetAutomationsByEntityScopeProjectsEdges |
python | pandas-dev__pandas | asv_bench/benchmarks/multiindex_object.py | {
"start": 6618,
"end": 8050
} | class ____:
params = [
("monotonic", "non_monotonic"),
("datetime", "int", "string", "ea_int"),
("intersection", "union", "symmetric_difference"),
(False, None),
]
param_names = ["index_structure", "dtype", "method", "sort"]
def setup(self, index_structure, dtype, method, sort):
N = 10**5
level1 = range(1000)
level2 = date_range(start="1/1/2000", periods=N // 1000)
dates_left = MultiIndex.from_product([level1, level2])
level2 = range(N // 1000)
int_left = MultiIndex.from_product([level1, level2])
level2 = Index([f"i-{i}" for i in range(N // 1000)], dtype=object).values
str_left = MultiIndex.from_product([level1, level2])
level2 = range(N // 1000)
ea_int_left = MultiIndex.from_product([level1, Series(level2, dtype="Int64")])
data = {
"datetime": dates_left,
"int": int_left,
"string": str_left,
"ea_int": ea_int_left,
}
if index_structure == "non_monotonic":
data = {k: mi[::-1] for k, mi in data.items()}
data = {k: {"left": mi, "right": mi[:-1]} for k, mi in data.items()}
self.left = data[dtype]["left"]
self.right = data[dtype]["right"]
def time_operation(self, index_structure, dtype, method, sort):
getattr(self.left, method)(self.right, sort=sort)
| SetOperations |
python | ray-project__ray | python/ray/train/examples/horovod/horovod_example.py | {
"start": 5105,
"end": 8286
} | class ____:
def __init__(self, config):
self.log_interval = config.get("log_interval", 10)
self.use_cuda = config.get("use_cuda", False)
if self.use_cuda:
torch.cuda.set_device(hvd.local_rank())
self.model, self.optimizer, self.train_loader, self.train_sampler = setup(
config
)
def train(self, epoch):
loss = train_epoch(
self.model,
self.optimizer,
self.train_sampler,
self.train_loader,
epoch,
self.log_interval,
self.use_cuda,
)
return loss
if __name__ == "__main__":
# Training settings
parser = argparse.ArgumentParser(
description="PyTorch MNIST Example",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
"--batch-size",
type=int,
default=64,
metavar="N",
help="input batch size for training (default: 64)",
)
parser.add_argument(
"--num-epochs",
type=int,
default=5,
metavar="N",
help="number of epochs to train (default: 10)",
)
parser.add_argument(
"--lr",
type=float,
default=0.01,
metavar="LR",
help="learning rate (default: 0.01)",
)
parser.add_argument(
"--momentum",
type=float,
default=0.5,
metavar="M",
help="SGD momentum (default: 0.5)",
)
parser.add_argument(
"--use-gpu", action="store_true", default=False, help="enables CUDA training"
)
parser.add_argument(
"--seed", type=int, default=42, metavar="S", help="random seed (default: 42)"
)
parser.add_argument(
"--log-interval",
type=int,
default=10,
metavar="N",
help="how many batches to wait before logging training status",
)
parser.add_argument(
"--use-adasum",
action="store_true",
default=False,
help="use adasum algorithm to do reduction",
)
parser.add_argument(
"--num-workers",
type=int,
default=2,
help="Number of Ray workers to use for training.",
)
parser.add_argument(
"--data-dir",
help="location of the training dataset in the local filesystem ("
"will be downloaded if needed)",
)
parser.add_argument(
"--address",
required=False,
type=str,
default=None,
help="Address of Ray cluster.",
)
args = parser.parse_args()
if args.address:
ray.init(args.address)
else:
ray.init()
use_cuda = args.use_gpu if args.use_gpu is not None else False
kwargs = {
"data_dir": args.data_dir,
"seed": args.seed,
"use_cuda": use_cuda,
"batch_size": args.batch_size,
"use_adasum": args.use_adasum if args.use_adasum else False,
"lr": args.lr,
"momentum": args.momentum,
"num_epochs": args.num_epochs,
"log_interval": args.log_interval,
}
main(num_workers=args.num_workers, use_gpu=use_cuda, kwargs=kwargs)
| HorovodTrainClass |
python | huggingface__transformers | src/transformers/models/flaubert/modeling_flaubert.py | {
"start": 15651,
"end": 18543
} | class ____(nn.Module):
"""
Compute SQuAD 2.0 answer class from classification and start tokens hidden states.
Args:
config ([`FlaubertConfig`]):
The config used by the model, will be used to grab the `hidden_size` of the model.
"""
def __init__(self, config: FlaubertConfig):
super().__init__()
self.dense_0 = nn.Linear(config.hidden_size * 2, config.hidden_size)
self.activation = nn.Tanh()
self.dense_1 = nn.Linear(config.hidden_size, 1, bias=False)
def forward(
self,
hidden_states: torch.FloatTensor,
start_states: Optional[torch.FloatTensor] = None,
start_positions: Optional[torch.LongTensor] = None,
cls_index: Optional[torch.LongTensor] = None,
) -> torch.FloatTensor:
"""
Args:
hidden_states (`torch.FloatTensor` of shape `(batch_size, seq_len, hidden_size)`):
The final hidden states of the model.
start_states (`torch.FloatTensor` of shape `(batch_size, seq_len, hidden_size)`, *optional*):
The hidden states of the first tokens for the labeled span.
start_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
The position of the first token for the labeled span.
cls_index (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
Position of the CLS token for each sentence in the batch. If `None`, takes the last token.
<Tip>
One of `start_states` or `start_positions` should be not `None`. If both are set, `start_positions` overrides
`start_states`.
</Tip>
Returns:
`torch.FloatTensor`: The SQuAD 2.0 answer class.
"""
# No dependency on end_feature so that we can obtain one single `cls_logits` for each sample.
hsz = hidden_states.shape[-1]
assert start_states is not None or start_positions is not None, (
"One of start_states, start_positions should be not None"
)
if start_positions is not None:
start_positions = start_positions[:, None, None].expand(-1, -1, hsz) # shape (bsz, 1, hsz)
start_states = hidden_states.gather(-2, start_positions).squeeze(-2) # shape (bsz, hsz)
if cls_index is not None:
cls_index = cls_index[:, None, None].expand(-1, -1, hsz) # shape (bsz, 1, hsz)
cls_token_state = hidden_states.gather(-2, cls_index).squeeze(-2) # shape (bsz, hsz)
else:
cls_token_state = hidden_states[:, -1, :] # shape (bsz, hsz)
x = self.dense_0(torch.cat([start_states, cls_token_state], dim=-1))
x = self.activation(x)
x = self.dense_1(x).squeeze(-1)
return x
# Copied from transformers.models.xlm.modeling_xlm.XLMSQuADHead with XLM->Flaubert
| FlaubertPoolerAnswerClass |
python | geekcomputers__Python | advanced_calculator.py | {
"start": 1172,
"end": 11260
} | class ____:
def __init__(self):
self.take_inputs()
def add(self):
"""summary: Get the sum of numbers
Returns:
_type_: _description_
"""
return self.num1 + self.num2
def sub(self):
"""_summary_: Get the difference of numbers
Returns:
_type_: _description_
"""
return self.num1 - self.num2
def multi(self):
"""_summary_: Get the product of numbers
Returns:
_type_: _description_
"""
return self.num1 * self.num2
def div(self):
"""_summary_: Get the quotient of numbers
Returns:
_type_: _description_
"""
# What do we mean by quotient?
return self.num1 / self.num2
def power(self):
"""_summary_: Get the power of numbers
Returns:
_type_: _description_
"""
return self.num1**self.num2
def root(self):
"""_summary_: Get the root of numbers
Returns:
_type_: _description_
"""
return self.num1 ** (1 / self.num2)
def remainer(self):
"""_summary_: Get the remainder of numbers
Returns:
_type_: _description_
"""
# Do I have to use the '.' period or full_stop in the numbers?
return self.num1 % self.num2
def cube_root(self):
"""_summary_: Get the cube root of numbers
Returns:
_type_: _description_
"""
return self.num1 ** (1 / 3)
def cube_exponent(self):
"""_summary_: Get the cube exponent of numbers
Returns:
_type_: _description_
"""
return self.num1**3
def square_root(self):
"""_summary_: Get the square root of numbers
Returns:
_type_: _description_
"""
return self.num1 ** (1 / 2)
def square_exponent(self):
"""_summary_: Get the square exponent of numbers
Returns:
_type_: _description_
"""
return self.num1**2
def factorial(self):
"""_summary_: Get the factorial of numbers"""
pass
def list_factors(self):
"""_summary_: Get the list of factors of numbers"""
pass
def factorial(self):
for i in range(1, self.num + 1):
self.factorial = self.factorial * i # is this right?
def LCM(self):
"""_summary_: Get the LCM of numbers"""
pass
def HCF(self):
"""_summary_: Get the HCF of numbers"""
pass
# class time: # Working with days calculator
def age_calculator(self):
"""_summary_: Get the age of the user"""
# This is be very accurate and precise it should include proper leap year and last birthday till now every detail.
# Should show the preciseness in seconds when called.
pass
def days_calculator(self):
"""_summary_: Get the days between two dates"""
pass
def leap_year(self):
"""_summary_: Get the leap year of the user"""
pass
def perimeter(self):
"""_summary_: Get the perimeter of the user"""
pass
class Trigonometry:
"""_summary_: Class enriched with all the methods to solve basic trignometric problems"""
def pythagorean_theorem(self):
"""_summary_: Get the pythagorean theorem of the user"""
pass
def find_hypotenuse(self):
"""_summary_: Get the hypotenuse of the user"""
pass
def find_base(self):
"""_summary_: Get the base of the user"""
pass
def find_perpendicular(self):
"""_summary_: Get the perpendicular of the user"""
pass
# class Logarithms:
# Learn more about Maths in general
def quadratic_equation(self):
"""_summary_: Get the quadratic equation of the user"""
pass
def open_system_calculator(self):
"""_summary_: Open the calculator present on the machine of the user"""
# first identify the os
# track the calculator
# add a debugging feature like error handling
# for linux and mac
# if no such found then print a message to the user that sorry dear it wasn't possible to so
# then open it
def take_inputs(self):
"""_summary_: Take the inputs from the user in proper sucession"""
while True:
while True:
try:
# self.num1 = float(input("Enter The First Number: "))
# self.num2 = float(input("Enter The Second Number: "))
pprint("Enter your number")
# validation check must be done
break
except ValueError:
pprint("Please Enter A Valid Number")
continue
# To let the user to know it is time to exit.
pprint("Press 'q' to exit")
# if self.num1 == "q" or self.num2 == "q":
# exit() # Some how I need to exit it
def greeting(self):
"""_summary_: Greet the user with using Audio"""
text_to_audio = "Welcome To The Calculator"
self.gtts_object = gTTS(text=text_to_audio, lang="en", tld="co.in", slow=False)
tts = self.gtts_object
fp = BytesIO()
tts.write_to_fp(fp)
fp.seek(0) # Reset the BytesIO object to the beginning
mixer.init()
mixer.music.load(fp)
mixer.music.play()
while mixer.music.get_busy():
time.Clock().tick(10)
# Here OOP is not followed.
def user_name(self):
"""_summary_: Get the name of the user and have an option to greet him/her"""
self.name = input("Please enter your good name: ")
# Making validation checks here
text_to_audio = "{self.name}"
self.gtts_object = gTTS(text=text_to_audio, lang="en", tld="co.in", slow=False)
tts = self.gtts_object
fp = BytesIO()
tts.write_to_fp(fp)
fp.seek(0) # Reset the BytesIO object to the beginning
mixer.init()
mixer.music.load(fp)
mixer.music.play()
while mixer.music.get_busy():
time.Clock().tick(10)
def user_name_art(self):
"""_summary_: Get the name of the user and have an option to show him his user name in art"""
# Default is to show = True, else False if user tries to disable it.
# Tell him to show the time and date
# print(art.text2art(self.name))
# print(date and time of now)
# Remove whitespaces from beginning and end
# Remove middle name and last name
# Remove special characters
# Remove numbers
f_name = self.name.split(" ")[0]
f_name = f_name.strip()
# Remove every number present in it
# Will have to practice not logic
f_name = "".join([i for i in f_name if not i.isdigit()])
# perform string operations on it for the art to be displayed.
# Remove white spaces
# Remove middle name and last name
# Remove special characters
# Remove numbers
# Remove everything
class unitConversion:
"""_summary_: Class enriched with all the methods to convert units"""
# Do we full-stops in generating documentations?
def __init__(self):
"""_summary_: Initialise the class with the required attributes"""
self.take_inputs()
def length(self):
"""_summary_: Convert length units"""
# It should have a meter to unit and unit to meter converter
# Othe lengths units it should also have.
# Like cm to pico meter and what not
pass
def area(self):
# This will to have multiple shapes and polygons to it to improve it's area.
# This will to have multiple shapes and polygons to it to improve it's area.
# I will try to use the best of the formula to do it like the n number of polygons to be solved.
pass
def volume(self):
# Different shapes and polygons to it to improve it's volume.
pass
def mass(self):
pass
def time(self):
pass
def speed(self):
pass
def temperature(self):
pass
def data(self):
pass
def pressure(self):
pass
def energy(self):
pass
def power(self):
pass
def angle(self):
pass
def force(self):
pass
def frequency(self):
pass
def take_inputs(self):
pass
class CurrencyConverter:
def __init__(self):
self.take_inputs()
def take_inputs(self):
pass
def convert(self):
pass
class Commands:
def __init__(self):
self.take_inputs()
def previous_number(self):
pass
def previous_operation(self):
pass
def previous_result(self):
pass
def clear_screen(self):
# Do I need a clear screen?
# os.system("cls" if os.name == "nt" else "clear")
# os.system("cls")
# os.system("clear")
pass
if __name__ == "__main__":
operation_1 = Calculator(10, 5)
# Operations
# User interaction
# Study them properly and try to understand them.
# Study them properly and try to understand them in very detailed length. Please.
# Add a function to continually ask for input until the user enters a valid input.
# Let's explore colorma
# Also user log ins, and it saves user data and preferences.
# A feature of the least priority right now.
# List of features priority should be planned.
# Documentations are good to read and understand.
# A one stop solution is to stop and read the document.
# It is much better and easier to understand.
| Calculator |
python | mahmoud__boltons | tests/test_iterutils.py | {
"start": 762,
"end": 1701
} | class ____:
def test_empty_iterables(self):
"""
Empty iterables return None.
"""
s = set()
l = []
assert first(s) is None
assert first(l) is None
def test_default_value(self):
"""
Empty iterables + a default value return the default value.
"""
s = set()
l = []
assert first(s, default=42) == 42
assert first(l, default=3.14) == 3.14
l = [0, False, []]
assert first(l, default=3.14) == 3.14
def test_selection(self):
"""
Success cases with and without a key function.
"""
l = [(), 0, False, 3, []]
assert first(l, default=42) == 3
assert first(l, key=isint) == 0
assert first(l, key=isbool) is False
assert first(l, key=odd) == 3
assert first(l, key=even) == 0
assert first(l, key=is_meaning_of_life) is None
| TestFirst |
python | python-rapidjson__python-rapidjson | tests/test_streams.py | {
"start": 1372,
"end": 2140
} | class ____(io.StringIO):
def read(self, *args, **kwargs):
raise CattyError('No real reason')
def write(self, *args, **kwargs):
raise CattyError('No real reason')
def test_underlying_stream_read_error():
stream = CattyStream()
with pytest.raises(CattyError):
rj.load(stream)
def test_underlying_stream_write_error():
stream = CattyStream()
with pytest.raises(CattyError):
rj.dump('1234567890', stream)
def test_file_object():
for stream in tempfile.TemporaryFile(), tempfile.TemporaryFile('w+', encoding='utf-8'):
with stream:
datum = ['1234567890', 1234, 3.14, '~𓆙~']
rj.dump(datum, stream)
stream.seek(0)
assert rj.load(stream) == datum
| CattyStream |
python | google__jax | jax/experimental/array_serialization/serialization_test.py | {
"start": 29248,
"end": 41926
} | class ____(UserAPITestCase):
def setUp(self):
super().setUp()
global _DEFAULT_SHARDING
_DEFAULT_SHARDING = SingleDeviceSharding(jax.devices()[0])
self.tempdirs = []
def tearDown(self):
for tempdir in self.tempdirs:
tempdir.cleanup()
super().tearDown()
def create_tempdir(self):
tempdir = tempfile.TemporaryDirectory()
self.tempdirs.append(tempdir)
return pathlib.Path(tempdir.name).resolve()
@parameterized.product(tree=[{'a': 1}, [1, 2, 3], (1, 2, 3), 1, 2, 3])
def test_save_then_load(self, tree): # pylint: disable=redefined-outer-name
path = self.create_tempdir()
tree = jax.tree.map(jnp.array, tree)
tree_save(tree, path)
tree2 = tree_load(path)
self.assertPyTreeEqual(tree, tree2)
@parameterized.product(dtype=_DTYPES_LIST)
def test_saving_dtype(self, dtype):
if dtype in _X64_DTYPES_LIST and jtu.test_device_matches(['tpu']):
self.skipTest('Don\'t test x64 dtypes on TPUs')
path = self.create_tempdir()
test_tree = self.generate_clean_tree(dtype=dtype)
tree_save(test_tree, path)
new_tree = tree_load(path)
self.assertPyTreeEqual(test_tree, new_tree)
def test_do_not_overwrite_noncheckpoint_directories(self):
path = self.create_tempdir()
path.mkdir(exist_ok=True)
(path / 'hello.txt').write_text('Hello World')
with self.assertRaisesRegex(RuntimeError, 'Refusing to work on a directory'
' that is not a previous checkpoint.'):
tree_save({'a': jnp.ones(1)}, path)
def test_checkpoint_exists(self):
path = self.create_tempdir()
tree_save({'a': jnp.ones(1)}, path)
with self.assertRaises(ValueError):
tree_save({'a': jnp.ones(1)}, path, overwrite=False)
@parameterized.product(test_load_fail=[True, False])
def test_custom_types(self, test_load_fail):
path = self.create_tempdir()
with custom_types_threading_lock:
magic_value = jnp.ones(()) * 37
n = CustomNode(magic_value)
d = CustomDataclass(magic_value, 'hello', magic_value + 1)
s = CustomStatic(magic_value - 1)
tree_to_save = [n, (d, s)]
register_pytree_node_serialization(CustomNode,
serialized_name='CustomNode',
serialize_auxdata=pickle.dumps,
deserialize_auxdata=pickle.loads)
register_pytree_node_serialization(CustomStatic,
serialized_name='CustomStatic',
serialize_auxdata=pickle.dumps,
deserialize_auxdata=pickle.loads)
register_pytree_node_serialization(CustomDataclass,
serialized_name='CustomDataclass',
serialize_auxdata=pickle.dumps,
deserialize_auxdata=pickle.loads)
tree_save(tree_to_save, path)
if test_load_fail:
_ = [_remove_from_serialization_registry(cls)
for cls in [CustomStatic, CustomNode, CustomDataclass]]
with self.assertRaises(ValueError):
_ = tree_load(path)
else:
tree2 = tree_load(path)
self.assertEqual(tree2[0].a, magic_value)
self.assertEqual(tree2[1][0].a, magic_value)
self.assertEqual(tree2[1][0].c, 'hello')
self.assertEqual(tree2[1][0].d, magic_value + 1)
self.assertEqual(tree2[1][1].a, magic_value - 1)
_ = [_remove_from_serialization_registry(cls)
for cls in [CustomStatic, CustomNode, CustomDataclass]]
def test_flax_frozen_dict(self):
path = self.create_tempdir()
try:
# pylint: disable=g-import-not-at-top
# pylint: disable=g-importing-member
from flax.core.frozen_dict import FrozenDict
# pylint: enable=g-importing-member
# pylint: enable=g-import-not-at-top
except ImportError:
logging.warning('Skipping Flax FrozenDict tests as flax is not installed')
return
try:
register_pytree_node_serialization(FrozenDict,
serialized_name='FrozenDict',
serialize_auxdata=pickle.dumps,
deserialize_auxdata=pickle.loads)
tree_save(FrozenDict(a=1, b=self.generate_clean_tree()), path)
tree_load(path)
finally:
_remove_from_serialization_registry(FrozenDict)
def test_register_as_decorator(self):
@partial(register_pytree_node_serialization,
serialized_name='CustomDNode',
serialize_auxdata=json.dumps,
deserialize_auxdata=json.loads)
@partial(jax.tree_util.register_dataclass, data_fields=['a', 'b'],
meta_fields=[])
@dataclass
class CustomDNode:
a: int
b: int
# test whether the object can be created (is visible in this scope)
_ = CustomDNode(1, 2)
def test_custom_node_registration(self):
path = self.create_tempdir()
@jax.tree_util.register_static
@dataclass
class P:
a: int = 2
@partial(jax.tree_util.register_dataclass, data_fields=['a', 'b'],
meta_fields=['op'])
@dataclass
class D:
a: Any
b: Any
op: str
def serialize_D(data):
return json.dumps(jax.tree.map(lambda x: np.array(x).tolist(), data)
).encode('utf-8')
def deserialize_D(data):
return jnp.array(json.loads(data))
data = [jnp.ones(1), {'world': [jnp.zeros(3), (jnp.ones(1), jnp.ones(2))]},
7 * jnp.ones(()), P()]
serialize_fn = lambda p: json.dumps(int(p.a)).encode('utf-8')
deserialize_fn = lambda data: P(json.loads(data))
with self.assertRaises(ValueError):
tree_save(data, path)
register_pytree_node_serialization(P,
serialized_name='P',
serialize_auxdata=serialize_fn,
deserialize_auxdata=deserialize_fn)
magic_value = -171
data[-1].a = jnp.array(magic_value)
tree_save(data, path)
ret = tree_load(path)
self.assertLen(ret, len(data))
self.assertEqual(ret[-1].a, magic_value)
magic_val = 17 * jnp.ones(2)
data.append(D(jnp.ones(1), jax.numpy.zeros(2), magic_val))
with self.assertRaises(ValueError):
tree_save(data, path)
register_pytree_node_serialization(D,
serialized_name='D',
serialize_auxdata=serialize_D,
deserialize_auxdata=deserialize_D)
tree_save(data, path)
ret = tree_load(path)
self.assertLen(ret, len(data))
self.assertLess(jnp.linalg.norm(ret[-1].op - magic_val), 1e-5)
jax.tree.flatten(data)
def test_masked_reading(self):
path = self.create_tempdir()
data = [jnp.ones(1), {'world': [jnp.zeros(3), (jnp.ones(1), jnp.ones(2))]},
7 * jnp.ones(())]
tree_save(data, path)
for mask in [False, True]:
ret = tree_load(path, mask=mask)
expected = jax.tree.map(lambda x: None if not mask else x, data)
self.assertPyTreeEqual(ret, expected, is_leaf=lambda x: x is None)
mask = [True, False, False]
expected = data[:1] + jax.tree.map(lambda x: None, data[1:])
ret = tree_load(path, mask=mask)
self.assertPyTreeEqual(ret, expected, is_leaf=lambda x: x is None)
mask = [True, True, False]
expected = data[:2] + jax.tree.map(lambda x: None, data[2:])
ret = tree_load(path, mask=mask)
self.assertPyTreeEqual(ret, expected, is_leaf=lambda x: x is None)
mask = [True, {'world': [True, (False, True)]}, False]
data[1]['world'][1] = (None, data[1]['world'][1][1])
ret = tree_load(path, mask=mask)
self.assertPyTreeEqual(ret, expected, is_leaf=lambda x: x is None)
# TODO(rdyro): Remove when serialization supports non-arrays
@parameterized.product(obj=[b'hello', 'hello', 1, 1.0, 1j])
def test_serialization_works_for_arrays_only(self, obj):
path = self.create_tempdir()
data = [{'world': [jnp.zeros(3), (jnp.ones(1), jnp.ones(2))]}, obj]
msg = ('For serialization, all leaves must be either None or'
' jax.Array-like objects.')
with self.assertRaisesRegex(ValueError, msg):
tree_save(data, path)
def test_load_pytreedef(self):
path = self.create_tempdir()
data = [jnp.ones(1), {'world': [jnp.zeros(3), (jnp.ones(1), jnp.ones(2))]},
7 * jnp.ones(())]
tree_save(data, path)
pytreedef = tree_load_pytreedef(path)
expected_pytreedef = jax.tree.map(
lambda x: jax.ShapeDtypeStruct(x.shape, x.dtype), data)
self.assertPyTreeEqual(pytreedef, expected_pytreedef)
@parameterized.product(data=[
None, [None], [None, np.ones(())],
[None, {'world': [None, (np.ones(1), np.ones(2))]}, np.ones(())],
[None, {'world': [np.zeros(3), (None, np.ones(2))]}, None]])
def test_save_and_load_null_leaves(self, data):
path = self.create_tempdir()
# TPUs might not have X64 enabled, so we need to convert to float32
data = jax.tree.map(lambda x: jnp.array(x, dtype=jnp.float32), data)
tree_save(data, path)
pytreedef = tree_load_pytreedef(path)
is_leaf = lambda x: x is None
expected_pytreedef = jax.tree.map(lambda x: jax.ShapeDtypeStruct(
x.shape, x.dtype) if x is not None else x, data, is_leaf=is_leaf)
self.assertPyTreeEqual(pytreedef, expected_pytreedef)
load_data = tree_load(path)
load_leaves, load_struct = jax.tree.flatten(load_data, is_leaf=is_leaf)
expected_leaves, expected_struct = jax.tree.flatten(data, is_leaf=is_leaf)
self.assertEqual(load_struct, expected_struct)
self.assertLen(load_leaves, len(expected_leaves))
for (l1, l2) in zip(load_leaves, expected_leaves):
if l1 is None:
self.assertIsNone(l2)
else:
self.assertArraysEqual(l1, l2)
  @parameterized.product(manually_broadcast_ts_specs=[True, False])
  def test_custom_ts_specs(self, manually_broadcast_ts_specs):
    """Saving/loading with explicit TensorStore specs overrides the default driver.

    Runs once with a single spec broadcast over the whole tree and once with a
    per-leaf tree of specs (None entries allowed for None leaves).
    """
    if ts_impl._TS_ARRAY_DRIVER == 'zarr':
      self.skipTest('Skipping since this test assumes zarr is NOT the default')
    path = self.create_tempdir()
    data = [jnp.ones(()), (jnp.zeros(()), jnp.ones(())), None]
    ts_spec = {'driver': 'zarr', 'metadata': {'shape': ()}}
    if manually_broadcast_ts_specs:
      ts_specs = [ts_spec, (ts_spec, None), None]  # None ts_spec allowed
    else:
      ts_specs = ts_spec
    tree_save(data, path, ts_specs=ts_specs)
    load_data = tree_load(path, ts_specs=ts_specs)
    self.assertPyTreeEqual(data, load_data)
    # Loading WITHOUT the custom specs must fail: the data was written with
    # the 'zarr' driver, but the default reader uses 'zarr3'.
    with self.assertRaisesRegex(ValueError,
                                'NOT_FOUND: Error opening "zarr3" driver:'):
      _ = tree_load(path)  # default attempts to open with zarr3 and fails
def test_save_load_future_printable(self):
path = self.create_tempdir()
data = [jnp.ones(())]
save_fut = pytree_serialization.nonblocking_save(data, path)
str(save_fut)
save_fut.result()
load_fut = pytree_serialization.nonblocking_load(
path, shardings=_DEFAULT_SHARDING)
str(load_fut)
load_fut.result()
  def test_format_alone_not_supported(self):
    """Passing a bare Format (not a Sharding) to load() must raise."""
    # passing a format for a dtype not matching the dtype on disk will cause an
    # XLA error (since formats can be dtype/bit-width specific), hence allow
    # format only if dtype is also specified
    path = self.create_tempdir()
    data = jnp.arange(16 * 16, dtype=jnp.bfloat16).reshape((16, 16))
    sharding = NamedSharding(jtu.create_mesh((1, 1), ('x', 'y')), P('x', None))
    data: jax.Array = jax.device_put(data, sharding)
    tree_save(data, path)
    with self.assertRaisesRegex(NotImplementedError,
                                'Deserialization with `Format` instead of'
                                ' `Sharding` is not currently supported.'):
      pytree_serialization.load(path, shardings=data.format)
  def test_formats_support(self):
    """A Format is accepted when wrapped in a ShapeDtypeStruct with an explicit dtype.

    Saves float32 data, then loads it requesting bfloat16 via a
    ShapeDtypeStruct whose sharding field carries the bf16 format — the
    supported counterpart to the bare-Format case above.
    """
    path = self.create_tempdir()
    data = jnp.arange(16 * 16, dtype=jnp.float32).reshape((16, 16))
    data_bf16_format = jnp.arange(16 * 16, dtype=jnp.bfloat16).reshape(
        (16, 16)).format
    sharding = NamedSharding(jtu.create_mesh((1, 1), ('x', 'y')), P('x', None))
    data: jax.Array = jax.device_put(data, sharding)
    tree_save(data, path)
    pytree_serialization.load(path, shardings=jax.ShapeDtypeStruct(
        data.shape, jnp.bfloat16, sharding=data_bf16_format))
if __name__ == '__main__':
  # Run the suite with JAX's custom test loader.
  absltest.main(testLoader=jtu.JaxTestLoader())
| UserPytreeAPITest |
python | encode__django-rest-framework | tests/test_relations_pk.py | {
"start": 16782,
"end": 21772
class ____(TestCase):
    """Tests for nullable ForeignKey relations through PK-related serializer fields.

    Fixture (see setUp): one ForeignKeyTarget, sources 1-2 point at it,
    source 3 has a NULL target. Covers retrieval, create, and update with
    both None and the empty string (which must be coerced to None).
    """

    def setUp(self):
        # Build the shared fixture; note `target` is rebound to None on the
        # last iteration so source-3 gets a NULL foreign key.
        target = ForeignKeyTarget(name='target-1')
        target.save()
        for idx in range(1, 4):
            if idx == 3:
                target = None
            source = NullableForeignKeySource(name='source-%d' % idx, target=target)
            source.save()

    def test_foreign_key_retrieve_with_null(self):
        # A NULL FK must serialize as None rather than being dropped.
        queryset = NullableForeignKeySource.objects.all()
        serializer = NullableForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': 1},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': None},
        ]
        assert serializer.data == expected

    def test_foreign_key_create_with_valid_null(self):
        # Creating with target=None is valid for a nullable FK.
        data = {'id': 4, 'name': 'source-4', 'target': None}
        serializer = NullableForeignKeySourceSerializer(data=data)
        assert serializer.is_valid()
        obj = serializer.save()
        assert serializer.data == data
        assert obj.name == 'source-4'

        # Ensure source 4 is created, and everything else is as expected
        queryset = NullableForeignKeySource.objects.all()
        serializer = NullableForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': 1},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': None},
            {'id': 4, 'name': 'source-4', 'target': None}
        ]
        assert serializer.data == expected

    def test_foreign_key_create_with_valid_emptystring(self):
        """
        The emptystring should be interpreted as null in the context
        of relationships.
        """
        data = {'id': 4, 'name': 'source-4', 'target': ''}
        # The serialized output must show None, not the '' that was posted.
        expected_data = {'id': 4, 'name': 'source-4', 'target': None}
        serializer = NullableForeignKeySourceSerializer(data=data)
        assert serializer.is_valid()
        obj = serializer.save()
        assert serializer.data == expected_data
        assert obj.name == 'source-4'

        # Ensure source 4 is created, and everything else is as expected
        queryset = NullableForeignKeySource.objects.all()
        serializer = NullableForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': 1},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': None},
            {'id': 4, 'name': 'source-4', 'target': None}
        ]
        assert serializer.data == expected

    def test_foreign_key_update_with_valid_null(self):
        # Updating an existing FK to None must clear the relation.
        data = {'id': 1, 'name': 'source-1', 'target': None}
        instance = NullableForeignKeySource.objects.get(pk=1)
        serializer = NullableForeignKeySourceSerializer(instance, data=data)
        assert serializer.is_valid()
        serializer.save()
        assert serializer.data == data

        # Ensure source 1 is updated, and everything else is as expected
        queryset = NullableForeignKeySource.objects.all()
        serializer = NullableForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': None},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': None}
        ]
        assert serializer.data == expected

    def test_foreign_key_update_with_valid_emptystring(self):
        """
        The emptystring should be interpreted as null in the context
        of relationships.
        """
        data = {'id': 1, 'name': 'source-1', 'target': ''}
        expected_data = {'id': 1, 'name': 'source-1', 'target': None}
        instance = NullableForeignKeySource.objects.get(pk=1)
        serializer = NullableForeignKeySourceSerializer(instance, data=data)
        assert serializer.is_valid()
        serializer.save()
        assert serializer.data == expected_data

        # Ensure source 1 is updated, and everything else is as expected
        queryset = NullableForeignKeySource.objects.all()
        serializer = NullableForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': None},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': None}
        ]
        assert serializer.data == expected

    def test_null_uuid_foreign_key_serializes_as_none(self):
        # Also holds when the FK target uses a UUID primary key.
        source = NullableUUIDForeignKeySource(name='Source')
        serializer = NullableUUIDForeignKeySourceSerializer(source)
        data = serializer.data
        assert data["target"] is None

    def test_nullable_uuid_foreign_key_is_valid_when_none(self):
        # None must validate against a nullable UUID-keyed FK field.
        data = {"name": "Source", "target": None}
        serializer = NullableUUIDForeignKeySourceSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
| PKNullableForeignKeyTests |
python | ray-project__ray | rllib/examples/quadx_waypoints.py | {
"start": 1854,
"end": 4013
class ____(gym.RewardWrapper):
    """Reward wrapper that compresses the environment's extreme rewards.

    Rewards with magnitude >= 99.0 (presumably the env's +/-100 terminal
    rewards — TODO confirm against PyFlyt) are divided by 10, keeping the
    reward signal roughly within [-10, 10]; all other rewards pass through
    unchanged.
    """

    # NOTE(review): the original `__init__` only forwarded to
    # `super().__init__(env)`, which is exactly what the inherited
    # gym.RewardWrapper.__init__ does — removed as a no-op override.

    def reward(self, reward):
        # Scale only the large (terminal-looking) rewards; leave the rest as-is.
        if reward >= 99.0 or reward <= -99.0:
            return reward / 10
        return reward
def create_quadx_waypoints_env(env_config):
    """Env-creator callable for `register_env`: a wrapped QuadX waypoints env."""
    # Importing PyFlyt.gym_envs registers the PyFlyt environment IDs with gym.
    import PyFlyt.gym_envs  # noqa
    from PyFlyt.gym_envs import FlattenWaypointEnv

    # Wrap Environment to use max 10 and -10 for rewards
    scaled_env = RewardWrapper(gym.make("PyFlyt/QuadX-Waypoints-v1"))
    return FlattenWaypointEnv(scaled_env, context_length=1)
if __name__ == "__main__":
    args = parser.parse_args()

    # Register the environment with tune.
    register_env(args.env_name, env_creator=create_quadx_waypoints_env)

    # Get the algorithm class to use for training.
    algo_cls = get_trainable_cls(args.run)
    config = (
        algo_cls.get_default_config()
        .environment(env=args.env_name)
        .env_runners(
            num_envs_per_env_runner=args.num_envs_per_env_runner,
        )
        .reporting(min_time_s_per_iteration=0.1)
    )
    # If PPO set additional configurations.
    # NOTE: the branches below mutate `config` in place.
    if args.run == "PPO":
        # Small shared-trunk network; value function shares the trunk layers.
        config.rl_module(
            model_config={
                "fcnet_hiddens": [32],
                "fcnet_activation": "linear",
                "vf_share_layers": True,
            }
        )
        config.training(
            minibatch_size=128,
            train_batch_size_per_learner=10000,
        )
    # If IMPALA set additional arguments.
    elif args.run == "IMPALA":
        config.env_runners(num_env_runners=2)
        config.learners(num_gpus_per_learner=0)
        config.training(vf_loss_coeff=0.01)

    # Set the stopping arguments: stop on either iteration budget or
    # reaching the target mean episode return.
    EPISODE_RETURN_MEAN_KEY = f"{ENV_RUNNER_RESULTS}/{EPISODE_RETURN_MEAN}"
    stop = {
        TRAINING_ITERATION_TIMER: args.stop_iters,
        EPISODE_RETURN_MEAN_KEY: args.stop_reward,
    }

    # Run the experiment.
    run_rllib_example_script_experiment(
        config,
        args,
        stop=stop,
        success_metric={
            f"{ENV_RUNNER_RESULTS}/{EPISODE_RETURN_MEAN}": args.stop_reward,
        },
    )
| RewardWrapper |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.