| language (stringclasses, 1 value) | repo (stringclasses, 346 values) | path (stringlengths 6–201) | class_span (dict) | source (stringlengths 21–2.38M) | target (stringlengths 1–96) |
|---|---|---|---|---|---|
python | PrefectHQ__prefect | src/integrations/prefect-github/prefect_github/schemas/graphql_schema.py | {
"start": 8991,
"end": 9198
} | class ____(sgqlc.types.Enum):
"""
See source code for more info.
"""
__schema__ = graphql_schema
__choices__ = ("FAILURE", "PENDING", "SUCCESS")
| EnterpriseServerUserAccountsUploadSyncState |
python | sqlalchemy__sqlalchemy | lib/sqlalchemy/testing/suite/test_select.py | {
"start": 64104,
"end": 65806
} | class ____(fixtures.TablesTest):
__backend__ = True
run_inserts = run_deletes = "once"
inserted_data = [{"a": i, "b": i + 1} for i in range(10)]
@classmethod
def define_tables(cls, metadata):
Table("bitwise", metadata, Column("a", Integer), Column("b", Integer))
@classmethod
def insert_data(cls, connection):
connection.execute(cls.tables.bitwise.insert(), cls.inserted_data)
@testing.combinations(
(
lambda a: a.bitwise_xor(5),
[i for i in range(10) if i != 5],
testing.requires.supports_bitwise_xor,
),
(
lambda a: a.bitwise_or(1),
list(range(10)),
testing.requires.supports_bitwise_or,
),
(
lambda a: a.bitwise_and(4),
list(range(4, 8)),
testing.requires.supports_bitwise_and,
),
(
lambda a: (a - 2).bitwise_not(),
[0],
testing.requires.supports_bitwise_not,
),
(
lambda a: a.bitwise_lshift(1),
list(range(1, 10)),
testing.requires.supports_bitwise_shift,
),
(
lambda a: a.bitwise_rshift(2),
list(range(4, 10)),
testing.requires.supports_bitwise_shift,
),
argnames="case, expected",
)
def test_bitwise(self, case, expected, connection):
tbl = self.tables.bitwise
a = tbl.c.a
op = testing.resolve_lambda(case, a=a)
stmt = select(tbl).where(op > 0).order_by(a)
res = connection.execute(stmt).mappings().all()
eq_(res, [self.inserted_data[i] for i in expected])
| BitwiseTest |
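The suite in the record above exercises SQLAlchemy's bitwise column operators. A minimal standalone sketch of what those operators compile to, assuming SQLAlchemy 2.x (where `bitwise_xor` and friends are available); the table and column names merely mirror the record:

```python
# A standalone sketch, assuming SQLAlchemy 2.x; prints the generic SQL each
# operator renders to (individual dialects may emit different spellings).
from sqlalchemy import Column, Integer, MetaData, Table, select

metadata = MetaData()
bitwise = Table("bitwise", metadata, Column("a", Integer), Column("b", Integer))
a = bitwise.c.a

for expr in (a.bitwise_xor(5), a.bitwise_or(1), a.bitwise_and(4),
             a.bitwise_not(), a.bitwise_lshift(1), a.bitwise_rshift(2)):
    # e.g. "bitwise.a ^ :a_1", "bitwise.a | :a_1", "~bitwise.a", ...
    print(select(bitwise).where(expr > 0).order_by(a))
```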
python | kubernetes-client__python | kubernetes/client/models/v1beta2_resource_claim.py | {
"start": 383,
"end": 7590
} | class ____(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1beta2ResourceClaimSpec',
'status': 'V1beta2ResourceClaimStatus'
}
attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec',
'status': 'status'
}
def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None, local_vars_configuration=None): # noqa: E501
"""V1beta2ResourceClaim - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._kind = None
self._metadata = None
self._spec = None
self._status = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
self.spec = spec
if status is not None:
self.status = status
@property
def api_version(self):
"""Gets the api_version of this V1beta2ResourceClaim. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1beta2ResourceClaim. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1beta2ResourceClaim.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1beta2ResourceClaim. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""Gets the kind of this V1beta2ResourceClaim. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1beta2ResourceClaim. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1beta2ResourceClaim.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1beta2ResourceClaim. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1beta2ResourceClaim. # noqa: E501
:return: The metadata of this V1beta2ResourceClaim. # noqa: E501
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1beta2ResourceClaim.
:param metadata: The metadata of this V1beta2ResourceClaim. # noqa: E501
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""Gets the spec of this V1beta2ResourceClaim. # noqa: E501
:return: The spec of this V1beta2ResourceClaim. # noqa: E501
:rtype: V1beta2ResourceClaimSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""Sets the spec of this V1beta2ResourceClaim.
:param spec: The spec of this V1beta2ResourceClaim. # noqa: E501
:type: V1beta2ResourceClaimSpec
"""
if self.local_vars_configuration.client_side_validation and spec is None: # noqa: E501
raise ValueError("Invalid value for `spec`, must not be `None`") # noqa: E501
self._spec = spec
@property
def status(self):
"""Gets the status of this V1beta2ResourceClaim. # noqa: E501
:return: The status of this V1beta2ResourceClaim. # noqa: E501
:rtype: V1beta2ResourceClaimStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this V1beta2ResourceClaim.
:param status: The status of this V1beta2ResourceClaim. # noqa: E501
:type: V1beta2ResourceClaimStatus
"""
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta2ResourceClaim):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1beta2ResourceClaim):
return True
return self.to_dict() != other.to_dict()
| V1beta2ResourceClaim |
python | pytorch__pytorch | torchgen/api/autograd.py | {
"start": 1191,
"end": 2153
} | class ____:
# The formula string (legit C++ expression).
# Note that expressions against input arguments have been replaced with the
# corresponding saved attributes.
# E.g.:
# raw formula: `mul_tensor_backward(grad, self, other.scalar_type())`
# here: `mul_tensor_backward(grad, self, other_scalar_type)`
formula: str
# The formula string before input argument replacement
original_formula: str
# Names of the arguments for which this formula calculates derivatives.
var_names: tuple[str, ...]
# Saved inputs that are referenced by the formula.
saved_inputs: tuple[SavedAttribute, ...]
# Saved outputs that are referenced by the formula.
saved_outputs: tuple[SavedAttribute, ...]
# Gradients that are referenced by name in the formula.
named_gradients: set[str]
# Represents a forward formula that calculates forward derivatives
# for one tensor.
@dataclass(frozen=True)
| Derivative |
python | microsoft__pyright | packages/pyright-internal/src/tests/samples/typedDict3.py | {
"start": 179,
"end": 1024
} | class ____(Movie, total=True):
based_on: str
movie1 = Movie(year=1982, name="Blade Runner")
# This should generate an error because
# the type is incorrect.
movie2 = Movie(name="Blade Runner", year="1982")
movie3 = Movie(name="Blade Runner")
# This should generate an error because
# the key name is not supported.
movie4 = Movie(name2="Blade Runner")
book1 = BookBasedMovie(year=1979, name="Moonraker", based_on="Moonraker")
book2 = BookBasedMovie(based_on="Moonraker", year=1979)
book3 = BookBasedMovie(based_on="Moonraker")
# This should generate an error because 'author' isn't
# a defined field.
book4 = BookBasedMovie(based_on="Moonraker", author="Ian Fleming")
# This should generate an error because 'based_on' is
# a required field, and it's not provided.
book5 = BookBasedMovie(year=1982, name="Blade Runner")
| BookBasedMovie |
python | jazzband__django-oauth-toolkit | tests/test_django_checks.py | {
"start": 213,
"end": 829
} | class ____(TestCase):
def test_checks_pass(self):
call_command("check")
# CrossDatabaseRouter claims AccessToken is in beta while everything else is in alpha.
# This will cause the database checks to fail.
@override_settings(
DATABASE_ROUTERS=["tests.db_router.CrossDatabaseRouter", "tests.db_router.AlphaRouter"]
)
def test_checks_fail_when_router_crosses_databases(self):
message = "The token models are expected to be stored in the same database."
with self.assertRaisesMessage(SystemCheckError, message):
call_command("check")
| DjangoChecksTestCase |
python | pytorch__pytorch | test/onnx/test_onnxscript_no_runtime.py | {
"start": 306,
"end": 6410
} | class ____(common_utils.TestCase):
# opset version is
# 1. local function is supported after opset 15
# 2. onnx-script requires users to determine opset in local function
opset_version = 15
def test_onnxscript_registration_with_multiple_models(self):
from onnxscript.onnx_opset import opset15 as op
# 1. Register Selu onnxscript function as custom Op
custom_opset = onnxscript.values.Opset(domain="onnx-script", version=1)
@onnxscript.script(custom_opset)
def Selu(X):
# default value is not supported by onnxscript
alpha = 1.67326 # auto wrapped as Constants
gamma = 1.0507
alphaX = op.CastLike(alpha, X)
gammaX = op.CastLike(gamma, X)
neg = gammaX * (alphaX * op.Exp(X) - alphaX)
pos = gammaX * X
zero = op.CastLike(0, X)
return op.Where(X <= zero, neg, pos)
def custom_selu(g: jit_utils.GraphContext, X):
return g.onnxscript_op(Selu, X).setType(X.type())
torch.onnx.register_custom_op_symbolic(
symbolic_name="aten::selu",
symbolic_fn=custom_selu,
opset_version=self.opset_version,
)
# 2. Register layer_norm onnxscript function as custom Op
@onnxscript.script(custom_opset)
def layer_norm(
X, axes: list[int], weight: FLOAT[...], bias: FLOAT[...], eps: float
):
mean = op.ReduceMean(X, axes=axes)
D = X - mean # op.Sub(X, mean)
DD = D * D # op.Mul(D, D)
var = op.ReduceMean(DD, axes=axes)
vareps = var + eps # op.Add(var, eps)
stddev = op.Sqrt(vareps)
invstddev = op.Reciprocal(stddev)
normalized = D * invstddev # op.Mul(D, invstddev)
normalizedw = op.CastLike(
normalized, weight
) # Type issue if missing this Op
normalizedscaled = normalizedw * weight # op.Mul(normalized, weight)
return normalizedscaled + bias
@torch.onnx.symbolic_helper.parse_args("v", "is", "v", "v", "f", "none")
def custom_layer_norm(
g, input, normalized_shape, weight, bias, eps, cudnn_enable
):
# comprehension is not supported by onnxscript
axes = [-i for i in range(len(normalized_shape), 0, -1)]
return g.onnxscript_op(
layer_norm, input, weight, bias, axes_i=axes, eps_f=eps
).setType(input.type())
torch.onnx.register_custom_op_symbolic(
symbolic_name="aten::layer_norm",
symbolic_fn=custom_layer_norm,
opset_version=self.opset_version,
)
# 3. export two models
x = torch.randn(1, 2, 3, 4, requires_grad=True)
model_selu = torch.nn.SELU()
selu_onnx = io.BytesIO()
torch.onnx.export(
model_selu, x, selu_onnx, opset_version=self.opset_version, dynamo=False
)
N, C = 3, 4
y = torch.randn(N, C)
model_layer_norm = torch.nn.LayerNorm(C)
layer_norm_onnx = io.BytesIO()
torch.onnx.export(
model_layer_norm,
y,
layer_norm_onnx,
opset_version=self.opset_version,
dynamo=False,
)
# 4. test on models
selu_proto = onnx.load(io.BytesIO(selu_onnx.getvalue()))
layer_norm_proto = onnx.load(io.BytesIO(layer_norm_onnx.getvalue()))
self.assertEqual(len(selu_proto.functions), 1)
self.assertEqual(len(layer_norm_proto.functions), 1)
self.assertEqual(selu_proto.functions[0].name, "Selu")
self.assertEqual(layer_norm_proto.functions[0].name, "layer_norm")
def test_loop_registration(self):
# Control flow is tested for _find_onnxscript_op function in torch/onnx/utils.py,
# which has recursive logic to go through every nodes with subgraph in model proto
class NestedLoopsModel(torch.jit.ScriptModule):
def __init__(self) -> None:
super().__init__()
self.selu = torch.nn.SELU()
@torch.jit.script_method
def forward(self, x):
y = x
for i in range(x.size(3)):
if i == 0:
y = self.selu(x)
else:
y += i
return y
model = NestedLoopsModel()
inputs = torch.zeros(1, 2, 3, 4)
from onnxscript.onnx_opset import opset15 as op
custom_opset = onnxscript.values.Opset(domain="onnx-script", version=2)
@onnxscript.script(custom_opset)
def Selu(X):
alpha = 1.6732632423543772848170429916717
gamma = 1.0507009873554804934193349852946
alphaX = op.CastLike(alpha, X)
gammaX = op.CastLike(gamma, X)
neg = gammaX * (alphaX * op.Exp(X) - alphaX)
pos = gammaX * X
zero = op.CastLike(0, X)
return op.Where(X <= zero, neg, pos)
def custom_selu(g, X):
# domain of the Op should be aligned with onnx-script
# setType API is required for custom Op to support
# torchscript shape type inference
print("custom_selu is used!")
return g.onnxscript_op(Selu, X).setType(X.type())
torch.onnx.register_custom_op_symbolic(
symbolic_name="aten::selu",
symbolic_fn=custom_selu,
opset_version=15,
)
saved_model = io.BytesIO()
torch.onnx.export(
torch.jit.script(model),
inputs,
f=saved_model,
opset_version=15,
dynamo=False,
)
loop_selu_proto = onnx.load(io.BytesIO(saved_model.getvalue()))
self.assertEqual(len(loop_selu_proto.functions), 1)
if __name__ == "__main__":
raise RuntimeError(
"This test is not currently used and should be "
"enabled in discover_tests.py if required."
)
| TestONNXScriptExport |
python | huggingface__transformers | tests/models/layoutlmv2/test_processing_layoutlmv2.py | {
"start": 1328,
"end": 5701
} | class ____(ProcessorTesterMixin, unittest.TestCase):
processor_class = LayoutLMv2Processor
@classmethod
def _setup_image_processor(cls):
image_processor_class = cls._get_component_class_from_processor("image_processor")
return image_processor_class(
do_resize=True,
size=224,
apply_ocr=True,
)
@classmethod
def _setup_tokenizer(cls):
vocab_tokens = [
"[UNK]",
"[CLS]",
"[SEP]",
"[PAD]",
"[MASK]",
"want",
"##want",
"##ed",
"wa",
"un",
"runn",
"##ing",
",",
"low",
"lowest",
]
vocab_file = os.path.join(cls.tmpdirname, VOCAB_FILES_NAMES["vocab_file"])
with open(vocab_file, "w", encoding="utf-8") as vocab_writer:
vocab_writer.write("".join([x + "\n" for x in vocab_tokens]))
return LayoutLMv2Tokenizer.from_pretrained(cls.tmpdirname)
@unittest.skip("LayoutLMv2Processor doesn't use pixel_values")
def test_image_processor_defaults(self):
pass
@unittest.skip("LayoutLMv2Processor doesn't use pixel_values")
def test_processor_with_multiple_inputs(self):
pass
def test_save_load_pretrained_additional_features(self):
processor = self.get_processor()
processor.save_pretrained(self.tmpdirname)
# slow tokenizer
tokenizer_add_kwargs = self.get_component("tokenizer", bos_token="(BOS)", eos_token="(EOS)")
image_processor_add_kwargs = self.get_component("image_processor", do_resize=False, size=30, use_fast=False)
processor = LayoutLMv2Processor.from_pretrained(
self.tmpdirname, use_fast=False, bos_token="(BOS)", eos_token="(EOS)", do_resize=False, size=30
)
self.assertEqual(processor.tokenizer.get_vocab(), tokenizer_add_kwargs.get_vocab())
self.assertIsInstance(processor.tokenizer, LayoutLMv2Tokenizer)
self.assertEqual(processor.image_processor.to_json_string(), image_processor_add_kwargs.to_json_string())
self.assertIsInstance(processor.image_processor, LayoutLMv2ImageProcessor)
# fast tokenizer
tokenizer_add_kwargs = self.get_component("tokenizer", bos_token="(BOS)", eos_token="(EOS)")
image_processor_add_kwargs = self.get_component("image_processor", do_resize=False, size=30)
processor = LayoutLMv2Processor.from_pretrained(
self.tmpdirname, bos_token="(BOS)", eos_token="(EOS)", do_resize=False, size=30
)
self.assertEqual(processor.tokenizer.get_vocab(), tokenizer_add_kwargs.get_vocab())
self.assertIsInstance(processor.tokenizer, LayoutLMv2TokenizerFast)
self.assertEqual(processor.image_processor.to_json_string(), image_processor_add_kwargs.to_json_string())
self.assertIsInstance(processor.image_processor, LayoutLMv2ImageProcessorFast)
@slow
def test_overflowing_tokens(self):
# In the case of overflowing tokens, test that we still have 1-to-1 mapping between the images and input_ids (sequences that are too long are broken down into multiple sequences).
from datasets import load_dataset
# set up
datasets = load_dataset("nielsr/funsd")
processor = LayoutLMv2Processor.from_pretrained("microsoft/layoutlmv2-base-uncased", apply_ocr=False)
def preprocess_data(examples):
images = [image.convert("RGB") for image in examples["image"]]
words = list(examples["words"])
boxes = list(examples["bboxes"])
word_labels = list(examples["ner_tags"])
encoded_inputs = processor(
images,
words,
boxes=boxes,
word_labels=word_labels,
padding="max_length",
truncation=True,
return_overflowing_tokens=True,
stride=50,
return_offsets_mapping=True,
return_tensors="pt",
)
return encoded_inputs
train_data = preprocess_data(datasets["train"])
self.assertEqual(len(train_data["image"]), len(train_data["input_ids"]))
# different use cases tests
@require_torch
@require_pytesseract
| LayoutLMv2ProcessorTest |
python | getsentry__sentry | src/sentry/integrations/api/endpoints/external_team_details.py | {
"start": 1119,
"end": 3937
} | class ____(TeamEndpoint, ExternalActorEndpointMixin):
publish_status = {
"DELETE": ApiPublishStatus.PUBLIC,
"PUT": ApiPublishStatus.PUBLIC,
}
owner = ApiOwner.ENTERPRISE
def convert_args(
self,
request: Request,
organization_id_or_slug: int | str,
team_id_or_slug: int | str,
external_team_id: int,
*args: Any,
**kwargs: Any,
) -> tuple[Any, Any]:
args, kwargs = super().convert_args(
request, organization_id_or_slug, team_id_or_slug, *args, **kwargs
)
kwargs["external_team"] = self.get_external_actor_or_404(
external_team_id, kwargs["team"].organization
)
return args, kwargs
@extend_schema(
operation_id="Update an External Team",
parameters=[
GlobalParams.ORG_ID_OR_SLUG,
GlobalParams.TEAM_ID_OR_SLUG,
OrganizationParams.EXTERNAL_TEAM_ID,
],
request=ExternalTeamSerializer,
responses={
200: ExternalActorSerializer,
400: RESPONSE_BAD_REQUEST,
403: RESPONSE_FORBIDDEN,
},
examples=IntegrationExamples.EXTERNAL_TEAM_CREATE,
)
def put(self, request: Request, team: Team, external_team: ExternalActor) -> Response:
"""
Update a team in an external provider that is currently linked to a Sentry team.
"""
self.assert_has_feature(request, team.organization)
if "teamId" in request.data:
del request.data["teamId"]
serializer = ExternalTeamSerializer(
instance=external_team,
data={**request.data, "team_id": team.id},
partial=True,
context={"organization": team.organization},
)
if serializer.is_valid():
updated_external_team = serializer.save()
return Response(
serialize(updated_external_team, request.user), status=status.HTTP_200_OK
)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@extend_schema(
operation_id="Delete an External Team",
parameters=[
GlobalParams.ORG_ID_OR_SLUG,
GlobalParams.TEAM_ID_OR_SLUG,
OrganizationParams.EXTERNAL_TEAM_ID,
],
request=None,
responses={
204: RESPONSE_NO_CONTENT,
400: RESPONSE_BAD_REQUEST,
403: RESPONSE_FORBIDDEN,
},
)
def delete(self, request: Request, team: Team, external_team: ExternalActor) -> Response:
"""
Delete the link between a team from an external provider and a Sentry team.
"""
external_team.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
| ExternalTeamDetailsEndpoint |
python | django-import-export__django-import-export | tests/core/tests/test_resources/test_bulk_operations.py | {
"start": 17966,
"end": 22134
} | class ____(BulkTest):
class DeleteBookResource(resources.ModelResource):
def for_delete(self, row, instance):
return True
class Meta:
model = Book
use_bulk = True
# there are errors when diffing with mocks
# therefore disable diff with this flag
skip_diff = True
def setUp(self):
super().setUp()
self.resource = self.DeleteBookResource()
self.resource._meta.batch_size = 1000
self.resource._meta.use_bulk = True
self.init_update_test_data()
@mock.patch("core.models.Book.delete")
def test_bulk_delete_use_bulk_is_false(self, mock_obj_delete):
self.resource._meta.use_bulk = False
self.resource.import_data(self.dataset)
self.assertEqual(10, mock_obj_delete.call_count)
@mock.patch("core.models.Book.objects")
def test_bulk_delete_batch_size_of_4(self, mock_obj_manager):
self.resource._meta.batch_size = 4
result = self.resource.import_data(self.dataset)
self.assertEqual(3, mock_obj_manager.filter.return_value.delete.call_count)
self.assertEqual(10, result.total_rows)
self.assertEqual(10, result.totals["delete"])
@mock.patch("core.models.Book.objects")
def test_bulk_delete_batch_size_of_5(self, mock_obj_manager):
self.resource._meta.batch_size = 5
result = self.resource.import_data(self.dataset)
self.assertEqual(2, mock_obj_manager.filter.return_value.delete.call_count)
self.assertEqual(10, result.total_rows)
self.assertEqual(10, result.totals["delete"])
@mock.patch("core.models.Book.objects")
def test_bulk_delete_batch_size_is_none(self, mock_obj_manager):
self.resource._meta.batch_size = None
result = self.resource.import_data(self.dataset)
self.assertEqual(1, mock_obj_manager.filter.return_value.delete.call_count)
self.assertEqual(10, result.total_rows)
self.assertEqual(10, result.totals["delete"])
@mock.patch("core.models.Book.objects")
def test_bulk_delete_not_called_when_not_using_transactions(self, mock_obj_manager):
class _BookResource(self.DeleteBookResource):
def import_data(
self,
dataset,
dry_run=False,
raise_errors=False,
use_transactions=None,
collect_failed_rows=False,
**kwargs,
):
# override so that we can enforce not using_transactions
using_transactions = False
return self.import_data_inner(
dataset,
dry_run,
raise_errors,
using_transactions,
collect_failed_rows,
**kwargs,
)
resource = _BookResource()
resource.import_data(self.dataset, dry_run=True)
self.assertEqual(0, mock_obj_manager.filter.return_value.delete.call_count)
@mock.patch("core.models.Book.objects")
def test_bulk_delete_called_for_dry_run(self, mock_obj_manager):
self.resource.import_data(self.dataset, dry_run=True)
self.assertEqual(1, mock_obj_manager.filter.return_value.delete.call_count)
@mock.patch("core.models.Book.objects")
def test_bulk_delete_logs_exception(self, mock_obj_manager):
e = Exception("invalid")
mock_obj_manager.filter.return_value.delete.side_effect = e
with mock.patch("logging.Logger.debug") as mock_exception:
self.resource.import_data(self.dataset)
mock_exception.assert_called_with(e, exc_info=mock.ANY)
self.assertEqual(1, mock_exception.call_count)
@mock.patch("core.models.Book.objects")
def test_bulk_delete_raises_exception(self, mock_obj_manager):
e = Exception("invalid")
mock_obj_manager.filter.return_value.delete.side_effect = e
with self.assertRaises(Exception) as raised_exc:
self.resource.import_data(self.dataset, raise_errors=True)
self.assertEqual(e, raised_exc)
| BulkDeleteTest |
python | doocs__leetcode | solution/2400-2499/2493.Divide Nodes Into the Maximum Number of Groups/Solution.py | {
"start": 0,
"end": 821
} | class ____:
def magnificentSets(self, n: int, edges: List[List[int]]) -> int:
g = [[] for _ in range(n)]
for a, b in edges:
g[a - 1].append(b - 1)
g[b - 1].append(a - 1)
d = defaultdict(int)
for i in range(n):
q = deque([i])
dist = [0] * n
dist[i] = mx = 1
root = i
while q:
a = q.popleft()
root = min(root, a)
for b in g[a]:
if dist[b] == 0:
dist[b] = dist[a] + 1
mx = max(mx, dist[b])
q.append(b)
elif abs(dist[b] - dist[a]) != 1:
return -1
d[root] = max(d[root], mx)
return sum(d.values())
| Solution |
python | ray-project__ray | python/ray/train/tests/test_torch_predictor.py | {
"start": 417,
"end": 521
} | class ____(torch.nn.Module):
def forward(self, input):
return input * 2
| DummyModelSingleTensor |
python | getsentry__sentry | src/sentry/similarity/backends/redis.py | {
"start": 466,
"end": 7589
} | class ____(AbstractIndexBackend):
def __init__(
self, cluster, namespace, signature_builder, bands, interval, retention, candidate_set_limit
):
self.cluster = cluster
self.namespace = namespace
self.signature_builder = signature_builder
self.bands = bands
self.interval = interval
self.retention = retention
self.candidate_set_limit = candidate_set_limit
def _build_signature_arguments(self, features):
if not features:
return [0] * self.bands
arguments = []
for bucket in band(self.bands, self.signature_builder(features)):
arguments.extend([1, ",".join(str(b) for b in bucket), 1])
return arguments
def __index(self, scope, args):
# scope must be passed into the script call as a key to allow the
# cluster client to determine what cluster the script should be
# executed on. The script itself will use the scope as the hashtag for
# all redis operations.
return index([scope], args, self.cluster)
def _as_search_result(self, results):
score_replacements = {
-1.0: None, # both items don't have the feature (no comparison)
-2.0: 0, # one item doesn't have the feature (totally dissimilar)
}
def decode_search_result(result):
key, scores = result
return (
force_str(key),
[score_replacements.get(float(score), float(score)) for score in scores],
)
def get_comparison_key(result):
key, scores = result
scores = [score for score in scores if score is not None]
return (
sum(scores) / len(scores) * -1, # average score, descending
len(scores) * -1, # number of indexes with scores, descending
key, # lexicographical sort on key, ascending
)
return sorted((decode_search_result(result) for result in results), key=get_comparison_key)
def classify(self, scope, items, limit=None, timestamp=None):
if timestamp is None:
timestamp = int(time.time())
arguments = [
"CLASSIFY",
timestamp,
self.namespace,
self.bands,
self.interval,
self.retention,
self.candidate_set_limit,
scope,
limit if limit is not None else -1,
]
for idx, threshold, features in items:
arguments.extend([idx, threshold])
arguments.extend(self._build_signature_arguments(features))
return self._as_search_result(self.__index(scope, arguments))
def compare(self, scope, key, items, limit=None, timestamp=None):
if timestamp is None:
timestamp = int(time.time())
arguments = [
"COMPARE",
timestamp,
self.namespace,
self.bands,
self.interval,
self.retention,
self.candidate_set_limit,
scope,
limit if limit is not None else -1,
key,
]
for idx, threshold in items:
arguments.extend([idx, threshold])
return self._as_search_result(self.__index(scope, arguments))
def record(self, scope, key, items, timestamp=None):
if not items:
return # nothing to do
if timestamp is None:
timestamp = int(time.time())
arguments = [
"RECORD",
timestamp,
self.namespace,
self.bands,
self.interval,
self.retention,
self.candidate_set_limit,
scope,
key,
]
for idx, features in items:
arguments.append(idx)
arguments.extend(self._build_signature_arguments(features))
return self.__index(scope, arguments)
def merge(self, scope, destination, items, timestamp=None):
if timestamp is None:
timestamp = int(time.time())
arguments = [
"MERGE",
timestamp,
self.namespace,
self.bands,
self.interval,
self.retention,
self.candidate_set_limit,
scope,
destination,
]
for idx, source in items:
arguments.extend([idx, source])
return self.__index(scope, arguments)
def delete(self, scope, items, timestamp=None):
if timestamp is None:
timestamp = int(time.time())
arguments = [
"DELETE",
timestamp,
self.namespace,
self.bands,
self.interval,
self.retention,
self.candidate_set_limit,
scope,
]
for idx, key in items:
arguments.extend([idx, key])
return self.__index(scope, arguments)
def scan(self, scope, indices, batch=1000, timestamp=None):
if timestamp is None:
timestamp = int(time.time())
arguments = [
"SCAN",
timestamp,
self.namespace,
self.bands,
self.interval,
self.retention,
self.candidate_set_limit,
scope,
]
cursors = {idx: 0 for idx in indices}
while cursors:
requests = []
for idx, cursor in cursors.items():
requests.append([idx, cursor, batch])
responses = self.__index(scope, arguments + flatten(requests))
for (idx, _, _), (cursor, chunk) in zip(requests, responses):
cursor = int(cursor)
if cursor == 0:
del cursors[idx]
else:
cursors[idx] = cursor
yield idx, chunk
def flush(self, scope, indices, batch=1000, timestamp=None):
for index, chunk in self.scan(scope, indices, batch, timestamp):
if chunk:
self.cluster.delete(*chunk)
def export(self, scope, items, timestamp=None):
if timestamp is None:
timestamp = int(time.time())
arguments = [
"EXPORT",
timestamp,
self.namespace,
self.bands,
self.interval,
self.retention,
self.candidate_set_limit,
scope,
]
for idx, key in items:
arguments.extend([idx, key])
return self.__index(scope, arguments)
def import_(self, scope, items, timestamp=None):
if timestamp is None:
timestamp = int(time.time())
arguments = [
"IMPORT",
timestamp,
self.namespace,
self.bands,
self.interval,
self.retention,
self.candidate_set_limit,
scope,
]
for idx, key, data in items:
arguments.extend([idx, key, data])
return self.__index(scope, arguments)
| RedisScriptMinHashIndexBackend |
python | google__jax | jax/experimental/jax2tf/tests/flax_models/bilstm_classifier.py | {
"start": 7045,
"end": 8530
} | class ____(nn.Module):
"""A simple Multilayer perceptron with 1 hidden layer.
Attributes:
hidden_size: The size of the hidden layer.
output_size: The size of the output.
activation: The activation function to apply to the hidden layer.
dropout_rate: The dropout rate applied to the hidden layer.
output_bias: If False, do not use a bias term in the last layer.
deterministic: Disables dropout if set to True.
"""
hidden_size: int
output_size: int
activation: Callable[..., Any] = nn.tanh
dropout_rate: float = 0.0
output_bias: bool = False
deterministic: bool | None = None
def setup(self):
self.intermediate_layer = nn.Dense(self.hidden_size)
self.output_layer = nn.Dense(self.output_size, use_bias=self.output_bias)
self.dropout_layer = nn.Dropout(rate=self.dropout_rate)
def __call__(self, inputs: Array, deterministic: bool | None = None):
"""Applies the MLP to the last dimension of the inputs.
Args:
inputs: <float32>[batch_size, ..., input_features].
deterministic: Disables dropout when set to True.
Returns:
The MLP output <float32>[batch_size, ..., output_size]
"""
deterministic = nn.module.merge_param(
'deterministic', self.deterministic, deterministic)
hidden = self.intermediate_layer(inputs)
hidden = self.activation(hidden)
hidden = self.dropout_layer(hidden, deterministic=deterministic)
output = self.output_layer(hidden)
return output
| MLP |
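A minimal sketch of driving the module in the record above, assuming `MLP` is in scope and flax/jax are importable; the shapes and sizes are illustrative, not from the source repo:

```python
# Hypothetical usage of the MLP record above (not from the source repo):
# with deterministic=True, the Dropout layer needs no RNG at apply time.
import jax
import jax.numpy as jnp

mlp = MLP(hidden_size=16, output_size=4, deterministic=True)  # MLP from the record
variables = mlp.init(jax.random.PRNGKey(0), jnp.ones((2, 8)))
out = mlp.apply(variables, jnp.ones((2, 8)))
print(out.shape)  # (2, 4)
```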
python | euske__pdfminer | pdfminer/pdffont.py | {
"start": 2267,
"end": 2413
} | class ____:
@classmethod
def get_metrics(klass, fontname):
return FONT_METRICS[fontname]
## Type1FontHeaderParser
##
| FontMetricsDB |
python | ijl__orjson | test/test_enum.py | {
"start": 592,
"end": 773
} | class ____(enum.Enum):
A = "a"
B = 1
C = FloatEnum.ONE
D = {"d": IntEnum.ONE} # noqa: RUF012
E = Custom("c")
F = datetime.datetime(1970, 1, 1)
| UnspecifiedEnum |
python | doocs__leetcode | solution/2100-2199/2179.Count Good Triplets in an Array/Solution.py | {
"start": 449,
"end": 889
} | class ____:
def goodTriplets(self, nums1: List[int], nums2: List[int]) -> int:
pos = {v: i for i, v in enumerate(nums2, 1)}
ans = 0
n = len(nums1)
tree = BinaryIndexedTree(n)
for num in nums1:
p = pos[num]
left = tree.query(p)
right = n - p - (tree.query(n) - tree.query(p))
ans += left * right
tree.update(p, 1)
return ans
| Solution |
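The span above calls a `BinaryIndexedTree` helper defined outside the extracted range (note that `pos` is built with 1-based positions via `enumerate(nums2, 1)`). A minimal 1-indexed Fenwick tree compatible with the `update`/`query` calls the record makes; this is an assumption about the omitted code, not the repo's exact definition:

```python
# A minimal 1-indexed Fenwick tree supporting point update and prefix query,
# matching the update(p, 1) and query(p) calls in the record above.
class BinaryIndexedTree:
    def __init__(self, n: int):
        self.n = n
        self.c = [0] * (n + 1)

    def update(self, x: int, delta: int) -> None:
        # add delta at position x (1-indexed)
        while x <= self.n:
            self.c[x] += delta
            x += x & -x

    def query(self, x: int) -> int:
        # sum of positions 1..x
        s = 0
        while x > 0:
            s += self.c[x]
            x -= x & -x
        return s
```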
python | fluentpython__example-code-2e | 06-obj-ref/bus.py | {
"start": 284,
"end": 609
} | class ____:
def __init__(self, passengers=None):
if passengers is None:
self.passengers = []
else:
self.passengers = list(passengers)
def pick(self, name):
self.passengers.append(name)
def drop(self, name):
self.passengers.remove(name)
# end::BUS_CLASS[]
| Bus |
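The point of this Fluent Python record is that `__init__` copies the incoming sequence with `list(passengers)`, so the bus never aliases the caller's list. A quick check of that behavior, assuming `Bus` is in scope; the names are a hypothetical harness, not the book's code:

```python
# Because __init__ does list(passengers), drop() mutates the bus's own copy,
# leaving the caller's list untouched.
team = ['Sue', 'Tina', 'Maya', 'Diana']
bus = Bus(team)
bus.drop('Tina')
print(bus.passengers)  # ['Sue', 'Maya', 'Diana']
print(team)            # ['Sue', 'Tina', 'Maya', 'Diana'] -- unchanged
```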
python | wandb__wandb | wandb/automations/_generated/fragments.py | {
"start": 1713,
"end": 1879
} | class ____(GQLResult):
typename__: Typename[Literal["NoOpTriggeredAction"]] = "NoOpTriggeredAction"
no_op: Optional[bool] = Field(alias="noOp")
| NoOpActionFields |
python | getsentry__sentry | tests/sentry/api/test_paginator.py | {
"start": 34659,
"end": 36469
} | class ____(APITestCase, SnubaTestCase):
cls = CallbackPaginator
def setUp(self) -> None:
super().setUp()
self.now = timezone.now()
self.project.date_added = self.now - timedelta(minutes=5)
for i in range(8):
self.store_event(
project_id=self.project.id,
data={
"event_id": str(i) * 32,
"timestamp": (self.now - timedelta(minutes=2)).isoformat(),
},
)
def test_simple(self) -> None:
paginator = self.cls(
callback=lambda limit, offset: dummy_snuba_request_method(
limit, offset, self.organization.id, self.project.id, self.now
),
)
first_page = paginator.get_result(limit=3)
assert len(first_page.results) == 3
assert first_page.results == [{"event_id": str(i) * 32} for i in range(3)]
assert first_page.next.offset == 1
assert first_page.next.has_results
assert first_page.prev.has_results is False
second_page = paginator.get_result(limit=3, cursor=first_page.next)
assert len(second_page.results) == 3
assert second_page.results == [{"event_id": str(i) * 32} for i in range(3, 6)]
assert second_page.next.offset == 2
assert second_page.next.has_results
assert second_page.prev.offset == 0
assert second_page.prev.has_results
third_page = paginator.get_result(limit=3, cursor=second_page.next)
assert len(third_page.results) == 2
assert third_page.results == [{"event_id": str(i) * 32} for i in range(6, 8)]
assert third_page.next.has_results is False
assert third_page.prev.offset == 1
assert third_page.prev.has_results
| CallbackPaginatorTest |
python | facebook__pyre-check | client/language_server/protocol.py | {
"start": 10353,
"end": 10491
} | class ____(json_mixins.CamlCaseAndExcludeJsonMixin):
include_text: Optional[bool] = None
@dataclasses.dataclass(frozen=True)
| SaveOptions |
python | mkdocs__mkdocs | mkdocs/tests/config/config_options_tests.py | {
"start": 66938,
"end": 66998
} | class ____(BasePlugin[_FakePluginConfig]):
pass
| FakePlugin |
python | allegroai__clearml | clearml/backend_api/services/v2_13/models.py | {
"start": 97657,
"end": 98792
} | class ____(Response):
"""
Response of models.make_public endpoint.
:param updated: Number of models updated
:type updated: int
"""
_service = "models"
_action = "make_public"
_version = "2.13"
_schema = {
"definitions": {},
"properties": {
"updated": {
"description": "Number of models updated",
"type": ["integer", "null"],
}
},
"type": "object",
}
def __init__(self, updated: Optional[int] = None, **kwargs: Any) -> None:
super(MakePublicResponse, self).__init__(**kwargs)
self.updated = updated
@schema_property("updated")
def updated(self) -> Optional[int]:
return self._property_updated
@updated.setter
def updated(self, value: Optional[int]) -> None:
if value is None:
self._property_updated = None
return
if isinstance(value, float) and value.is_integer():
value = int(value)
self.assert_isinstance(value, "updated", six.integer_types)
self._property_updated = value
| MakePublicResponse |
python | pypa__warehouse | tests/unit/manage/test_views.py | {
"start": 187256,
"end": 210727
} | class ____:
@pytest.fixture
def organization(self, _enable_organizations, pyramid_user):
organization = OrganizationFactory.create()
OrganizationRoleFactory.create(
organization=organization,
user=pyramid_user,
role_name=OrganizationRoleType.Owner,
)
return organization
@pytest.fixture
def organization_project(self, organization):
project = ProjectFactory.create(organization=organization)
OrganizationProjectFactory(organization=organization, project=project)
return project
@pytest.fixture
def organization_member(self, organization):
member = UserFactory.create()
OrganizationRoleFactory.create(
organization=organization,
user=member,
role_name=OrganizationRoleType.Member,
)
return member
@pytest.fixture
def organization_team(self, organization, organization_member):
team = TeamFactory(organization=organization)
TeamRoleFactory.create(team=team, user=organization_member)
return team
def test_get_manage_project_roles(self, db_request):
user_service = pretend.stub()
db_request.find_service = pretend.call_recorder(
lambda iface, context: user_service
)
form_obj = pretend.stub()
form_class = pretend.call_recorder(lambda d, user_service: form_obj)
project = ProjectFactory.create(name="foobar")
user = UserFactory.create()
user_2 = UserFactory.create()
role = RoleFactory.create(user=user, project=project)
role_invitation = RoleInvitationFactory.create(user=user_2, project=project)
result = views.manage_project_roles(project, db_request, _form_class=form_class)
assert db_request.find_service.calls == [
pretend.call(IOrganizationService, context=None),
pretend.call(IUserService, context=None),
]
assert form_class.calls == [
pretend.call(db_request.POST, user_service=user_service)
]
assert result == {
"project": project,
"roles": {role},
"invitations": {role_invitation},
"form": form_obj,
"enable_internal_collaborator": False,
"team_project_roles": set(),
"internal_role_form": None,
}
def test_post_new_internal_team_role(
self,
db_request,
organization_project,
organization_team,
organization_member,
monkeypatch,
):
db_request.method = "POST"
db_request.POST = MultiDict(
{
"is_team": "true",
"team_name": organization_team.name,
"team_project_role_name": "Owner",
"username": "",
"role_name": "",
}
)
send_team_collaborator_added_email = pretend.call_recorder(
lambda *a, **kw: None
)
monkeypatch.setattr(
views,
"send_team_collaborator_added_email",
send_team_collaborator_added_email,
)
send_added_as_team_collaborator_email = pretend.call_recorder(
lambda *a, **kw: None
)
monkeypatch.setattr(
views,
"send_added_as_team_collaborator_email",
send_added_as_team_collaborator_email,
)
result = views.manage_project_roles(organization_project, db_request)
assert send_team_collaborator_added_email.calls == [
pretend.call(
db_request,
{db_request.user},
team=organization_team,
submitter=db_request.user,
project_name=organization_project.name,
role="Owner",
)
]
assert send_added_as_team_collaborator_email.calls == [
pretend.call(
db_request,
{organization_member},
team=organization_team,
submitter=db_request.user,
project_name=organization_project.name,
role="Owner",
)
]
assert isinstance(result, HTTPSeeOther)
def test_post_duplicate_internal_team_role(
self,
db_request,
organization_project,
organization_team,
monkeypatch,
):
db_request.method = "POST"
db_request.POST = MultiDict(
{
"is_team": "true",
"team_name": organization_team.name,
"team_project_role_name": "Owner",
"username": "",
"role_name": "",
}
)
db_request.session = pretend.stub(
flash=pretend.call_recorder(lambda *a, **kw: None)
)
team_project_role = TeamProjectRoleFactory.create(
team=organization_team,
project=organization_project,
role_name=TeamProjectRoleType.Owner,
)
result = views.manage_project_roles(organization_project, db_request)
form = result["form"]
internal_role_form = result["internal_role_form"]
# No additional roles are created
assert team_project_role == db_request.db.query(TeamProjectRole).one()
assert db_request.session.flash.calls == [
pretend.call(
f"Team '{organization_team.name}' already has Owner role for project",
queue="error",
)
]
assert result == {
"project": organization_project,
"roles": set(),
"invitations": set(),
"form": form,
"enable_internal_collaborator": True,
"team_project_roles": {team_project_role},
"internal_role_form": internal_role_form,
}
def test_post_new_internal_role(
self,
db_request,
organization_project,
organization_member,
monkeypatch,
):
db_request.method = "POST"
db_request.POST = MultiDict(
{
"is_team": "false",
"team_name": "",
"team_project_role_name": "Owner",
"username": organization_member.username,
"role_name": "Owner",
}
)
send_collaborator_added_email = pretend.call_recorder(lambda *a, **kw: None)
monkeypatch.setattr(
views,
"send_collaborator_added_email",
send_collaborator_added_email,
)
send_added_as_collaborator_email = pretend.call_recorder(lambda *a, **kw: None)
monkeypatch.setattr(
views,
"send_added_as_collaborator_email",
send_added_as_collaborator_email,
)
result = views.manage_project_roles(organization_project, db_request)
assert send_collaborator_added_email.calls == [
pretend.call(
db_request,
{db_request.user},
user=organization_member,
submitter=db_request.user,
project_name=organization_project.name,
role="Owner",
)
]
assert send_added_as_collaborator_email.calls == [
pretend.call(
db_request,
organization_member,
submitter=db_request.user,
project_name=organization_project.name,
role="Owner",
)
]
assert isinstance(result, HTTPSeeOther)
def test_post_new_role_validation_fails(self, db_request):
project = ProjectFactory.create(name="foobar")
user = UserFactory.create(username="testuser")
user_2 = UserFactory.create(username="newuser")
role = RoleFactory.create(user=user, project=project)
role_invitation = RoleInvitationFactory.create(user=user_2, project=project)
user_service = pretend.stub()
db_request.find_service = pretend.call_recorder(
lambda iface, context: user_service
)
db_request.method = "POST"
form_obj = pretend.stub(validate=pretend.call_recorder(lambda: False))
form_class = pretend.call_recorder(lambda d, user_service: form_obj)
result = views.manage_project_roles(project, db_request, _form_class=form_class)
assert db_request.find_service.calls == [
pretend.call(IOrganizationService, context=None),
pretend.call(IUserService, context=None),
]
assert form_class.calls == [
pretend.call(db_request.POST, user_service=user_service)
]
assert form_obj.validate.calls == [pretend.call()]
assert result == {
"project": project,
"roles": {role},
"invitations": {role_invitation},
"form": form_obj,
"enable_internal_collaborator": False,
"team_project_roles": set(),
"internal_role_form": None,
}
def test_post_new_role(self, monkeypatch, db_request):
project = ProjectFactory.create(name="foobar")
new_user = UserFactory.create(username="new_user")
EmailFactory.create(user=new_user, verified=True, primary=True)
owner_1 = UserFactory.create(username="owner_1")
owner_2 = UserFactory.create(username="owner_2")
RoleFactory.create(user=owner_1, project=project, role_name="Owner")
RoleFactory.create(user=owner_2, project=project, role_name="Owner")
organization_service = pretend.stub()
user_service = pretend.stub(
find_userid=lambda username: new_user.id, get_user=lambda userid: new_user
)
token_service = pretend.stub(
dumps=lambda data: "TOKEN", max_age=6 * 60 * 60, loads=lambda data: None
)
db_request.find_service = pretend.call_recorder(
lambda iface, context=None, name=None: {
IOrganizationService: organization_service,
ITokenService: token_service,
IUserService: user_service,
}.get(iface)
)
db_request.method = "POST"
db_request.POST = pretend.stub()
db_request.user = owner_1
form_obj = pretend.stub(
validate=pretend.call_recorder(lambda: True),
username=pretend.stub(data=new_user.username),
role_name=pretend.stub(data="Owner"),
)
form_class = pretend.call_recorder(lambda *a, **kw: form_obj)
db_request.session = pretend.stub(
flash=pretend.call_recorder(lambda *a, **kw: None)
)
send_project_role_verification_email = pretend.call_recorder(
lambda r, u, **k: None
)
monkeypatch.setattr(
views,
"send_project_role_verification_email",
send_project_role_verification_email,
)
result = views.manage_project_roles(project, db_request, _form_class=form_class)
assert db_request.find_service.calls == [
pretend.call(IOrganizationService, context=None),
pretend.call(IUserService, context=None),
pretend.call(ITokenService, name="email"),
]
assert form_obj.validate.calls == [pretend.call()]
assert form_class.calls == [
pretend.call(db_request.POST, user_service=user_service),
]
assert db_request.session.flash.calls == [
pretend.call(f"Invitation sent to '{new_user.username}'", queue="success")
]
# Only one role invitation is created
assert (
db_request.db.query(RoleInvitation)
.filter(RoleInvitation.user == new_user)
.filter(RoleInvitation.project == project)
.one()
)
assert isinstance(result, HTTPSeeOther)
assert send_project_role_verification_email.calls == [
pretend.call(
db_request,
new_user,
desired_role=form_obj.role_name.data,
initiator_username=db_request.user.username,
project_name=project.name,
email_token=token_service.dumps(
{
"action": "email-project-role-verify",
"desired_role": form_obj.role_name.data,
"user_id": new_user.id,
"project_id": project.id,
}
),
token_age=token_service.max_age,
)
]
def test_post_duplicate_role(self, db_request):
project = ProjectFactory.create(name="foobar")
user = UserFactory.create(username="testuser")
role = RoleFactory.create(user=user, project=project, role_name="Owner")
organization_service = pretend.stub()
user_service = pretend.stub(
find_userid=lambda username: user.id, get_user=lambda userid: user
)
token_service = pretend.stub(
dumps=lambda data: "TOKEN", max_age=6 * 60 * 60, loads=lambda data: None
)
db_request.find_service = pretend.call_recorder(
lambda iface, context=None, name=None: {
IOrganizationService: organization_service,
ITokenService: token_service,
IUserService: user_service,
}.get(iface)
)
db_request.method = "POST"
db_request.POST = pretend.stub()
form_obj = pretend.stub(
validate=pretend.call_recorder(lambda: True),
username=pretend.stub(data=user.username),
role_name=pretend.stub(data=role.role_name),
)
form_class = pretend.call_recorder(lambda *a, **kw: form_obj)
db_request.session = pretend.stub(
flash=pretend.call_recorder(lambda *a, **kw: None)
)
result = views.manage_project_roles(project, db_request, _form_class=form_class)
assert db_request.find_service.calls == [
pretend.call(IOrganizationService, context=None),
pretend.call(IUserService, context=None),
]
assert form_obj.validate.calls == [pretend.call()]
assert form_class.calls == [
pretend.call(db_request.POST, user_service=user_service),
]
assert db_request.session.flash.calls == [
pretend.call(
"User 'testuser' already has Owner role for project", queue="error"
)
]
# No additional roles are created
assert role == db_request.db.query(Role).one()
assert isinstance(result, HTTPSeeOther)
def test_reinvite_role_after_expiration(self, monkeypatch, db_request):
project = ProjectFactory.create(name="foobar")
new_user = UserFactory.create(username="new_user")
EmailFactory.create(user=new_user, verified=True, primary=True)
owner_1 = UserFactory.create(username="owner_1")
owner_2 = UserFactory.create(username="owner_2")
RoleFactory.create(user=owner_1, project=project, role_name="Owner")
RoleFactory.create(user=owner_2, project=project, role_name="Owner")
RoleInvitationFactory.create(
user=new_user, project=project, invite_status="expired"
)
organization_service = pretend.stub()
user_service = pretend.stub(
find_userid=lambda username: new_user.id, get_user=lambda userid: new_user
)
token_service = pretend.stub(
dumps=lambda data: "TOKEN", max_age=6 * 60 * 60, loads=lambda data: None
)
db_request.find_service = pretend.call_recorder(
lambda iface, context=None, name=None: {
IOrganizationService: organization_service,
ITokenService: token_service,
IUserService: user_service,
}.get(iface)
)
db_request.method = "POST"
db_request.POST = pretend.stub()
db_request.remote_addr = "10.10.10.10"
db_request.user = owner_1
form_obj = pretend.stub(
validate=pretend.call_recorder(lambda: True),
username=pretend.stub(data=new_user.username),
role_name=pretend.stub(data="Owner"),
)
form_class = pretend.call_recorder(lambda *a, **kw: form_obj)
db_request.session = pretend.stub(
flash=pretend.call_recorder(lambda *a, **kw: None)
)
send_project_role_verification_email = pretend.call_recorder(
lambda r, u, **k: None
)
monkeypatch.setattr(
views,
"send_project_role_verification_email",
send_project_role_verification_email,
)
result = views.manage_project_roles(project, db_request, _form_class=form_class)
assert db_request.find_service.calls == [
pretend.call(IOrganizationService, context=None),
pretend.call(IUserService, context=None),
pretend.call(ITokenService, name="email"),
]
assert form_obj.validate.calls == [pretend.call()]
assert form_class.calls == [
pretend.call(db_request.POST, user_service=user_service),
]
assert db_request.session.flash.calls == [
pretend.call(f"Invitation sent to '{new_user.username}'", queue="success")
]
# Only one role invitation is created
assert (
db_request.db.query(RoleInvitation)
.filter(RoleInvitation.user == new_user)
.filter(RoleInvitation.project == project)
.one()
)
assert isinstance(result, HTTPSeeOther)
assert send_project_role_verification_email.calls == [
pretend.call(
db_request,
new_user,
desired_role=form_obj.role_name.data,
initiator_username=db_request.user.username,
project_name=project.name,
email_token=token_service.dumps(
{
"action": "email-project-role-verify",
"desired_role": form_obj.role_name.data,
"user_id": new_user.id,
"project_id": project.id,
}
),
token_age=token_service.max_age,
)
]
@pytest.mark.parametrize("with_email", [True, False])
def test_post_unverified_email(self, db_request, with_email):
project = ProjectFactory.create(name="foobar")
user = UserFactory.create(username="testuser")
if with_email:
EmailFactory.create(user=user, verified=False, primary=True)
organization_service = pretend.stub()
user_service = pretend.stub(
find_userid=lambda username: user.id, get_user=lambda userid: user
)
token_service = pretend.stub(
dumps=lambda data: "TOKEN",
max_age=6 * 60 * 60,
loads=lambda data: None,
)
db_request.find_service = pretend.call_recorder(
lambda iface, context=None, name=None: {
IOrganizationService: organization_service,
ITokenService: token_service,
IUserService: user_service,
}.get(iface)
)
db_request.method = "POST"
db_request.POST = pretend.stub()
form_obj = pretend.stub(
validate=pretend.call_recorder(lambda: True),
username=pretend.stub(data=user.username),
role_name=pretend.stub(data="Owner"),
)
form_class = pretend.call_recorder(lambda *a, **kw: form_obj)
db_request.session = pretend.stub(
flash=pretend.call_recorder(lambda *a, **kw: None)
)
result = views.manage_project_roles(project, db_request, _form_class=form_class)
assert db_request.find_service.calls == [
pretend.call(IOrganizationService, context=None),
pretend.call(IUserService, context=None),
pretend.call(ITokenService, name="email"),
]
assert form_obj.validate.calls == [pretend.call()]
assert form_class.calls == [
pretend.call(db_request.POST, user_service=user_service),
]
assert db_request.session.flash.calls == [
pretend.call(
"User 'testuser' does not have a verified primary email address "
"and cannot be added as a Owner for project",
queue="error",
)
]
# No additional roles are created
assert db_request.db.query(Role).all() == []
assert isinstance(result, HTTPSeeOther)
def test_cannot_reinvite_role(self, db_request):
project = ProjectFactory.create(name="foobar")
new_user = UserFactory.create(username="new_user")
EmailFactory.create(user=new_user, verified=True, primary=True)
owner_1 = UserFactory.create(username="owner_1")
owner_2 = UserFactory.create(username="owner_2")
RoleFactory.create(user=owner_1, project=project, role_name="Owner")
RoleFactory.create(user=owner_2, project=project, role_name="Owner")
RoleInvitationFactory.create(
user=new_user, project=project, invite_status="pending"
)
organization_service = pretend.stub()
user_service = pretend.stub(
find_userid=lambda username: new_user.id, get_user=lambda userid: new_user
)
token_service = pretend.stub(
dumps=lambda data: "TOKEN",
max_age=6 * 60 * 60,
loads=lambda data: {"desired_role": "Maintainer"},
)
db_request.find_service = pretend.call_recorder(
lambda iface, context=None, name=None: {
IOrganizationService: organization_service,
ITokenService: token_service,
IUserService: user_service,
}.get(iface)
)
db_request.method = "POST"
db_request.POST = pretend.stub()
db_request.remote_addr = "10.10.10.10"
db_request.user = owner_1
form_obj = pretend.stub(
validate=pretend.call_recorder(lambda: True),
username=pretend.stub(data=new_user.username),
role_name=pretend.stub(data="Owner"),
)
form_class = pretend.call_recorder(lambda *a, **kw: form_obj)
db_request.session = pretend.stub(
flash=pretend.call_recorder(lambda *a, **kw: None)
)
result = views.manage_project_roles(project, db_request, _form_class=form_class)
assert db_request.find_service.calls == [
pretend.call(IOrganizationService, context=None),
pretend.call(IUserService, context=None),
pretend.call(ITokenService, name="email"),
]
assert form_obj.validate.calls == [pretend.call()]
assert form_class.calls == [
pretend.call(db_request.POST, user_service=user_service),
]
assert db_request.session.flash.calls == [
pretend.call(
"User 'new_user' already has an active invite. Please try again later.",
queue="error",
)
]
assert isinstance(result, HTTPSeeOther)
| TestManageProjectRoles |
python | tensorflow__tensorflow | tensorflow/python/data/experimental/kernel_tests/optimization/filter_parallelization_test.py | {
"start": 9418,
"end": 12004
} | class ____(checkpoint_test_base.CheckpointTestBase,
parameterized.TestCase):
def enableFilterParallelization(self, dataset):
options = options_lib.Options()
options.experimental_optimization.filter_parallelization = True
return dataset.with_options(options)
def _build_filter_range_graph(self, div):
dataset = dataset_ops.Dataset.range(100)
dataset = self.enableFilterParallelization(dataset)
dataset = dataset.apply(testing.assert_next(["ParallelFilter"]))
return dataset.filter(
lambda x: math_ops.not_equal(math_ops.mod(x, div), 2))
@combinations.generate(
combinations.times(test_base.default_test_combinations(),
checkpoint_test_base.default_test_combinations()))
def test(self, verify_fn):
div = 3
num_outputs = sum(x % 3 != 2 for x in range(100))
verify_fn(self, lambda: self._build_filter_range_graph(div), num_outputs)
def _build_filter_dict_graph(self):
# pylint: disable=g-long-lambda
dataset = dataset_ops.Dataset.range(10).map(lambda x: {
"foo": x * 2,
"bar": x**2
})
dataset = self.enableFilterParallelization(dataset)
dataset = dataset.apply(testing.assert_next(["ParallelFilter"]))
return dataset.filter(lambda d: math_ops.equal(d["bar"] % 2, 0)).map(
lambda d: d["foo"] + d["bar"])
@combinations.generate(
combinations.times(test_base.default_test_combinations(),
checkpoint_test_base.default_test_combinations()))
def testDict(self, verify_fn):
num_outputs = sum((x**2) % 2 == 0 for x in range(10))
# pylint: disable=unnecessary-lambda
verify_fn(self, lambda: self._build_filter_dict_graph(), num_outputs)
def _build_sparse_filter(self):
def _map_fn(i):
return sparse_tensor.SparseTensor(
indices=[[0, 0]], values=(i * [1]), dense_shape=[1, 1]), i
def _filter_fn(_, i):
return math_ops.equal(i % 2, 0)
dataset = dataset_ops.Dataset.range(10).map(_map_fn)
dataset = self.enableFilterParallelization(dataset)
dataset = dataset.apply(testing.assert_next(["ParallelFilter"]))
return dataset.filter(_filter_fn).map(lambda x, i: x)
@combinations.generate(
combinations.times(test_base.default_test_combinations(),
checkpoint_test_base.default_test_combinations()))
def testSparse(self, verify_fn):
# pylint: disable=unnecessary-lambda
verify_fn(self, lambda: self._build_sparse_filter(), num_outputs=5)
if __name__ == "__main__":
test.main()
| FilterCheckpointTest |
python | huggingface__transformers | src/transformers/models/lxmert/modeling_lxmert.py | {
"start": 27747,
"end": 28247
} | class ____(nn.Module):
def __init__(self, config):
super().__init__()
self.transform = LxmertPredictionHeadTransform(config)
self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
self.bias = nn.Parameter(torch.zeros(config.vocab_size))
def forward(self, hidden_states):
hidden_states = self.transform(hidden_states)
hidden_states = self.decoder(hidden_states) + self.bias
return hidden_states
| LxmertLMPredictionHead |
python | ray-project__ray | python/ray/tune/progress_reporter.py | {
"start": 23918,
"end": 47467
} | class ____(TuneReporterBase):
"""Command-line reporter
Args:
metric_columns: Names of metrics to
include in progress table. If this is a dict, the keys should
be metric names and the values should be the displayed names.
If this is a list, the metric name is used directly.
parameter_columns: Names of parameters to
include in progress table. If this is a dict, the keys should
be parameter names and the values should be the displayed names.
If this is a list, the parameter name is used directly. If empty,
defaults to all available parameters.
max_progress_rows: Maximum number of rows to print
in the progress table. The progress table describes the
progress of each trial. Defaults to 20.
max_error_rows: Maximum number of rows to print in the
error table. The error table lists the error file, if any,
corresponding to each trial. Defaults to 20.
max_column_length: Maximum column length (in characters). Column
headers and values longer than this will be abbreviated.
max_report_frequency: Maximum report frequency in seconds.
Defaults to 5s.
infer_limit: Maximum number of metrics to automatically infer
from tune results.
print_intermediate_tables: Print intermediate result
tables. If None (default), will be set to True for verbosity
levels above 3, otherwise False. If True, intermediate tables
will be printed with experiment progress. If False, tables
will only be printed at the end of the tuning run for verbosity
levels greater than 2.
metric: Metric used to determine best current trial.
mode: One of [min, max]. Determines whether objective is
minimizing or maximizing the metric attribute.
sort_by_metric: Sort terminated trials by metric in the
intermediate table. Defaults to False.
"""
def __init__(
self,
*,
metric_columns: Optional[Union[List[str], Dict[str, str]]] = None,
parameter_columns: Optional[Union[List[str], Dict[str, str]]] = None,
total_samples: Optional[int] = None,
max_progress_rows: int = 20,
max_error_rows: int = 20,
max_column_length: int = 20,
max_report_frequency: int = 5,
infer_limit: int = 3,
print_intermediate_tables: Optional[bool] = None,
metric: Optional[str] = None,
mode: Optional[str] = None,
sort_by_metric: bool = False,
):
super(CLIReporter, self).__init__(
metric_columns=metric_columns,
parameter_columns=parameter_columns,
total_samples=total_samples,
max_progress_rows=max_progress_rows,
max_error_rows=max_error_rows,
max_column_length=max_column_length,
max_report_frequency=max_report_frequency,
infer_limit=infer_limit,
print_intermediate_tables=print_intermediate_tables,
metric=metric,
mode=mode,
sort_by_metric=sort_by_metric,
)
def _print(self, msg: str):
safe_print(msg)
def report(self, trials: List[Trial], done: bool, *sys_info: Dict):
self._print(self._progress_str(trials, done, *sys_info))
def _get_memory_usage() -> Tuple[float, float, Optional[str]]:
"""Get the current memory consumption.
Returns:
Memory used, total memory, and optionally a warning
message to be shown to the user when memory consumption is higher
than 90% or if `psutil` is not installed
"""
try:
import ray # noqa F401
import psutil
total_gb = psutil.virtual_memory().total / (1024**3)
used_gb = total_gb - psutil.virtual_memory().available / (1024**3)
if used_gb > total_gb * 0.9:
message = (
": ***LOW MEMORY*** less than 10% of the memory on "
"this node is available for use. This can cause "
"unexpected crashes. Consider "
"reducing the memory used by your application "
"or reducing the Ray object store size by setting "
"`object_store_memory` when calling `ray.init`."
)
else:
message = None
return round(used_gb, 1), round(total_gb, 1), message
except ImportError:
return (
np.nan,
np.nan,
"Unknown memory usage. Please run `pip install psutil` to resolve",
)
def _get_time_str(start_time: float, current_time: float) -> Tuple[str, str]:
"""Get strings representing the current and elapsed time.
Args:
start_time: POSIX timestamp of the start of the tune run
current_time: POSIX timestamp giving the current time
Returns:
Current time and elapsed time for the current run
"""
current_time_dt = datetime.datetime.fromtimestamp(current_time)
start_time_dt = datetime.datetime.fromtimestamp(start_time)
delta: datetime.timedelta = current_time_dt - start_time_dt
rest = delta.total_seconds()
days = rest // (60 * 60 * 24)
rest -= days * (60 * 60 * 24)
hours = rest // (60 * 60)
rest -= hours * (60 * 60)
minutes = rest // 60
seconds = rest - minutes * 60
if days > 0:
running_for_str = f"{days:.0f} days, "
else:
running_for_str = ""
running_for_str += f"{hours:02.0f}:{minutes:02.0f}:{seconds:05.2f}"
return f"{current_time_dt:%Y-%m-%d %H:%M:%S}", running_for_str
def _time_passed_str(start_time: float, current_time: float) -> str:
"""Generate a message describing the current and elapsed time in the run.
Args:
start_time: POSIX timestamp of the start of the tune run
current_time: POSIX timestamp giving the current time
Returns:
Message with the current and elapsed time for the current tune run,
formatted to be displayed to the user
"""
current_time_str, running_for_str = _get_time_str(start_time, current_time)
return f"Current time: {current_time_str} " f"(running for {running_for_str})"
def _get_trials_by_state(trials: List[Trial]):
trials_by_state = collections.defaultdict(list)
for t in trials:
trials_by_state[t.status].append(t)
return trials_by_state
def _trial_progress_str(
trials: List[Trial],
metric_columns: Union[List[str], Dict[str, str]],
parameter_columns: Optional[Union[List[str], Dict[str, str]]] = None,
total_samples: int = 0,
force_table: bool = False,
fmt: str = "psql",
max_rows: Optional[int] = None,
max_column_length: int = 20,
done: bool = False,
metric: Optional[str] = None,
mode: Optional[str] = None,
sort_by_metric: bool = False,
):
"""Returns a human readable message for printing to the console.
This contains a table where each row represents a trial, its parameters
and the current values of its metrics.
Args:
trials: List of trials to get progress string for.
metric_columns: Names of metrics to include.
If this is a dict, the keys are metric names and the values are
the names to use in the message. If this is a list, the metric
name is used in the message directly.
parameter_columns: Names of parameters to
include. If this is a dict, the keys are parameter names and the
values are the names to use in the message. If this is a list,
the parameter name is used in the message directly. If this is
empty, all parameters are used in the message.
total_samples: Total number of trials that will be generated.
force_table: Force printing a table. If False, a table will
be printed only at the end of the training for verbosity levels
above `Verbosity.V2_TRIAL_NORM`.
fmt: Output format (see tablefmt in tabulate API).
max_rows: Maximum number of rows in the trial table. Defaults to
unlimited.
max_column_length: Maximum column length (in characters).
done: True indicates that the tuning run finished.
metric: Metric used to sort trials.
mode: One of [min, max]. Determines whether objective is
minimizing or maximizing the metric attribute.
sort_by_metric: Sort terminated trials by metric in the
intermediate table. Defaults to False.
"""
messages = []
delim = "<br>" if fmt == "html" else "\n"
if len(trials) < 1:
return delim.join(messages)
num_trials = len(trials)
trials_by_state = _get_trials_by_state(trials)
for local_dir in sorted({t.local_experiment_path for t in trials}):
messages.append("Result logdir: {}".format(local_dir))
num_trials_strs = [
"{} {}".format(len(trials_by_state[state]), state)
for state in sorted(trials_by_state)
]
if total_samples and total_samples >= sys.maxsize:
total_samples = "infinite"
messages.append(
"Number of trials: {}{} ({})".format(
num_trials,
f"/{total_samples}" if total_samples else "",
", ".join(num_trials_strs),
)
)
if force_table or (has_verbosity(Verbosity.V2_TRIAL_NORM) and done):
messages += _trial_progress_table(
trials=trials,
metric_columns=metric_columns,
parameter_columns=parameter_columns,
fmt=fmt,
max_rows=max_rows,
metric=metric,
mode=mode,
sort_by_metric=sort_by_metric,
max_column_length=max_column_length,
)
return delim.join(messages)
def _max_len(
value: Any, max_len: int = 20, add_addr: bool = False, wrap: bool = False
) -> Any:
"""Abbreviate a string representation of an object to `max_len` characters.
For numbers, booleans and None, the original value will be returned for
correct rendering in the table formatting tool.
Args:
value: Object to be represented as a string.
max_len: Maximum return string length.
add_addr: If True, will add part of the object address to the end of the
string, e.g. to identify different instances of the same class. If
False, three dots (``...``) will be used instead.
"""
if value is None or isinstance(value, (int, float, numbers.Number, bool)):
return value
string = str(value)
if len(string) <= max_len:
return string
if wrap:
# Maximum two rows.
# Todo: Make this configurable in the refactor
if len(value) > max_len * 2:
value = "..." + string[(3 - (max_len * 2)) :]
wrapped = textwrap.wrap(value, width=max_len)
return "\n".join(wrapped)
if add_addr and not isinstance(value, (int, float, bool)):
result = f"{string[: (max_len - 5)]}_{hex(id(value))[-4:]}"
return result
result = "..." + string[(3 - max_len) :]
return result
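# Illustrative check (values chosen for this sketch): _max_len("abcdefghijklmnopqrstuvwxyz", 10)
# keeps the last 7 characters behind an ellipsis and returns "...tuvwxyz".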
def _get_progress_table_data(
trials: List[Trial],
metric_columns: Union[List[str], Dict[str, str]],
parameter_columns: Optional[Union[List[str], Dict[str, str]]] = None,
max_rows: Optional[int] = None,
metric: Optional[str] = None,
mode: Optional[str] = None,
sort_by_metric: bool = False,
max_column_length: int = 20,
) -> Tuple[List, List[str], Tuple[bool, str]]:
"""Generate a table showing the current progress of tuning trials.
Args:
trials: List of trials for which progress is to be shown.
metric_columns: Metrics to be displayed in the table.
parameter_columns: List of parameters to be included in the data
max_rows: Maximum number of rows to show. If there's overflow, a
message will be shown to the user indicating that some rows
are not displayed
metric: Metric which is being tuned
mode: Sort the table in descending order if mode is "max";
ascending otherwise
sort_by_metric: If true, the table will be sorted by the metric
max_column_length: Max number of characters in each column
Returns:
- Trial data
- List of column names
- Overflow tuple:
- boolean indicating whether the table has rows which are hidden
- string with info about the overflowing rows
"""
num_trials = len(trials)
trials_by_state = _get_trials_by_state(trials)
# Sort terminated trials by metric and mode, descending if mode is "max"
if sort_by_metric:
trials_by_state[Trial.TERMINATED] = sorted(
trials_by_state[Trial.TERMINATED],
reverse=(mode == "max"),
key=lambda t: unflattened_lookup(metric, t.last_result, default=None),
)
state_tbl_order = [
Trial.RUNNING,
Trial.PAUSED,
Trial.PENDING,
Trial.TERMINATED,
Trial.ERROR,
]
max_rows = max_rows or float("inf")
if num_trials > max_rows:
# TODO(ujvl): suggestion for users to view more rows.
trials_by_state_trunc = _fair_filter_trials(
trials_by_state, max_rows, sort_by_metric
)
trials = []
overflow_strs = []
for state in state_tbl_order:
if state not in trials_by_state:
continue
trials += trials_by_state_trunc[state]
num = len(trials_by_state[state]) - len(trials_by_state_trunc[state])
if num > 0:
overflow_strs.append("{} {}".format(num, state))
# Build overflow string.
overflow = num_trials - max_rows
overflow_str = ", ".join(overflow_strs)
else:
overflow = False
overflow_str = ""
trials = []
for state in state_tbl_order:
if state not in trials_by_state:
continue
trials += trials_by_state[state]
# Pre-process trials to figure out what columns to show.
if isinstance(metric_columns, Mapping):
metric_keys = list(metric_columns.keys())
else:
metric_keys = metric_columns
metric_keys = [
k
for k in metric_keys
if any(
unflattened_lookup(k, t.last_result, default=None) is not None
for t in trials
)
]
if not parameter_columns:
parameter_keys = sorted(set().union(*[t.evaluated_params for t in trials]))
elif isinstance(parameter_columns, Mapping):
parameter_keys = list(parameter_columns.keys())
else:
parameter_keys = parameter_columns
# Build trial rows.
trial_table = [
_get_trial_info(
trial, parameter_keys, metric_keys, max_column_length=max_column_length
)
for trial in trials
]
# Format column headings
if isinstance(metric_columns, Mapping):
formatted_metric_columns = [
_max_len(
metric_columns[k], max_len=max_column_length, add_addr=False, wrap=True
)
for k in metric_keys
]
else:
formatted_metric_columns = [
_max_len(k, max_len=max_column_length, add_addr=False, wrap=True)
for k in metric_keys
]
if isinstance(parameter_columns, Mapping):
formatted_parameter_columns = [
_max_len(
parameter_columns[k],
max_len=max_column_length,
add_addr=False,
wrap=True,
)
for k in parameter_keys
]
else:
formatted_parameter_columns = [
_max_len(k, max_len=max_column_length, add_addr=False, wrap=True)
for k in parameter_keys
]
columns = (
["Trial name", "status", "loc"]
+ formatted_parameter_columns
+ formatted_metric_columns
)
return trial_table, columns, (overflow, overflow_str)
def _trial_progress_table(
trials: List[Trial],
metric_columns: Union[List[str], Dict[str, str]],
parameter_columns: Optional[Union[List[str], Dict[str, str]]] = None,
fmt: str = "psql",
max_rows: Optional[int] = None,
metric: Optional[str] = None,
mode: Optional[str] = None,
sort_by_metric: bool = False,
max_column_length: int = 20,
) -> List[str]:
"""Generate a list of trial progress table messages.
Args:
trials: List of trials for which progress is to be shown.
metric_columns: Metrics to be displayed in the table.
parameter_columns: List of parameters to be included in the data
fmt: Format of the table; passed to tabulate as the fmtstr argument
max_rows: Maximum number of rows to show. If there's overflow, a
message will be shown to the user indicating that some rows
are not displayed
metric: Metric which is being tuned
mode: Sort the table in descending order if mode is "max";
ascending otherwise
sort_by_metric: If true, the table will be sorted by the metric
max_column_length: Max number of characters in each column
Returns:
Messages to be shown to the user containing progress tables
"""
data, columns, (overflow, overflow_str) = _get_progress_table_data(
trials,
metric_columns,
parameter_columns,
max_rows,
metric,
mode,
sort_by_metric,
max_column_length,
)
messages = [tabulate(data, headers=columns, tablefmt=fmt, showindex=False)]
if overflow:
messages.append(f"... {overflow} more trials not shown ({overflow_str})")
return messages
def _generate_sys_info_str(*sys_info) -> str:
"""Format system info into a string.
*sys_info: System info strings to be included.
Returns:
Formatted string containing system information.
"""
if sys_info:
return "<br>".join(sys_info).replace("\n", "<br>")
return ""
def _trial_errors_str(
trials: List[Trial], fmt: str = "psql", max_rows: Optional[int] = None
):
"""Returns a readable message regarding trial errors.
Args:
trials: List of trials to get progress string for.
fmt: Output format (see tablefmt in tabulate API).
max_rows: Maximum number of rows in the error table. Defaults to
unlimited.
"""
messages = []
failed = [t for t in trials if t.error_file]
num_failed = len(failed)
if num_failed > 0:
messages.append("Number of errored trials: {}".format(num_failed))
if num_failed > (max_rows or float("inf")):
messages.append(
"Table truncated to {} rows ({} overflow)".format(
max_rows, num_failed - max_rows
)
)
fail_header = ["Trial name", "# failures", "error file"]
fail_table_data = [
[
str(trial),
str(trial.run_metadata.num_failures)
+ ("" if trial.status == Trial.ERROR else "*"),
trial.error_file,
]
for trial in failed[:max_rows]
]
messages.append(
tabulate(
fail_table_data,
headers=fail_header,
tablefmt=fmt,
showindex=False,
colalign=("left", "right", "left"),
)
)
if any(trial.status == Trial.TERMINATED for trial in failed[:max_rows]):
messages.append("* The trial terminated successfully after retrying.")
delim = "<br>" if fmt == "html" else "\n"
return delim.join(messages)
def _best_trial_str(
trial: Trial,
metric: str,
parameter_columns: Optional[Union[List[str], Dict[str, str]]] = None,
):
"""Returns a readable message stating the current best trial."""
val = unflattened_lookup(metric, trial.last_result, default=None)
config = trial.last_result.get("config", {})
parameter_columns = parameter_columns or list(config.keys())
if isinstance(parameter_columns, Mapping):
parameter_columns = parameter_columns.keys()
params = {p: unflattened_lookup(p, config) for p in parameter_columns}
return (
f"Current best trial: {trial.trial_id} with {metric}={val} and "
f"parameters={params}"
)
def _fair_filter_trials(
trials_by_state: Dict[str, List[Trial]],
max_trials: int,
sort_by_metric: bool = False,
):
"""Filters trials such that each state is represented fairly.
The oldest trials are truncated if necessary.
Args:
trials_by_state: Dict mapping trial state to a list of trials.
max_trials: Maximum number of trials to return across all states.
sort_by_metric: If True, preserve the metric ordering of terminated trials.
Returns:
Dict mapping state to List of fairly represented trials.
"""
num_trials_by_state = collections.defaultdict(int)
no_change = False
# Determine number of trials to keep per state.
while max_trials > 0 and not no_change:
no_change = True
for state in sorted(trials_by_state):
if num_trials_by_state[state] < len(trials_by_state[state]):
no_change = False
max_trials -= 1
num_trials_by_state[state] += 1
# Sort each state's trials by trial ID (ascending), unless terminated trials are already sorted by metric.
sorted_trials_by_state = dict()
for state in sorted(trials_by_state):
if state == Trial.TERMINATED and sort_by_metric:
sorted_trials_by_state[state] = trials_by_state[state]
else:
sorted_trials_by_state[state] = sorted(
trials_by_state[state], reverse=False, key=lambda t: t.trial_id
)
# Truncate oldest trials.
filtered_trials = {
state: sorted_trials_by_state[state][: num_trials_by_state[state]]
for state in sorted(trials_by_state)
}
return filtered_trials
def _get_trial_location(trial: Trial, result: dict) -> _Location:
# we get the location from the result, as the one in trial will be
# reset when trial terminates
node_ip, pid = result.get(NODE_IP, None), result.get(PID, None)
if node_ip and pid:
location = _Location(node_ip, pid)
else:
# fallback to trial location if there hasn't been a report yet
location = trial.temporary_state.location
return location
def _get_trial_info(
trial: Trial, parameters: List[str], metrics: List[str], max_column_length: int = 20
):
"""Returns the following information about a trial:
name | status | loc | params... | metrics...
Args:
trial: Trial to get information for.
parameters: Names of trial parameters to include.
metrics: Names of metrics to include.
max_column_length: Maximum column length (in characters).
"""
result = trial.last_result
config = trial.config
location = _get_trial_location(trial, result)
trial_info = [str(trial), trial.status, str(location)]
trial_info += [
_max_len(
unflattened_lookup(param, config, default=None),
max_len=max_column_length,
add_addr=True,
)
for param in parameters
]
trial_info += [
_max_len(
unflattened_lookup(metric, result, default=None),
max_len=max_column_length,
add_addr=True,
)
for metric in metrics
]
return trial_info
@DeveloperAPI
| CLIReporter |
python | pytorch__pytorch | benchmarks/functional_autograd_benchmark/torchaudio_models.py | {
"start": 8593,
"end": 12789
} | class ____(nn.Module):
def __init__(
self,
rnn_type,
labels,
rnn_hidden_size,
nb_layers,
audio_conf,
bidirectional,
context=20,
):
super().__init__()
self.hidden_size = rnn_hidden_size
self.hidden_layers = nb_layers
self.rnn_type = rnn_type
self.audio_conf = audio_conf
self.labels = labels
self.bidirectional = bidirectional
sample_rate = self.audio_conf["sample_rate"]
window_size = self.audio_conf["window_size"]
num_classes = len(self.labels)
self.conv = MaskConv(
nn.Sequential(
nn.Conv2d(1, 32, kernel_size=(41, 11), stride=(2, 2), padding=(20, 5)),
nn.BatchNorm2d(32),
nn.Hardtanh(0, 20, inplace=True),
nn.Conv2d(32, 32, kernel_size=(21, 11), stride=(2, 1), padding=(10, 5)),
nn.BatchNorm2d(32),
nn.Hardtanh(0, 20, inplace=True),
)
)
# Based on above convolutions and spectrogram size using conv formula (W - F + 2P)/ S+1
rnn_input_size = int(math.floor((sample_rate * window_size) / 2) + 1)
rnn_input_size = int(math.floor(rnn_input_size + 2 * 20 - 41) / 2 + 1)
rnn_input_size = int(math.floor(rnn_input_size + 2 * 10 - 21) / 2 + 1)
rnn_input_size *= 32
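# Worked example (hypothetical audio_conf values): sample_rate=16000 and
# window_size=0.02 give 161 frequency bins, which the two strided convs reduce
# to 81 and then 41, so rnn_input_size = 41 * 32 = 1312.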
rnns = []
rnn = BatchRNN(
input_size=rnn_input_size,
hidden_size=rnn_hidden_size,
rnn_type=rnn_type,
bidirectional=bidirectional,
batch_norm=False,
)
rnns.append(("0", rnn))
for x in range(nb_layers - 1):
rnn = BatchRNN(
input_size=rnn_hidden_size,
hidden_size=rnn_hidden_size,
rnn_type=rnn_type,
bidirectional=bidirectional,
)
rnns.append((f"{x + 1:d}", rnn))
self.rnns = nn.Sequential(OrderedDict(rnns))
self.lookahead = (
nn.Sequential(
# consider adding batch norm?
Lookahead(rnn_hidden_size, context=context),
nn.Hardtanh(0, 20, inplace=True),
)
if not bidirectional
else None
)
fully_connected = nn.Sequential(
nn.BatchNorm1d(rnn_hidden_size),
nn.Linear(rnn_hidden_size, num_classes, bias=False),
)
self.fc = nn.Sequential(
SequenceWise(fully_connected),
)
self.inference_softmax = InferenceBatchSoftmax()
def forward(self, x, lengths):
lengths = lengths.cpu().int()
output_lengths = self.get_seq_lens(lengths)
x, _ = self.conv(x, output_lengths)
sizes = x.size()
x = x.view(
sizes[0], sizes[1] * sizes[2], sizes[3]
) # Collapse feature dimension
x = x.transpose(1, 2).transpose(0, 1).contiguous() # TxNxH
for rnn in self.rnns:
x = rnn(x, output_lengths)
if not self.bidirectional: # no need for lookahead layer in bidirectional
x = self.lookahead(x)
x = self.fc(x)
x = x.transpose(0, 1)
# identity in training mode, softmax in eval mode
x = self.inference_softmax(x)
return x, output_lengths
def get_seq_lens(self, input_length):
"""
Given a 1D Tensor or Variable containing integer sequence lengths, return a 1D tensor or variable
containing the sequence sizes that will be output by the network.
:param input_length: 1D Tensor
:return: 1D Tensor scaled by model
"""
seq_len = input_length
for m in self.conv.modules():
if type(m) is nn.modules.conv.Conv2d:
seq_len = (
seq_len
+ 2 * m.padding[1]
- m.dilation[1] * (m.kernel_size[1] - 1)
- 1
)
seq_len = seq_len.true_divide(m.stride[1]) + 1
return seq_len.int()
# Taken from https://github.com/pytorch/examples/blob/master/word_language_model/model.py#L108-L152
| DeepSpeech |
python | apache__airflow | helm-tests/tests/helm_tests/airflow_aux/test_create_user_job.py | {
"start": 914,
"end": 17339
} | class ____:
"""Tests create user job."""
def test_should_run_by_default(self):
docs = render_chart(show_only=["templates/jobs/create-user-job.yaml"])
assert docs[0]["kind"] == "Job"
assert jmespath.search("spec.template.spec.containers[0].name", docs[0]) == "create-user"
assert jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0]) == 50000
def test_should_support_annotations(self):
docs = render_chart(
values={"createUserJob": {"annotations": {"foo": "bar"}, "jobAnnotations": {"fiz": "fuz"}}},
show_only=["templates/jobs/create-user-job.yaml"],
)
annotations = jmespath.search("spec.template.metadata.annotations", docs[0])
assert "foo" in annotations
assert annotations["foo"] == "bar"
job_annotations = jmespath.search("metadata.annotations", docs[0])
assert "fiz" in job_annotations
assert job_annotations["fiz"] == "fuz"
def test_should_add_component_specific_labels(self):
docs = render_chart(
values={
"createUserJob": {
"labels": {"test_label": "test_label_value"},
},
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert "test_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert jmespath.search("spec.template.metadata.labels", docs[0])["test_label"] == "test_label_value"
def test_should_create_valid_affinity_tolerations_and_node_selector(self):
docs = render_chart(
values={
"createUserJob": {
"affinity": {
"nodeAffinity": {
"requiredDuringSchedulingIgnoredDuringExecution": {
"nodeSelectorTerms": [
{
"matchExpressions": [
{"key": "foo", "operator": "In", "values": ["true"]},
]
}
]
}
}
},
"tolerations": [
{"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"}
],
"nodeSelector": {"diskType": "ssd"},
}
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert jmespath.search("kind", docs[0]) == "Job"
assert (
jmespath.search(
"spec.template.spec.affinity.nodeAffinity."
"requiredDuringSchedulingIgnoredDuringExecution."
"nodeSelectorTerms[0]."
"matchExpressions[0]."
"key",
docs[0],
)
== "foo"
)
assert (
jmespath.search(
"spec.template.spec.nodeSelector.diskType",
docs[0],
)
== "ssd"
)
assert (
jmespath.search(
"spec.template.spec.tolerations[0].key",
docs[0],
)
== "dynamic-pods"
)
def test_scheduler_name(self):
docs = render_chart(
values={"schedulerName": "airflow-scheduler"},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert (
jmespath.search(
"spec.template.spec.schedulerName",
docs[0],
)
== "airflow-scheduler"
)
def test_create_user_job_resources_are_configurable(self):
resources = {
"requests": {
"cpu": "128m",
"memory": "256Mi",
},
"limits": {
"cpu": "256m",
"memory": "512Mi",
},
}
docs = render_chart(
values={
"createUserJob": {
"resources": resources,
},
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert resources == jmespath.search("spec.template.spec.containers[0].resources", docs[0])
def test_should_disable_default_helm_hooks(self):
docs = render_chart(
values={"createUserJob": {"useHelmHooks": False}},
show_only=["templates/jobs/create-user-job.yaml"],
)
annotations = jmespath.search("metadata.annotations", docs[0])
assert annotations is None
def test_should_set_correct_helm_hooks_weight(self):
docs = render_chart(
show_only=[
"templates/jobs/create-user-job.yaml",
],
)
annotations = jmespath.search("metadata.annotations", docs[0])
assert annotations["helm.sh/hook-weight"] == "2"
def test_should_add_extra_containers(self):
docs = render_chart(
values={
"createUserJob": {
"extraContainers": [
{"name": "{{ .Chart.Name}}", "image": "test-registry/test-repo:test-tag"}
],
},
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == {
"name": "airflow",
"image": "test-registry/test-repo:test-tag",
}
def test_should_add_extra_init_containers(self):
docs = render_chart(
values={
"createUserJob": {
"extraInitContainers": [
{"name": "{{ .Chart.Name}}", "image": "test-registry/test-repo:test-tag"}
],
},
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert jmespath.search("spec.template.spec.initContainers[0]", docs[0]) == {
"name": "airflow",
"image": "test-registry/test-repo:test-tag",
}
def test_should_template_extra_containers(self):
docs = render_chart(
values={
"createUserJob": {
"extraContainers": [{"name": "{{ .Release.Name }}-test-container"}],
},
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert jmespath.search("spec.template.spec.containers[-1]", docs[0]) == {
"name": "release-name-test-container"
}
def test_should_add_extra_volumes(self):
docs = render_chart(
values={
"createUserJob": {
"extraVolumes": [{"name": "myvolume-{{ .Chart.Name }}", "emptyDir": {}}],
},
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert jmespath.search("spec.template.spec.volumes[-1]", docs[0]) == {
"name": "myvolume-airflow",
"emptyDir": {},
}
def test_should_add_extra_volume_mounts(self):
docs = render_chart(
values={
"createUserJob": {
"extraVolumeMounts": [{"name": "foobar-{{ .Chart.Name }}", "mountPath": "foo/bar"}],
},
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert jmespath.search("spec.template.spec.containers[0].volumeMounts[-1]", docs[0]) == {
"name": "foobar-airflow",
"mountPath": "foo/bar",
}
def test_should_add_global_volume_and_global_volume_mount(self):
docs = render_chart(
values={
"volumes": [{"name": "myvolume", "emptyDir": {}}],
"volumeMounts": [{"name": "foobar", "mountPath": "foo/bar"}],
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert jmespath.search("spec.template.spec.volumes[-1]", docs[0]) == {
"name": "myvolume",
"emptyDir": {},
}
assert jmespath.search("spec.template.spec.containers[0].volumeMounts[-1]", docs[0]) == {
"name": "foobar",
"mountPath": "foo/bar",
}
def test_should_add_extraEnvs(self):
docs = render_chart(
values={
"createUserJob": {
"env": [
{"name": "TEST_ENV_1", "value": "test_env_1"},
{
"name": "TEST_ENV_2",
"valueFrom": {"secretKeyRef": {"name": "my-secret", "key": "my-key"}},
},
{
"name": "TEST_ENV_3",
"valueFrom": {"configMapKeyRef": {"name": "my-config-map", "key": "my-key"}},
},
],
},
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search(
"spec.template.spec.containers[0].env", docs[0]
)
assert {
"name": "TEST_ENV_2",
"valueFrom": {"secretKeyRef": {"name": "my-secret", "key": "my-key"}},
} in jmespath.search("spec.template.spec.containers[0].env", docs[0])
assert {
"name": "TEST_ENV_3",
"valueFrom": {"configMapKeyRef": {"name": "my-config-map", "key": "my-key"}},
} in jmespath.search("spec.template.spec.containers[0].env", docs[0])
def test_should_enable_custom_env(self):
docs = render_chart(
values={
"env": [
{"name": "foo", "value": "bar"},
],
"extraEnv": "- name: extraFoo\n value: extraBar\n",
"createUserJob": {"applyCustomEnv": True},
},
show_only=["templates/jobs/create-user-job.yaml"],
)
envs = jmespath.search("spec.template.spec.containers[0].env", docs[0])
assert {"name": "foo", "value": "bar"} in envs
assert {"name": "extraFoo", "value": "extraBar"} in envs
def test_should_disable_custom_env(self):
docs = render_chart(
values={
"env": [
{"name": "foo", "value": "bar"},
],
"extraEnv": "- name: extraFoo\n value: extraBar\n",
"createUserJob": {"applyCustomEnv": False},
},
show_only=["templates/jobs/create-user-job.yaml"],
)
envs = jmespath.search("spec.template.spec.containers[0].env", docs[0])
assert {"name": "foo", "value": "bar"} not in envs
assert {"name": "extraFoo", "value": "extraBar"} not in envs
def test_job_ttl_after_finished(self):
docs = render_chart(
values={"createUserJob": {"ttlSecondsAfterFinished": 1}},
show_only=["templates/jobs/create-user-job.yaml"],
)
ttl = jmespath.search("spec.ttlSecondsAfterFinished", docs[0])
assert ttl == 1
def test_job_ttl_after_finished_zero(self):
docs = render_chart(
values={"createUserJob": {"ttlSecondsAfterFinished": 0}},
show_only=["templates/jobs/create-user-job.yaml"],
)
ttl = jmespath.search("spec.ttlSecondsAfterFinished", docs[0])
assert ttl == 0
def test_job_ttl_after_finished_nil(self):
docs = render_chart(
values={"createUserJob": {"ttlSecondsAfterFinished": None}},
show_only=["templates/jobs/create-user-job.yaml"],
)
spec = jmespath.search("spec", docs[0])
assert "ttlSecondsAfterFinished" not in spec
@pytest.mark.parametrize(
("airflow_version", "expected_arg"),
[
("1.10.14", "airflow create_user"),
("2.0.2", "airflow users create"),
],
)
def test_default_command_and_args_airflow_version(self, airflow_version, expected_arg):
docs = render_chart(
values={
"airflowVersion": airflow_version,
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None
assert [
"bash",
"-c",
f'exec \\\n{expected_arg} "$@"',
"--",
"-r",
"Admin",
"-u",
"admin",
"-e",
"admin@example.com",
"-f",
"admin",
"-l",
"user",
"-p",
"admin",
] == jmespath.search("spec.template.spec.containers[0].args", docs[0])
@pytest.mark.parametrize("command", [None, ["custom", "command"]])
@pytest.mark.parametrize("args", [None, ["custom", "args"]])
def test_command_and_args_overrides(self, command, args):
docs = render_chart(
values={"createUserJob": {"command": command, "args": args}},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert command == jmespath.search("spec.template.spec.containers[0].command", docs[0])
assert args == jmespath.search("spec.template.spec.containers[0].args", docs[0])
def test_command_and_args_overrides_are_templated(self):
docs = render_chart(
values={
"createUserJob": {"command": ["{{ .Release.Name }}"], "args": ["{{ .Release.Service }}"]}
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) == ["release-name"]
assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == ["Helm"]
def test_default_user_overrides(self):
docs = render_chart(
values={
"webserver": {
"defaultUser": {
"role": "SomeRole",
"username": "jdoe",
"email": "jdoe@example.com",
"firstName": "John",
"lastName": "Doe",
"password": "whereisjane?",
}
}
},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None
assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == [
"bash",
"-c",
'exec \\\nairflow users create "$@"',
"--",
"-r",
"SomeRole",
"-u",
"jdoe",
"-e",
"jdoe@example.com",
"-f",
"John",
"-l",
"Doe",
"-p",
"whereisjane?",
]
def test_no_airflow_local_settings(self):
docs = render_chart(
values={"airflowLocalSettings": None}, show_only=["templates/jobs/create-user-job.yaml"]
)
volume_mounts = jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0])
assert "airflow_local_settings.py" not in str(volume_mounts)
def test_airflow_local_settings(self):
docs = render_chart(
values={"airflowLocalSettings": "# Well hello!"},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert {
"name": "config",
"mountPath": "/opt/airflow/config/airflow_local_settings.py",
"subPath": "airflow_local_settings.py",
"readOnly": True,
} in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0])
@pytest.mark.parametrize(
"restart_policy",
[
"OnFailure",
"Never",
],
)
def test_restart_policy(self, restart_policy):
docs = render_chart(
values={"createUserJob": {"restartPolicy": restart_policy}},
show_only=["templates/jobs/create-user-job.yaml"],
)
assert restart_policy == jmespath.search("spec.template.spec.restartPolicy", docs[0])
| TestCreateUserJob |
python | huggingface__transformers | src/transformers/utils/import_utils.py | {
"start": 59020,
"end": 59608
} | class ____(type):
"""
Metaclass for the dummy objects. Any class inheriting from it will return the ImportError generated by
`requires_backend` each time a user tries to access any method of that class.
"""
is_dummy = True
def __getattribute__(cls, key):
if (key.startswith("_") and key != "_from_config") or key == "is_dummy" or key == "mro" or key == "call":
return super().__getattribute__(key)
requires_backends(cls, cls._backends)
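# Usage sketch (pattern assumed for illustration, not taken from this file):
# a dummy class declares the backends it stands in for, and any public
# attribute access then raises the missing-backend ImportError.
#
#   class TorchOnlyModel(metaclass=DummyObject):
#       _backends = ["torch"]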
BACKENDS_T = frozenset[str]
IMPORT_STRUCTURE_T = dict[BACKENDS_T, dict[str, set[str]]]
| DummyObject |
python | streamlit__streamlit | lib/tests/streamlit/runtime/caching/cache_resource_api_test.py | {
"start": 7899,
"end": 9969
} | class ____(unittest.TestCase):
def setUp(self) -> None:
# Caching functions rely on an active script run ctx
add_script_run_ctx(threading.current_thread(), create_mock_script_run_ctx())
def tearDown(self):
st.cache_resource.clear()
# Some of these tests reach directly into _cache_info and twiddle it.
# Reset default values on teardown.
cache_resource_api.CACHE_RESOURCE_MESSAGE_REPLAY_CTX._cached_func_stack = []
def test_validate_success(self):
"""If we have a validate function and it returns True, we don't recompute our cached value."""
validate = Mock(return_value=True)
call_count: list[int] = [0]
@st.cache_resource(validate=validate)
def f() -> int:
call_count[0] += 1
return call_count[0]
# First call: call_count == 1; validate not called (because we computed a new value)
assert f() == 1
validate.assert_not_called()
# Subsequent calls: call_count == 1; validate called each time
for _ in range(3):
assert f() == 1
validate.assert_called_once_with(1)
validate.reset_mock()
def test_validate_fail(self):
"""If we have a validate function and it returns False, we recompute our cached value."""
validate = Mock(return_value=False)
call_count: list[int] = [0]
@st.cache_resource(validate=validate)
def f() -> int:
call_count[0] += 1
return call_count[0]
# First call: call_count == 1; validate not called (because we computed a new value)
expected_call_count = 1
assert expected_call_count == f()
validate.assert_not_called()
# Subsequent calls: call_count increases; validate called with previous value
for _ in range(3):
expected_call_count += 1
assert expected_call_count == f()
validate.assert_called_once_with(expected_call_count - 1)
validate.reset_mock()
| CacheResourceValidateTest |
python | modin-project__modin | modin/core/execution/ray/common/deferred_execution.py | {
"start": 19659,
"end": 28258
} | class ____:
"""Remote functions for DeferredExecution."""
@staticmethod
def exec_func(fn: Callable, obj: Any, args: Tuple, kwargs: Dict) -> Any:
"""
Execute the specified function.
Parameters
----------
fn : Callable
obj : Any
args : Tuple
kwargs : dict
Returns
-------
Any
"""
try:
try:
return fn(obj, *args, **kwargs)
# Sometimes Arrow forces us to make a copy of an object before we operate on it. We
# don't want the error to propagate to the user, and we want to avoid copying unless
# we absolutely have to.
except ValueError as err:
if isinstance(obj, (pandas.DataFrame, pandas.Series)):
return fn(obj.copy(), *args, **kwargs)
else:
raise err
except Exception as err:
get_logger().error(
f"{err}. fn={fn}, obj={obj}, args={args}, kwargs={kwargs}"
)
raise err
@classmethod
def construct(cls, num_returns: int, args: Tuple): # pragma: no cover
"""
Construct and execute the specified chain.
This function is called in a worker process. The last value returned by
this generator is the meta list, containing the object lengths and widths,
with the worker IP address as its final element.
Parameters
----------
num_returns : int
args : tuple
Yields
------
Any
The execution results and the MetaList as the last value.
"""
chain = list(reversed(args))
meta = []
try:
stack = [cls.construct_chain(chain, {}, meta, None)]
while stack:
try:
gen = stack.pop()
obj = next(gen)
stack.append(gen)
if isinstance(obj, Generator):
stack.append(obj)
else:
yield obj
except StopIteration:
pass
except Exception as err:
get_logger().error(f"{err}. args={args}, chain={list(reversed(chain))}")
raise err
meta.append(get_node_ip_address())
yield meta
@classmethod
def construct_chain(
cls,
chain: List,
refs: Dict[int, Any],
meta: List,
lst: Optional[List],
): # pragma: no cover
"""
Construct the chain and execute it one by one.
Parameters
----------
chain : list
A flat list containing the execution tree, deconstructed by
``DeferredExecution._deconstruct()``.
refs : dict
If an execution result is required for multiple chains, the
reference to this result is saved in this dict.
meta : list
The lengths of the returned objects are added to this list.
lst : list
If specified, the execution result is added to this list.
This is used when a chain is passed as an argument to a
DeferredExecution task.
Yields
------
Any
Either the ``construct_list()`` generator or the execution results.
"""
pop = chain.pop
tg_e = _Tag.END
obj = pop()
if obj is _Tag.REF:
obj = refs[pop()]
elif obj is _Tag.LIST:
obj = []
yield cls.construct_list(obj, chain, refs, meta)
while chain:
fn = pop()
if fn == tg_e:
lst.append(obj)
break
if (args_len := pop()) >= 0:
if args_len == 0:
args = []
else:
args = chain[-args_len:]
del chain[-args_len:]
args.reverse()
else:
args = []
yield cls.construct_list(args, chain, refs, meta)
if (args_len := pop()) >= 0:
kwargs = {pop(): pop() for _ in range(args_len)}
else:
values = []
yield cls.construct_list(values, chain, refs, meta)
kwargs = {pop(): v for v in values}
obj = cls.exec_func(fn, obj, args, kwargs)
if ref := pop(): # <ref> is not 0 - adding the result to refs
refs[ref] = obj
if (num_returns := pop()) == 0:
continue
itr = iter([obj] if num_returns == 1 else obj)
for _ in range(num_returns):
obj = next(itr)
meta.append(len(obj) if hasattr(obj, "__len__") else 0)
meta.append(len(obj.columns) if hasattr(obj, "columns") else 0)
yield obj
@classmethod
def construct_list(
cls,
lst: List,
chain: List,
refs: Dict[int, Any],
meta: List,
): # pragma: no cover
"""
Construct the list.
Parameters
----------
lst : list
chain : list
refs : dict
meta : list
Yields
------
Any
Either ``construct_chain()`` or ``construct_list()`` generator.
"""
pop = chain.pop
lst_append = lst.append
while True:
obj = pop()
if isinstance(obj, _Tag):
if obj == _Tag.END:
break
elif obj == _Tag.CHAIN:
yield cls.construct_chain(chain, refs, meta, lst)
elif obj == _Tag.LIST:
lst_append([])
yield cls.construct_list(lst[-1], chain, refs, meta)
elif obj is _Tag.REF:
lst_append(refs[pop()])
else:
raise ValueError(f"Unexpected tag {obj}")
else:
lst_append(obj)
def __reduce__(self):
"""
Use a single instance on deserialization.
Returns
-------
str
Returns the ``_REMOTE_EXEC`` attribute name.
"""
return "_REMOTE_EXEC"
_REMOTE_EXEC = _RemoteExecutor()
@ray.remote(num_returns=4)
def remote_exec_func(
fn: Callable,
obj: Any,
*flat_args: Tuple,
remote_executor=_REMOTE_EXEC,
**flat_kwargs: Dict,
): # pragma: no cover
"""
Execute the specified function with the arguments in a worker process.
The object `obj` is passed to the function as the first argument.
Note: all the arguments must be flat, i.e. no lists, no chains.
Parameters
----------
fn : Callable
obj : Any
*flat_args : list
remote_executor : _RemoteExecutor, default: _REMOTE_EXEC
Do not change, it's used to avoid excessive serializations.
**flat_kwargs : dict
Returns
-------
tuple[Any, int, int, str]
The execution result, the result length and width, and the worker address.
"""
obj = remote_executor.exec_func(fn, obj, flat_args, flat_kwargs)
return (
obj,
len(obj) if hasattr(obj, "__len__") else 0,
len(obj.columns) if hasattr(obj, "columns") else 0,
get_node_ip_address(),
)
@ray.remote(num_returns=2)
def _remote_exec_single_chain(
*args: Tuple, remote_executor=_REMOTE_EXEC
) -> Generator: # pragma: no cover
"""
Execute the deconstructed chain with a single return value in a worker process.
Parameters
----------
*args : tuple
A deconstructed chain to be executed.
remote_executor : _RemoteExecutor, default: _REMOTE_EXEC
Do not change, it's used to avoid excessive serializations.
Returns
-------
Generator
"""
return remote_executor.construct(num_returns=2, args=args)
@ray.remote
def _remote_exec_multi_chain(
num_returns: int, *args: Tuple, remote_executor=_REMOTE_EXEC
) -> Generator: # pragma: no cover
"""
Execute the deconstructed chain with multiple return values in a worker process.
Parameters
----------
num_returns : int
The number of return values.
*args : tuple
A deconstructed chain to be executed.
remote_executor : _RemoteExecutor, default: _REMOTE_EXEC
Do not change, it's used to avoid excessive serializations.
Returns
-------
Generator
"""
return remote_executor.construct(num_returns, args)
| _RemoteExecutor |
python | charliermarsh__ruff | crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI013.py | {
"start": 101,
"end": 143
} | class ____:
...
...
| TwoEllipsesClass |
python | redis__redis-py | redis/commands/search/reducers.py | {
"start": 2592,
"end": 3775
} | class ____(Reducer):
"""
Selects the first value within the group according to sorting parameters
"""
NAME = "FIRST_VALUE"
def __init__(self, field: str, *byfields: Union[Asc, Desc]) -> None:
"""
Selects the first value of the given field within the group.
### Parameter
- **field**: Source field used for the value
- **byfields**: How to sort the results. This can be either the
*class* of `aggregation.Asc` or `aggregation.Desc` in which
case the field `field` is also used as the sort input.
`byfields` can also be one or more *instances* of `Asc` or `Desc`
indicating the sort order for these fields
"""
fieldstrs = []
if (
len(byfields) == 1
and isinstance(byfields[0], type)
and issubclass(byfields[0], SortDirection)
):
byfields = [byfields[0](field)]
for f in byfields:
fieldstrs += [f.field, f.DIRSTRING]
args = [field]
if fieldstrs:
args += ["BY"] + fieldstrs
super().__init__(*args)
self._field = field
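# Usage sketch (field names are illustrative): first_value("@name", Desc("@age"))
# builds a FIRST_VALUE reducer over @name ordered BY @age DESC.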
| first_value |
python | django__django | tests/indexes/models.py | {
"start": 1282,
"end": 1563
} | class ____(models.Model):
headline = models.CharField(max_length=100, db_index=True)
body = models.TextField(db_index=True)
slug = models.CharField(max_length=40, unique=True)
class Meta:
required_db_features = {"supports_index_on_text_field"}
| IndexedArticle |
python | GoogleCloudPlatform__python-docs-samples | appengine/standard/endpoints-frameworks-v2/iata/main.py | {
"start": 1023,
"end": 1272
} | class ____(messages.Message):
iata = messages.StringField(1, required=True)
name = messages.StringField(2, required=True)
IATA_AIRPORT_RESOURCE = endpoints.ResourceContainer(
Airport, iata=messages.StringField(1, required=True)
)
| Airport |
python | milvus-io__pymilvus | pymilvus/client/search_result.py | {
"start": 22418,
"end": 25757
} | class ____(list):
"""List[Dict] Topk search result with pks, distances, and output fields.
[
{"id": 1, "distance": 0.3, "entity": {"vector": [1, 2, 3]}},
{"id": 2, "distance": 0.2, "entity": {"vector": [4, 5, 6]}},
{"id": 3, "distance": 0.1, "entity": {"vector": [7, 8, 9]}},
]
Examples:
>>> res = client.search()
>>> hits = res[0]
>>> for hit in hits:
>>> print(hit)
{"id": 1, "distance": 0.3, "entity": {"vector": [1, 2, 3]}}
{"id": 2, "distance": 0.2, "entity": {"vector": [4, 5, 6]}}
{"id": 3, "distance": 0.1, "entity": {"vector": [7, 8, 9]}}
Attributes:
ids(List[Union[str, int]]): topk primary keys
distances(List[float]): topk distances
"""
ids: List[Union[str, int]]
distances: List[float]
def __init__(
self,
topk: int,
pks: List[Union[int, str]],
distances: List[float],
fields: Dict[str, Tuple[List[Any], schema_pb2.FieldData]],
output_fields: List[str],
pk_name: str,
):
"""
Args:
fields(Dict[str, Tuple[List[Any], schema_pb2.FieldData]]):
field name to a tuple of topk data and field meta
"""
self.ids = pks
self.distances = distances
all_fields = list(fields.keys())
dynamic_fields = list(set(output_fields) - set(all_fields))
top_k_res = []
for i in range(topk):
entity = {}
for fname, (data, field_meta) in fields.items():
if len(data) <= i:
entity[fname] = None
# Get dense vectors
if field_meta.type in (
DataType.FLOAT_VECTOR,
DataType.BINARY_VECTOR,
DataType.BFLOAT16_VECTOR,
DataType.FLOAT16_VECTOR,
DataType.INT8_VECTOR,
):
dim = field_meta.vectors.dim
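# The flat buffer packs rows back-to-back: binary vectors hold 8 dims per
# byte and (b)float16 vectors take 2 bytes per dim, so the slice width is
# adjusted before cutting out row i.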
if field_meta.type in [DataType.BINARY_VECTOR]:
dim = dim // 8
elif field_meta.type in [DataType.BFLOAT16_VECTOR, DataType.FLOAT16_VECTOR]:
dim = dim * 2
entity[fname] = data[i * dim : (i + 1) * dim]
continue
# Get dynamic fields
if field_meta.type == DataType.JSON and field_meta.is_dynamic:
if len(dynamic_fields) > 0:
entity.update({k: v for k, v in data[i].items() if k in dynamic_fields})
continue
if fname in output_fields:
entity.update(data[i])
continue
# sparse float vector and other fields
entity[fname] = data[i]
top_k_res.append(
Hit({pk_name: pks[i], "distance": distances[i], "entity": entity}, pk_name=pk_name)
)
super().__init__(top_k_res)
def __str__(self) -> str:
"""Only print at most 10 query results"""
reminder = f" ... and {len(self) - 10} entities remaining" if len(self) > 10 else ""
return f"{self[:10]}{reminder}"
__repr__ = __str__
from collections import UserDict
| Hits |
python | simonw__datasette | datasette/views/special.py | {
"start": 12329,
"end": 18302
} | class ____(BaseView):
name = "permission_rules"
has_json_alternate = False
async def get(self, request):
await self.ds.ensure_permission(action="view-instance", actor=request.actor)
await self.ds.ensure_permission(action="permissions-debug", actor=request.actor)
# Check if this is a request for JSON (has .json extension)
as_format = request.url_vars.get("format")
if not as_format:
# Render the HTML form (even if query parameters are present)
return await self.render(
["debug_rules.html"],
request,
{
"sorted_actions": sorted(self.ds.actions.keys()),
"has_debug_permission": True,
},
)
# JSON API - action parameter is required
action = request.args.get("action")
if not action:
return Response.json({"error": "action parameter is required"}, status=400)
if action not in self.ds.actions:
return Response.json({"error": f"Unknown action: {action}"}, status=404)
actor = request.actor if isinstance(request.actor, dict) else None
try:
page = int(request.args.get("page", "1"))
page_size = int(request.args.get("page_size", "50"))
except ValueError:
return Response.json(
{"error": "page and page_size must be integers"}, status=400
)
if page < 1:
return Response.json({"error": "page must be >= 1"}, status=400)
if page_size < 1:
return Response.json({"error": "page_size must be >= 1"}, status=400)
max_page_size = 200
if page_size > max_page_size:
page_size = max_page_size
offset = (page - 1) * page_size
from datasette.utils.actions_sql import build_permission_rules_sql
union_sql, union_params, restriction_sqls = await build_permission_rules_sql(
self.ds, actor, action
)
await self.ds.refresh_schemas()
db = self.ds.get_internal_database()
count_query = f"""
WITH rules AS (
{union_sql}
)
SELECT COUNT(*) AS count
FROM rules
"""
count_row = (await db.execute(count_query, union_params)).first()
total = count_row["count"] if count_row else 0
data_query = f"""
WITH rules AS (
{union_sql}
)
SELECT parent, child, allow, reason, source_plugin
FROM rules
ORDER BY allow DESC, (parent IS NOT NULL), parent, child
LIMIT :limit OFFSET :offset
"""
params = {**union_params, "limit": page_size, "offset": offset}
rows = await db.execute(data_query, params)
items = []
for row in rows:
parent = row["parent"]
child = row["child"]
items.append(
{
"parent": parent,
"child": child,
"resource": _resource_path(parent, child),
"allow": row["allow"],
"reason": row["reason"],
"source_plugin": row["source_plugin"],
}
)
def build_page_url(page_number):
pairs = []
for key in request.args:
if key in {"page", "page_size"}:
continue
for value in request.args.getlist(key):
pairs.append((key, value))
pairs.append(("page", str(page_number)))
pairs.append(("page_size", str(page_size)))
query = urllib.parse.urlencode(pairs)
return f"{request.path}?{query}"
response = {
"action": action,
"actor_id": (actor or {}).get("id") if actor else None,
"page": page,
"page_size": page_size,
"total": total,
"items": items,
}
if total > offset + page_size:
response["next_url"] = build_page_url(page + 1)
if page > 1:
response["previous_url"] = build_page_url(page - 1)
headers = {}
if self.ds.cors:
add_cors_headers(headers)
return Response.json(response, headers=headers)
async def _check_permission_for_actor(ds, action, parent, child, actor):
"""Shared logic for checking permissions. Returns a dict with check results."""
if action not in ds.actions:
return {"error": f"Unknown action: {action}"}, 404
if child and not parent:
return {"error": "parent is required when child is provided"}, 400
# Use the action's properties to create the appropriate resource object
action_obj = ds.actions.get(action)
if not action_obj:
return {"error": f"Unknown action: {action}"}, 400
# Global actions (no resource_class) don't have a resource
if action_obj.resource_class is None:
resource_obj = None
elif action_obj.takes_parent and action_obj.takes_child:
# Child-level resource (e.g., TableResource, QueryResource)
resource_obj = action_obj.resource_class(database=parent, table=child)
elif action_obj.takes_parent:
# Parent-level resource (e.g., DatabaseResource)
resource_obj = action_obj.resource_class(database=parent)
else:
# This shouldn't happen given validation in Action.__post_init__
return {"error": f"Invalid action configuration: {action}"}, 500
allowed = await ds.allowed(action=action, resource=resource_obj, actor=actor)
response = {
"action": action,
"allowed": bool(allowed),
"resource": {
"parent": parent,
"child": child,
"path": _resource_path(parent, child),
},
}
if actor and "id" in actor:
response["actor_id"] = actor["id"]
return response, 200
| PermissionRulesView |
python | sphinx-doc__sphinx | sphinx/roles.py | {
"start": 1176,
"end": 6340
} | class ____(ReferenceRole):
"""A generic cross-referencing role. To create a callable that can be used as
a role function, create an instance of this class.
The general features of this role are:
* Automatic creation of a reference and a content node.
* Optional separation of title and target with `title <target>`.
* The implementation is a class rather than a function to make
customization easier.
Customization can be done in two ways:
* Supplying constructor parameters:
* `fix_parens` to normalize parentheses (strip from target, and add to
title if configured)
* `lowercase` to lowercase the target
* `nodeclass` and `innernodeclass` select the node classes for
the reference and the content node
* Subclassing and overwriting `process_link()` and/or `result_nodes()`.
"""
nodeclass: type[Element] = addnodes.pending_xref
innernodeclass: type[TextElement] = nodes.literal
def __init__(
self,
fix_parens: bool = False,
lowercase: bool = False,
nodeclass: type[Element] | None = None,
innernodeclass: type[TextElement] | None = None,
warn_dangling: bool = False,
) -> None:
self.fix_parens = fix_parens
self.lowercase = lowercase
self.warn_dangling = warn_dangling
if nodeclass is not None:
self.nodeclass = nodeclass
if innernodeclass is not None:
self.innernodeclass = innernodeclass
super().__init__()
def update_title_and_target(self, title: str, target: str) -> tuple[str, str]:
if not self.has_explicit_title:
if self.config.add_function_parentheses:
if not title.endswith('()'):
# add parentheses to the title
title += '()'
else:
# remove parentheses
title = title.removesuffix('()')
# remove parentheses from the target
target = target.removesuffix('()')
return title, target
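# Illustrative example (implicit title with add_function_parentheses=True):
# title "open" and target "open()" normalize to ("open()", "open").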
def run(self) -> tuple[list[Node], list[system_message]]:
if ':' not in self.name:
self.refdomain, self.reftype = '', self.name
self.classes = ['xref', self.reftype]
else:
self.refdomain, _, self.reftype = self.name.partition(':')
self.classes = ['xref', self.refdomain, f'{self.refdomain}-{self.reftype}']
if self.disabled:
return self.create_non_xref_node()
else:
return self.create_xref_node()
def create_non_xref_node(self) -> tuple[list[Node], list[system_message]]:
text = utils.unescape(self.text[1:])
if self.fix_parens:
self.has_explicit_title = False # treat as implicit
text, _target = self.update_title_and_target(text, '')
node = self.innernodeclass(self.rawtext, text, classes=self.classes)
return self.result_nodes(self.inliner.document, self.env, node, is_ref=False)
def create_xref_node(self) -> tuple[list[Node], list[system_message]]:
target = self.target
title = self.title
if self.lowercase:
target = target.lower()
if self.fix_parens:
title, target = self.update_title_and_target(title, target)
# create the reference node
options = {
'refdoc': self.env.current_document.docname,
'refdomain': self.refdomain,
'reftype': self.reftype,
'refexplicit': self.has_explicit_title,
'refwarn': self.warn_dangling,
}
refnode = self.nodeclass(self.rawtext, **options)
self.set_source_info(refnode)
# determine the target and title for the class
title, target = self.process_link(
self.env, refnode, self.has_explicit_title, title, target
)
refnode['reftarget'] = target
refnode += self.innernodeclass(self.rawtext, title, classes=self.classes)
return self.result_nodes(self.inliner.document, self.env, refnode, is_ref=True)
# methods that can be overwritten
def process_link(
self,
env: BuildEnvironment,
refnode: Element,
has_explicit_title: bool,
title: str,
target: str,
) -> tuple[str, str]:
"""Called after parsing title and target text, and creating the
reference node (given in *refnode*). This method can alter the
reference node and must return a new (or the same) ``(title, target)``
tuple.
"""
return title, ws_re.sub(' ', target)
def result_nodes(
self,
document: nodes.document,
env: BuildEnvironment,
node: Element,
is_ref: bool,
) -> tuple[list[Node], list[system_message]]:
"""Called before returning the finished nodes. *node* is the reference
node if one was created (*is_ref* is then true), else the content node.
This method can add other nodes and must return a ``(nodes, messages)``
tuple (the usual return value of a role function).
"""
return [node], []
| XRefRole |
python | astropy__astropy | astropy/utils/masked/tests/test_functions.py | {
"start": 19218,
"end": 20174
} | class ____(MaskedArraySetup):
@pytest.mark.parametrize("n,axis", [(1, -1), (2, -1), (1, 0)])
def test_diff(self, n, axis):
mda = np.diff(self.ma, n=n, axis=axis)
expected_data = np.diff(self.a, n, axis)
nan_mask = np.zeros_like(self.a)
nan_mask[self.ma.mask] = np.nan
expected_mask = np.isnan(np.diff(nan_mask, n=n, axis=axis))
assert_array_equal(mda.unmasked, expected_data)
assert_array_equal(mda.mask, expected_mask)
def test_diff_explicit(self):
ma = Masked(
np.arange(8.0), [True, False, False, False, False, True, False, False]
)
mda = np.diff(ma)
assert np.all(mda.unmasked == 1.0)
assert np.all(mda.mask == [True, False, False, False, True, True, False])
mda = np.diff(ma, n=2)
assert np.all(mda.unmasked == 0.0)
assert np.all(mda.mask == [True, False, False, True, True, True])
| TestMaskedArrayCalculation |
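The mask-propagation rule exercised above (a diff output element is masked whenever any input it touches is masked) can be checked standalone with the same NaN trick the test uses:

import numpy as np

mask = np.array([True, False, False, False, False, True, False, False])
nan_proxy = np.zeros(8)
nan_proxy[mask] = np.nan
out_mask = np.isnan(np.diff(nan_proxy))
print(out_mask)  # [ True False False False  True  True False]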
python | huggingface__transformers | src/transformers/models/funnel/modeling_funnel.py | {
"start": 49573,
"end": 52499
} | class ____(FunnelPreTrainedModel):
def __init__(self, config: FunnelConfig) -> None:
super().__init__(config)
self.funnel = FunnelBaseModel(config)
self.classifier = FunnelClassificationHead(config, 1)
# Initialize weights and apply final processing
self.post_init()
@auto_docstring
def forward(
self,
input_ids: Optional[torch.Tensor] = None,
attention_mask: Optional[torch.Tensor] = None,
token_type_ids: Optional[torch.Tensor] = None,
inputs_embeds: Optional[torch.Tensor] = None,
labels: Optional[torch.Tensor] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
) -> Union[tuple, MultipleChoiceModelOutput]:
r"""
labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
Labels for computing the multiple choice classification loss. Indices should be in `[0, ...,
num_choices-1]` where `num_choices` is the size of the second dimension of the input tensors. (See
`input_ids` above)
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
num_choices = input_ids.shape[1] if input_ids is not None else inputs_embeds.shape[1]
input_ids = input_ids.view(-1, input_ids.size(-1)) if input_ids is not None else None
attention_mask = attention_mask.view(-1, attention_mask.size(-1)) if attention_mask is not None else None
token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) if token_type_ids is not None else None
inputs_embeds = (
inputs_embeds.view(-1, inputs_embeds.size(-2), inputs_embeds.size(-1))
if inputs_embeds is not None
else None
)
outputs = self.funnel(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
last_hidden_state = outputs[0]
pooled_output = last_hidden_state[:, 0]
logits = self.classifier(pooled_output)
reshaped_logits = logits.view(-1, num_choices)
loss = None
if labels is not None:
loss_fct = CrossEntropyLoss()
loss = loss_fct(reshaped_logits, labels)
if not return_dict:
output = (reshaped_logits,) + outputs[1:]
return ((loss,) + output) if loss is not None else output
return MultipleChoiceModelOutput(
loss=loss,
logits=reshaped_logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
@auto_docstring
| FunnelForMultipleChoice |
python | huggingface__transformers | src/transformers/models/olmo2/modular_olmo2.py | {
"start": 9233,
"end": 9609
} | class ____(OlmoRotaryEmbedding):
pass
def rotate_half(x):
"""Rotates half the hidden dims of the input."""
x1 = x[..., : x.shape[-1] // 2]
x2 = x[..., x.shape[-1] // 2 :]
return torch.cat((-x2, x1), dim=-1)
# Olmo2 attention is identical to OLMo attention except:
# - Norm is applied to attention queries and keys.
# - No qkv clipping.
| Olmo2RotaryEmbedding |
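A quick check of rotate_half() on a tiny tensor, plus the standard rotary application q * cos + rotate_half(q) * sin that functions like this typically feed into (the application step is assumed here; it is not part of the snippet above):

import torch

x = torch.tensor([1.0, 2.0, 3.0, 4.0])
print(rotate_half(x))  # tensor([-3., -4.,  1.,  2.])

cos, sin = torch.ones(4), torch.zeros(4)
print(x * cos + rotate_half(x) * sin)  # zero angle, so the identity: tensor([1., 2., 3., 4.])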
python | tensorflow__tensorflow | tensorflow/python/compiler/tensorrt/test/conv2d_test.py | {
"start": 4663,
"end": 5935
} | class ____(trt_test.TfTrtIntegrationTestBase):
"""Testing conversion of strided Conv2D (data_format=NCHW)."""
def GraphFn(self, inp):
np.random.seed(1234)
num_filters = 5
output = inp
output = conv2d_layer(
output,
num_filters, (3, 2),
strides=(2, 2),
padding="same",
data_format="channels_first")
output = conv2d_layer(
output,
num_filters, (3, 3),
strides=(2, 2),
dilation_rate=(2, 3),
padding="same",
data_format="channels_first")
return array_ops.identity(output, name="output_0")
def GetParams(self):
return self.BuildParams(self.GraphFn, dtypes.float32, [[13, 3, 7, 11]],
[[13, 5, 2, 3]])
def ExpectedEnginesToBuild(self, run_params):
"""Return the expected engines to build."""
return ["TRTEngineOp_000"]
def ExpectedAbsoluteTolerance(self, run_params):
"""The absolute tolerance to compare floating point results."""
return 5.e-01 if run_params.precision_mode == "INT8" else 1.e-02
def ExpectedRelativeTolerance(self, run_params):
"""The relative tolerance to compare floating point results."""
return 5.e-00 if run_params.precision_mode == "INT8" else 1.e-02
| Conv2DStridedNCHWTest |
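Why the expected output shape is [13, 5, 2, 3]: with "same" padding, a stride-2 convolution maps each spatial dim n to ceil(n / 2) regardless of kernel size or dilation, and the channel dim becomes num_filters (5). A two-line check:

import math

h, w = 7, 11
for _ in range(2):  # two stride-2 layers
    h, w = math.ceil(h / 2), math.ceil(w / 2)
print(h, w)  # 2 3  -> NCHW output [13, 5, 2, 3]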
python | tensorflow__tensorflow | tensorflow/python/ops/image_ops_test.py | {
"start": 184540,
"end": 188778
} | class ____(test_util.TensorFlowTestCase):
def testExisting(self):
# Read some real PNGs, converting to different channel numbers
prefix = "tensorflow/core/lib/png/testdata/"
inputs = ((1, "lena_gray.png"), (4, "lena_rgba.png"),
(3, "lena_palette.png"), (4, "lena_palette_trns.png"))
for channels_in, filename in inputs:
for channels in 0, 1, 3, 4:
with self.cached_session():
png0 = io_ops.read_file(prefix + filename)
image0 = image_ops.decode_png(png0, channels=channels)
png0, image0 = self.evaluate([png0, image0])
self.assertEqual(image0.shape, (26, 51, channels or channels_in))
if channels == channels_in:
image1 = image_ops.decode_png(image_ops.encode_png(image0))
self.assertAllEqual(image0, self.evaluate(image1))
def testSynthetic(self):
with self.cached_session():
# Encode it, then decode it
image0 = constant_op.constant(simple_color_ramp())
png0 = image_ops.encode_png(image0, compression=7)
image1 = image_ops.decode_png(png0)
png0, image0, image1 = self.evaluate([png0, image0, image1])
# PNG is lossless
self.assertAllEqual(image0, image1)
# Smooth ramps compress well, but not too well
self.assertGreaterEqual(len(png0), 400)
self.assertLessEqual(len(png0), 1150)
def testSyntheticUint16(self):
with self.cached_session():
# Encode it, then decode it
image0 = constant_op.constant(simple_color_ramp(), dtype=dtypes.uint16)
png0 = image_ops.encode_png(image0, compression=7)
image1 = image_ops.decode_png(png0, dtype=dtypes.uint16)
png0, image0, image1 = self.evaluate([png0, image0, image1])
# PNG is lossless
self.assertAllEqual(image0, image1)
# Smooth ramps compress well, but not too well
self.assertGreaterEqual(len(png0), 800)
self.assertLessEqual(len(png0), 2100)
def testSyntheticTwoChannel(self):
with self.cached_session():
# Strip the b channel from an rgb image to get a two-channel image.
gray_alpha = simple_color_ramp()[:, :, 0:2]
image0 = constant_op.constant(gray_alpha)
png0 = image_ops.encode_png(image0, compression=7)
image1 = image_ops.decode_png(png0)
png0, image0, image1 = self.evaluate([png0, image0, image1])
self.assertEqual(2, image0.shape[-1])
self.assertAllEqual(image0, image1)
def testSyntheticTwoChannelUint16(self):
with self.cached_session():
# Strip the b channel from an rgb image to get a two-channel image.
gray_alpha = simple_color_ramp()[:, :, 0:2]
image0 = constant_op.constant(gray_alpha, dtype=dtypes.uint16)
png0 = image_ops.encode_png(image0, compression=7)
image1 = image_ops.decode_png(png0, dtype=dtypes.uint16)
png0, image0, image1 = self.evaluate([png0, image0, image1])
self.assertEqual(2, image0.shape[-1])
self.assertAllEqual(image0, image1)
def testBatchedEncodeSynthetic(self):
with self.cached_session():
image0 = simple_color_ramp()
image_stack = np.broadcast_to(image0, (3, 4) + image0.shape)
png0 = self.evaluate(image_ops.encode_png(image0, compression=7))
png_stack = self.evaluate(
image_ops.encode_png(image_stack, compression=7)
)
# PNG is lossless
expected = np.broadcast_to(png0, (3, 4))
self.assertAllEqual(png_stack, expected)
def testBatchedZeroLengthEncodeSynthetic(self):
with self.cached_session():
image0 = simple_color_ramp()
image_stack = np.broadcast_to(image0, (3, 4) + image0.shape)
image_stack = image_stack[:0]
png_stack = self.evaluate(
image_ops.encode_png(image_stack, compression=7)
)
self.assertAllEqual(png_stack.shape, (0, 4))
def testShape(self):
# Shape function requires placeholders and a graph.
with ops.Graph().as_default():
with self.cached_session():
png = constant_op.constant("nonsense")
for channels in 0, 1, 3:
image = image_ops.decode_png(png, channels=channels)
self.assertEqual(image.get_shape().as_list(),
[None, None, channels or None])
| PngTest |
python | airbytehq__airbyte | airbyte-integrations/connectors/source-github/source_github/github_schema.py | {
"start": 1122219,
"end": 1122596
} | class ____(sgqlc.types.Type, Contribution):
"""Represents the contribution a user made on GitHub by creating a
repository.
"""
__schema__ = github_schema
__field_names__ = ("repository",)
repository = sgqlc.types.Field(sgqlc.types.non_null("Repository"), graphql_name="repository")
"""The repository that was created."""
| CreatedRepositoryContribution |
python | apache__airflow | providers/openlineage/tests/unit/openlineage/extractors/test_base.py | {
"start": 2167,
"end": 2386
} | class ____(JobFacet):
failed: bool = field(default=False)
FINISHED_FACETS: dict[str, JobFacet] = {"complete": CompleteRunFacet(True)}
FAILED_FACETS: dict[str, JobFacet] = {"failure": FailRunFacet(True)}
| FailRunFacet |
python | dagster-io__dagster | python_modules/dagster/dagster/components/core/defs_module.py | {
"start": 7429,
"end": 14061
} | class ____(Component):
"""A component that represents a directory containing multiple Dagster definition modules.
DefsFolderComponent serves as a container for organizing and managing multiple subcomponents
within a folder structure. It automatically discovers and loads components from subdirectories
and files, enabling hierarchical organization of Dagster definitions. This component also
supports post-processing capabilities to modify metadata and properties of definitions
created by its child components.
Key Features:
- **Post-Processing**: Allows modification of child component definitions via configuration
- **Automatic Discovery**: Recursively finds and loads components from subdirectories
- **Hierarchical Organization**: Enables nested folder structures for complex projects
The component automatically scans its directory for:
- YAML component definitions (``defs.yaml`` files)
- Python modules containing Dagster definitions
- Nested subdirectories containing more components
Here is how a DefsFolderComponent is used in a project by the framework, along
with other framework-defined classes.
.. code-block:: text
my_project/
└── defs/
├── analytics/ # DefsFolderComponent
│ ├── defs.yaml # Post-processing configuration
│ ├── user_metrics/ # User-defined component
│ │ └── defs.yaml
│ └── sales_reports/ # User-defined component
│ └── defs.yaml
└── data_ingestion/ # DefsFolderComponent
├── api_sources/ # DefsFolderComponent
│ └── some_defs.py # PythonFileComponent
└── file_sources/ # DefsFolderComponent
└── files.py # PythonFileComponent
Args:
path: The filesystem path to the directory containing child components.
children: A mapping of child paths to their corresponding Component instances.
This is typically populated automatically during component discovery.
DefsFolderComponent supports post-processing through its ``defs.yaml`` configuration,
allowing you to modify definitions created by child components using target selectors
Examples:
Using post-processing in a folder's ``defs.yaml``:
.. code-block:: yaml
# analytics/defs.yaml
type: dagster.DefsFolderComponent
post_processing:
assets:
- target: "*" # add a top level tag to all assets in the folder
attributes:
tags:
top_level_tag: "true"
- target: "tag:defs_tag=true" # add a tag to all assets in the folder with the tag "defs_tag"
attributes:
tags:
new_tag: "true"
Please see documentation on post processing and the selection syntax for more examples.
Component Discovery:
The component automatically discovers children using these patterns:
1. **YAML Components**: Subdirectories with ``defs.yaml`` files
2. **Python Modules**: Any ``.py`` files containing Dagster definitions
3. **Nested Folders**: Subdirectories that contain any of the above
Files and directories matching these patterns are ignored:
- ``__pycache__`` directories
- Hidden directories (starting with ``.``)
.. note::
DefsFolderComponent instances are typically created automatically by Dagster's
component loading system. Manual instantiation is rarely needed unless building
custom loading logic or testing scenarios.
When used with post-processing, the folder's ``defs.yaml`` should only contain
post-processing configuration, not component type definitions.
"""
path: Path
children: Mapping[Path, Component]
@classmethod
def get_decl_type(cls) -> type["ComponentDecl"]:
from dagster.components.core.decl import DefsFolderDecl
return DefsFolderDecl
@classmethod
def get_model_cls(cls):
return DefsFolderComponentYamlSchema.model()
@classmethod
def load(cls, attributes: Any, context: ComponentLoadContext) -> "DefsFolderComponent":
return DefsFolderComponent(
path=context.path,
children=find_components_from_context(context),
)
def build_defs(self, context: ComponentLoadContext) -> Definitions:
child_defs = [
context.build_defs_at_path(child_decl.path)
for child_decl in context.component_decl.iterate_child_component_decls()
]
return Definitions.merge(*child_defs)
@classmethod
def get(cls, context: ComponentLoadContext) -> "DefsFolderComponent":
component = get_component(context)
return check.inst(
component,
DefsFolderComponent,
f"Expected DefsFolderComponent at {context.path}, got {component}.",
)
def iterate_components(self) -> Iterator[Component]:
for _, component in self.iterate_path_component_pairs():
yield component
def iterate_path_component_pairs(self) -> Iterator[tuple[ComponentPath, Component]]:
for path, component in self.children.items():
yield ComponentPath.from_path(path), component
if isinstance(component, DefsFolderComponent):
yield from component.iterate_path_component_pairs()
if isinstance(component, CompositeYamlComponent):
for idx, inner_comp in enumerate(component.components):
yield ComponentPath.from_path(path, idx), inner_comp
if isinstance(component, PythonFileComponent):
for attr, inner_comp in component.components.items():
yield ComponentPath.from_path(path, attr), inner_comp
EXPLICITLY_IGNORED_GLOB_PATTERNS = [
"__pycache__",
".*/",
]
def find_components_from_context(context: ComponentLoadContext) -> Mapping[Path, Component]:
found = {}
for subpath in sorted(context.path.iterdir()):
relative_subpath = subpath.relative_to(context.path)
if any(relative_subpath.match(pattern) for pattern in EXPLICITLY_IGNORED_GLOB_PATTERNS):
continue
component = get_component(
context.for_component_path(ComponentPath(file_path_posix=subpath.absolute().as_posix()))
)
if component:
found[subpath] = component
return found
@dataclass
| DefsFolderComponent |
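A minimal sketch of walking a loaded defs folder with the API above; obtaining `context` is assumed here, since in practice the framework supplies a ComponentLoadContext during component loading:

folder = DefsFolderComponent.get(context)
for component_path, component in folder.iterate_path_component_pairs():
    print(component_path, type(component).__name__)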
python | apache__airflow | providers/fab/src/airflow/providers/fab/auth_manager/models/anonymous_user.py | {
"start": 971,
"end": 1947
} | class ____(AnonymousUserMixin, BaseUser):
"""User object used when no active user is logged in."""
_roles: set[tuple[str, str]] = set()
_perms: set[tuple[str, str]] = set()
first_name = "Anonymous"
last_name = ""
@property
def roles(self):
if not self._roles:
public_role = current_app.config.get("AUTH_ROLE_PUBLIC", None)
self._roles = {current_app.appbuilder.sm.find_role(public_role)} if public_role else set()
return list(self._roles)
@roles.setter
def roles(self, roles):
self._roles = roles
self._perms = set()
@property
def groups(self):
return []
@property
def perms(self):
if not self._perms:
self._perms = {
(perm.action.name, perm.resource.name) for role in self.roles for perm in role.permissions
}
return self._perms
def get_name(self) -> str:
return "Anonymous"
| AnonymousUser |
python | django__django | tests/transactions/models.py | {
"start": 286,
"end": 602
} | class ____(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.EmailField()
class Meta:
ordering = ("first_name", "last_name")
def __str__(self):
return ("%s %s" % (self.first_name, self.last_name)).strip()
| Reporter |
python | openai__openai-python | src/openai/types/static_file_chunking_strategy_object_param.py | {
"start": 319,
"end": 508
} | class ____(TypedDict, total=False):
static: Required[StaticFileChunkingStrategyParam]
type: Required[Literal["static"]]
"""Always `static`."""
| StaticFileChunkingStrategyObjectParam |
python | facebookresearch__faiss | tests/test_io.py | {
"start": 1399,
"end": 6372
} | class ____(unittest.TestCase):
def do_write_callback(self, bsz):
d, n = 32, 1000
x = np.random.uniform(size=(n, d)).astype('float32')
index = faiss.IndexFlatL2(d)
index.add(x)
f = io.BytesIO()
# test with small block size
writer = faiss.PyCallbackIOWriter(f.write, 1234)
if bsz > 0:
writer = faiss.BufferedIOWriter(writer, bsz)
faiss.write_index(index, writer)
del writer # make sure all writes committed
if sys.version_info[0] < 3:
buf = f.getvalue()
else:
buf = f.getbuffer()
index2 = faiss.deserialize_index(np.frombuffer(buf, dtype='uint8'))
self.assertEqual(index.d, index2.d)
np.testing.assert_array_equal(
faiss.vector_to_array(index.codes),
faiss.vector_to_array(index2.codes)
)
# This is not a callable function: should raise an exception
writer = faiss.PyCallbackIOWriter("blabla")
self.assertRaises(
Exception,
faiss.write_index, index, writer
)
def test_buf_read(self):
x = np.random.uniform(size=20)
fd, fname = tempfile.mkstemp()
os.close(fd)
try:
x.tofile(fname)
with open(fname, 'rb') as f:
reader = faiss.PyCallbackIOReader(f.read, 1234)
bsz = 123
reader = faiss.BufferedIOReader(reader, bsz)
y = np.zeros_like(x)
reader(faiss.swig_ptr(y), y.nbytes, 1)
np.testing.assert_array_equal(x, y)
finally:
if os.path.exists(fname):
os.unlink(fname)
def do_read_callback(self, bsz):
d, n = 32, 1000
x = np.random.uniform(size=(n, d)).astype('float32')
index = faiss.IndexFlatL2(d)
index.add(x)
fd, fname = tempfile.mkstemp()
os.close(fd)
try:
faiss.write_index(index, fname)
with open(fname, 'rb') as f:
reader = faiss.PyCallbackIOReader(f.read, 1234)
if bsz > 0:
reader = faiss.BufferedIOReader(reader, bsz)
index2 = faiss.read_index(reader)
self.assertEqual(index.d, index2.d)
np.testing.assert_array_equal(
faiss.vector_to_array(index.codes),
faiss.vector_to_array(index2.codes)
)
# This is not a callable function: should raise an exception
reader = faiss.PyCallbackIOReader("blabla")
self.assertRaises(
Exception,
faiss.read_index, reader
)
finally:
if os.path.exists(fname):
os.unlink(fname)
def test_write_callback(self):
self.do_write_callback(0)
def test_write_buffer(self):
self.do_write_callback(123)
self.do_write_callback(2345)
def test_read_callback(self):
self.do_read_callback(0)
def test_read_callback_buffered(self):
self.do_read_callback(123)
self.do_read_callback(12345)
def test_read_buffer(self):
d, n = 32, 1000
x = np.random.uniform(size=(n, d)).astype('float32')
index = faiss.IndexFlatL2(d)
index.add(x)
fd, fname = tempfile.mkstemp()
os.close(fd)
try:
faiss.write_index(index, fname)
reader = faiss.BufferedIOReader(
faiss.FileIOReader(fname), 1234)
index2 = faiss.read_index(reader)
self.assertEqual(index.d, index2.d)
np.testing.assert_array_equal(
faiss.vector_to_array(index.codes),
faiss.vector_to_array(index2.codes)
)
finally:
del reader
if os.path.exists(fname):
os.unlink(fname)
def test_transfer_pipe(self):
""" transfer an index through a Unix pipe """
d, n = 32, 1000
x = np.random.uniform(size=(n, d)).astype('float32')
index = faiss.IndexFlatL2(d)
index.add(x)
Dref, Iref = index.search(x, 10)
rf, wf = os.pipe()
# start thread that will decompress the index
def index_from_pipe():
reader = faiss.PyCallbackIOReader(lambda size: os.read(rf, size))
return faiss.read_index(reader)
with ThreadPool(1) as pool:
fut = pool.apply_async(index_from_pipe, ())
# write to pipe
writer = faiss.PyCallbackIOWriter(lambda b: os.write(wf, b))
faiss.write_index(index, writer)
index2 = fut.get()
# closing is not really useful but it does not hurt
os.close(wf)
os.close(rf)
Dnew, Inew = index2.search(x, 10)
np.testing.assert_array_equal(Iref, Inew)
np.testing.assert_array_equal(Dref, Dnew)
| TestCallbacks |
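The same callback mechanism can stream an index through any file-like object. A sketch that gzip-compresses an index on the fly, assuming `index` is any populated faiss index (this mirrors the pipe test above, with gzip in place of the pipe):

import gzip
import faiss

with gzip.open("/tmp/index.gz", "wb") as f:
    writer = faiss.PyCallbackIOWriter(f.write)
    faiss.write_index(index, writer)
    del writer  # commit pending writes before the file closes

with gzip.open("/tmp/index.gz", "rb") as f:
    reader = faiss.PyCallbackIOReader(f.read)
    index2 = faiss.read_index(reader)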
python | getsentry__sentry | tests/sentry/preprod/api/endpoints/pull_request/test_organization_pullrequest_details.py | {
"start": 431,
"end": 10360
} | class ____(TestCase):
def setUp(self):
super().setUp()
self.factory = APIRequestFactory()
self.integration = self.create_integration(
organization=self.organization,
provider="github",
name="Test GitHub Integration",
external_id="12345",
metadata={
"access_token": "test-token",
"expires_at": None,
"installation": {"id": 12345, "account": {"login": "getsentry"}},
},
)
self.repository = Repository.objects.create(
organization_id=self.organization.id,
name="getsentry/sentry",
provider="integrations:github",
integration_id=self.integration.id,
)
self.mock_pr_details = {
"id": 123456,
"number": 100,
"title": "Add new feature",
"body": "This PR adds a new feature to improve user experience",
"state": "open",
"user": {
"id": 789,
"login": "testuser",
"name": "Test User",
"avatar_url": "https://github.com/testuser.png",
},
"head": {"ref": "feature/new-feature"},
"base": {"ref": "main"},
"created_at": "2023-01-01T12:00:00Z",
"updated_at": "2023-01-02T10:30:00Z",
"merged_at": None,
"closed_at": None,
"html_url": "https://github.com/getsentry/sentry/pull/100",
"commits": 3,
"additions": 150,
"deletions": 50,
"changed_files": 5,
}
self.mock_pr_files = [
{
"filename": "src/components/Button.tsx",
"status": "modified",
"additions": 10,
"deletions": 2,
"changes": 12,
"sha": "abc123def456",
"patch": "@@ -1,3 +1,3 @@\n- old line\n+ new line",
},
{
"filename": "src/utils/helper.ts",
"status": "added",
"additions": 25,
"deletions": 0,
"changes": 25,
"sha": "def456ghi789",
"patch": None,
},
{
"filename": "tests/Button.test.tsx",
"status": "added",
"additions": 50,
"deletions": 0,
"changes": 50,
"sha": "ghi789jkl012",
"patch": None,
},
{
"filename": "old-file.js",
"status": "removed",
"additions": 0,
"deletions": 15,
"changes": 15,
"sha": None,
"patch": None,
},
{
"filename": "new-component.tsx",
"status": "renamed",
"additions": 5,
"deletions": 3,
"changes": 8,
"previous_filename": "old-component.tsx",
"sha": "jkl012mno345",
"patch": None,
},
]
def _make_request(self, repo_name="getsentry/sentry", pr_number="100"):
"""Helper to make API request."""
request = self.factory.get("/")
request.user = self.user
endpoint = OrganizationPullRequestDetailsEndpoint()
return endpoint.get(
request=request,
organization=self.organization,
repo_name=repo_name,
pr_number=pr_number,
)
@with_feature("organizations:pr-page")
@patch("sentry.integrations.github.client.GitHubApiClient.get_pullrequest_files")
@patch("sentry.integrations.github.client.GitHubApiClient.get")
def test_successful_pr_details_fetch(self, mock_get, mock_get_files):
"""Test successful PR details and files fetch with proper normalization."""
# Setup GitHub API response mocks (only mock the HTTP calls)
mock_get_files.return_value = self.mock_pr_files
mock_get.return_value = self.mock_pr_details
response = self._make_request()
assert response.status_code == 200
mock_get_files.assert_called_once_with("getsentry/sentry", "100")
mock_get.assert_called_once_with("/repos/getsentry/sentry/pulls/100")
assert "pull_request" in response.data
assert "files" in response.data
pr_data = response.data["pull_request"]
assert pr_data["id"] == "123456"
assert pr_data["number"] == 100
assert pr_data["title"] == "Add new feature"
assert pr_data["state"] == "open"
assert pr_data["author"]["username"] == "testuser"
assert pr_data["author"]["display_name"] == "Test User"
assert pr_data["source_branch"] == "feature/new-feature"
assert pr_data["target_branch"] == "main"
assert pr_data["additions"] == 150
assert pr_data["deletions"] == 50
assert pr_data["changed_files_count"] == 5
assert pr_data["created_at"] is not None
files_data = response.data["files"]
assert len(files_data) == 5
modified_file = files_data[0]
assert modified_file["filename"] == "src/components/Button.tsx"
assert modified_file["status"] == "modified"
assert modified_file["additions"] == 10
assert modified_file["deletions"] == 2
added_file = next(f for f in files_data if f["status"] == "added")
assert added_file["filename"] == "src/utils/helper.ts"
renamed_file = next(f for f in files_data if f["status"] == "renamed")
assert renamed_file["previous_filename"] == "old-component.tsx"
@with_feature("organizations:pr-page")
def test_no_github_client(self):
"""Test when no GitHub client is available (no integration set up)."""
Repository.objects.create(
organization_id=self.organization.id,
name="nonexistent/repo",
provider="integrations:github",
integration_id=None, # No integration
)
response = self._make_request(repo_name="nonexistent/repo")
assert response.status_code == 404
assert response.data["error"] == "integration_not_found"
assert "No GitHub integration found" in response.data["message"]
@with_feature("organizations:pr-page")
@patch("sentry.integrations.github.client.GitHubApiClient.get_pullrequest_files")
def test_github_api_error(self, mock_get_files):
"""Test GitHub API error handling."""
# Simulate GitHub API error
mock_get_files.side_effect = ApiError("API rate limit exceeded")
response = self._make_request()
assert response.status_code == 502
assert response.data["error"] == "api_error"
assert "Failed to fetch pull request data from GitHub" in response.data["message"]
@with_feature("organizations:pr-page")
@patch("sentry.integrations.github.client.GitHubApiClient.get_pullrequest_files")
@patch("sentry.integrations.github.client.GitHubApiClient.get")
def test_empty_pr_files(self, mock_get, mock_get_files):
"""Test handling of PR with no files changed."""
mock_get_files.return_value = []
mock_get.return_value = {**self.mock_pr_details, "changed_files": 0}
response = self._make_request()
assert response.status_code == 200
assert len(response.data["files"]) == 0
assert response.data["pull_request"]["changed_files_count"] == 0
@with_feature("organizations:pr-page")
def test_repository_not_found(self):
"""Test when repository doesn't exist in the database."""
response = self._make_request(repo_name="does-not/exist")
assert response.status_code == 404
assert response.data["error"] == "integration_not_found"
assert "No GitHub integration found" in response.data["message"]
@with_feature("organizations:pr-page")
@patch("sentry.integrations.github.client.GitHubApiClient.get_pullrequest_files")
@patch("sentry.integrations.github.client.GitHubApiClient.get")
def test_missing_timestamps_handled_correctly(self, mock_get, mock_get_files):
"""Test that missing timestamps are properly handled without type errors."""
# Create PR data missing created_at and updated_at timestamps
pr_data_missing_timestamps = {
"id": 123456,
"number": 100,
"title": "Add new feature",
"body": "This PR adds a new feature to improve user experience",
"state": "open",
"user": {
"id": 789,
"login": "testuser",
"name": "Test User",
"avatar_url": "https://github.com/testuser.png",
},
"head": {"ref": "feature/new-feature"},
"base": {"ref": "main"},
# Missing created_at and updated_at
"merged_at": None,
"closed_at": None,
"html_url": "https://github.com/getsentry/sentry/pull/100",
"commits": 3,
"additions": 150,
"deletions": 50,
"changed_files": 0,
}
mock_get_files.return_value = []
mock_get.return_value = pr_data_missing_timestamps
response = self._make_request()
assert response.status_code == 200
assert "pull_request" in response.data
pr_data = response.data["pull_request"]
# Verify that missing timestamps are handled as None
assert pr_data["created_at"] is None
assert pr_data["updated_at"] is None
# Verify other fields work correctly
assert pr_data["id"] == "123456"
assert pr_data["number"] == 100
assert pr_data["title"] == "Add new feature"
| OrganizationPullRequestDetailsEndpointTest |
python | django__django | tests/model_inheritance_regress/models.py | {
"start": 1821,
"end": 1897
} | class ____(SelfRefParent):
child_data = models.IntegerField()
| SelfRefChild |
python | numba__numba | numba/core/types/containers.py | {
"start": 13055,
"end": 14474
} | class ____(Literal, ConstSized, Hashable):
"""A heterogeneous immutable list (basically a tuple with list semantics).
"""
mutable = False
def __init__(self, literal_value):
self.is_types_iterable(literal_value)
self._literal_init(list(literal_value))
self.types = tuple(literal_value)
self.count = len(self.types)
self.name = "LiteralList({})".format(literal_value)
def __getitem__(self, i):
"""
Return element at position i
"""
return self.types[i]
def __len__(self):
return len(self.types)
def __iter__(self):
return iter(self.types)
@classmethod
def from_types(cls, tys):
return LiteralList(tys)
@staticmethod
def is_types_iterable(types):
if not isinstance(types, Iterable):
raise TypingError("Argument 'types' is not iterable")
@property
def iterator_type(self):
return ListIter(self)
def __unliteral__(self):
return Poison(self)
def unify(self, typingctx, other):
"""
Unify this with the *other* one.
"""
if isinstance(other, LiteralList) and self.count == other.count:
tys = []
for i1, i2 in zip(self.types, other.types):
tys.append(typingctx.unify_pairs(i1, i2))
if all(tys):
return LiteralList(tys)
| LiteralList |
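LiteralList is an internal typing construct rather than user-facing API, so the following is only an illustrative sketch of constructing and inspecting one:

from numba.core import types

lt = types.LiteralList([types.int64, types.unicode_type])
print(len(lt), lt[0], lt.count)  # 2 int64 2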
python | FactoryBoy__factory_boy | tests/test_transformer.py | {
"start": 5650,
"end": 6012
} | class ____(factory.Factory):
class Meta:
model = TestObject
class Params:
upper_two = factory.Trait(
two=factory.Transformer("two", transform=str.upper)
)
odds = factory.Trait(
one="one",
three="three",
)
one = factory.Transformer("one", transform=str.upper)
| WithTraitFactory |
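A sketch of how the Transformer declarations above behave; TestObject is assumed to be a plain holder with one/two/three attributes:

obj = WithTraitFactory(one="mixed")
print(obj.one)  # "MIXED" - user-supplied values pass through str.upper too

obj = WithTraitFactory(upper_two=True, two="shout")
print(obj.two)  # "SHOUT" - the trait installs an uppercasing Transformer on `two`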
python | jazzband__django-pipeline | tests/tests/test_forms.py | {
"start": 1169,
"end": 6785
} | class ____(TestCase):
"""Unit tests for pipeline.forms.PipelineFormMedia."""
@pipeline_settings(PIPELINE_ENABLED=True)
def test_css_packages_with_pipeline_enabled(self):
"""Testing PipelineFormMedia.css_packages with PIPELINE_ENABLED=True"""
class MyMedia(PipelineFormMedia):
css_packages = {
"all": ("styles1", "styles2"),
"print": ("print",),
}
css = {"all": ("extra1.css", "extra2.css")}
media = Media(MyMedia)
self.assertEqual(
MyMedia.css,
{
"all": [
"extra1.css",
"extra2.css",
"/static/styles1.min.css",
"/static/styles2.min.css",
],
"print": ["/static/print.min.css"],
},
)
self.assertEqual(MyMedia.css, media._css)
expected_regex = [
r'<link href="{}"( type="text/css")? media="all" '
'rel="stylesheet"( /)?>'.format(path)
for path in (
"/static/extra1.css",
"/static/extra2.css",
"/static/styles1.min.css",
"/static/styles2.min.css",
)
] + [
r'<link href="/static/print.min.css" (type="text/css" )?'
'media="print" rel="stylesheet"( /)?>'
]
for rendered_node, expected_node in zip(media.render_css(), expected_regex):
self.assertRegex(rendered_node, expected_node)
@pipeline_settings(PIPELINE_ENABLED=False)
def test_css_packages_with_pipeline_disabled(self):
"""Testing PipelineFormMedia.css_packages with PIPELINE_ENABLED=False"""
class MyMedia(PipelineFormMedia):
css_packages = {
"all": ("styles1", "styles2"),
"print": ("print",),
}
css = {"all": ("extra1.css", "extra2.css")}
media = Media(MyMedia)
self.assertEqual(
MyMedia.css,
{
"all": [
"extra1.css",
"extra2.css",
"pipeline/css/first.css",
"pipeline/css/second.css",
"pipeline/css/unicode.css",
],
"print": ["pipeline/css/urls.css"],
},
)
self.assertEqual(MyMedia.css, media._css)
expected_regex = [
'<link href="{}"( type="text/css")? media="all" '
'rel="stylesheet"( /)?>'.format(path)
for path in (
"/static/extra1.css",
"/static/extra2.css",
"/static/pipeline/css/first.css",
"/static/pipeline/css/second.css",
"/static/pipeline/css/unicode.css",
)
] + [
'<link href="/static/pipeline/css/urls.css" (type="text/css" )?'
'media="print" rel="stylesheet"( /)?>'
]
for rendered_node, expected_node in zip(media.render_css(), expected_regex):
self.assertRegex(rendered_node, expected_node)
@pipeline_settings(PIPELINE_ENABLED=True)
def test_js_packages_with_pipeline_enabled(self):
"""Testing PipelineFormMedia.js_packages with PIPELINE_ENABLED=True"""
class MyMedia(PipelineFormMedia):
js_packages = ("scripts1", "scripts2")
js = ("extra1.js", "extra2.js")
media = Media(MyMedia)
if django_version() < "3.1":
script_tag = '<script type="text/javascript" src="%s"></script>'
else:
script_tag = '<script src="%s"></script>'
self.assertEqual(
MyMedia.js,
[
"extra1.js",
"extra2.js",
"/static/scripts1.min.js",
"/static/scripts2.min.js",
],
)
self.assertEqual(MyMedia.js, media._js)
self.assertEqual(
media.render_js(),
[
script_tag % path
for path in (
"/static/extra1.js",
"/static/extra2.js",
"/static/scripts1.min.js",
"/static/scripts2.min.js",
)
],
)
@pipeline_settings(PIPELINE_ENABLED=False)
def test_js_packages_with_pipeline_disabled(self):
"""Testing PipelineFormMedia.js_packages with PIPELINE_ENABLED=False"""
class MyMedia(PipelineFormMedia):
js_packages = ("scripts1", "scripts2")
js = ("extra1.js", "extra2.js")
media = Media(MyMedia)
if django_version() < "3.1":
script_tag = '<script type="text/javascript" src="%s"></script>'
else:
script_tag = '<script src="%s"></script>'
self.assertEqual(
MyMedia.js,
[
"extra1.js",
"extra2.js",
"pipeline/js/first.js",
"pipeline/js/second.js",
"pipeline/js/application.js",
],
)
self.assertEqual(MyMedia.js, media._js)
self.assertEqual(
media.render_js(),
[
script_tag % path
for path in (
"/static/extra1.js",
"/static/extra2.js",
"/static/pipeline/js/first.js",
"/static/pipeline/js/second.js",
"/static/pipeline/js/application.js",
)
],
)
| PipelineFormMediaTests |
python | doocs__leetcode | solution/0700-0799/0701.Insert into a Binary Search Tree/Solution.py | {
"start": 192,
"end": 534
} | class ____:
def insertIntoBST(self, root: Optional[TreeNode], val: int) -> Optional[TreeNode]:
if root is None:
return TreeNode(val)
if root.val > val:
root.left = self.insertIntoBST(root.left, val)
else:
root.right = self.insertIntoBST(root.right, val)
return root
| Solution |
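A quick usage sketch, assuming the usual LeetCode-style TreeNode with val/left/right attributes:

class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right

root = TreeNode(4, TreeNode(2, TreeNode(1), TreeNode(3)), TreeNode(7))
root = Solution().insertIntoBST(root, 5)
print(root.right.left.val)  # 5 - inserted as the left child of 7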
python | PrefectHQ__prefect | src/prefect/client/schemas/objects.py | {
"start": 16484,
"end": 22415
} | class ____(TimeSeriesBaseModel, ObjectBaseModel):
name: str = Field(
default_factory=lambda: generate_slug(2),
description=(
"The name of the flow run. Defaults to a random slug if not specified."
),
examples=["my-flow-run"],
)
flow_id: UUID = Field(default=..., description="The id of the flow being run.")
state_id: Optional[UUID] = Field(
default=None, description="The id of the flow run's current state."
)
deployment_id: Optional[UUID] = Field(
default=None,
description=(
"The id of the deployment associated with this flow run, if available."
),
)
deployment_version: Optional[str] = Field(
default=None,
description="The version of the deployment associated with this flow run.",
examples=["1.0"],
)
work_queue_name: Optional[str] = Field(
default=None, description="The work queue that handled this flow run."
)
flow_version: Optional[str] = Field(
default=None,
description="The version of the flow executed in this flow run.",
examples=["1.0"],
)
parameters: dict[str, Any] = Field(
default_factory=dict, description="Parameters for the flow run."
)
idempotency_key: Optional[str] = Field(
default=None,
description=(
"An optional idempotency key for the flow run. Used to ensure the same flow"
" run is not created multiple times."
),
)
context: dict[str, Any] = Field(
default_factory=dict,
description="Additional context for the flow run.",
examples=[{"my_var": "my_val"}],
)
empirical_policy: FlowRunPolicy = Field(
default_factory=FlowRunPolicy,
)
tags: list[str] = Field(
default_factory=list,
description="A list of tags on the flow run",
examples=[["tag-1", "tag-2"]],
)
labels: KeyValueLabelsField = Field(default_factory=dict)
parent_task_run_id: Optional[UUID] = Field(
default=None,
description=(
"If the flow run is a subflow, the id of the 'dummy' task in the parent"
" flow used to track subflow state."
),
)
run_count: int = Field(
default=0, description="The number of times the flow run was executed."
)
expected_start_time: Optional[DateTime] = Field(
default=None,
description="The flow run's expected start time.",
)
next_scheduled_start_time: Optional[DateTime] = Field(
default=None,
description="The next time the flow run is scheduled to start.",
)
start_time: Optional[DateTime] = Field(
default=None, description="The actual start time."
)
end_time: Optional[DateTime] = Field(
default=None, description="The actual end time."
)
total_run_time: datetime.timedelta = Field(
default=datetime.timedelta(0),
description=(
"Total run time. If the flow run was executed multiple times, the time of"
" each run will be summed."
),
)
estimated_run_time: datetime.timedelta = Field(
default=datetime.timedelta(0),
description="A real-time estimate of the total run time.",
)
estimated_start_time_delta: datetime.timedelta = Field(
default=datetime.timedelta(0),
description="The difference between actual and expected start time.",
)
auto_scheduled: bool = Field(
default=False,
description="Whether or not the flow run was automatically scheduled.",
)
infrastructure_document_id: Optional[UUID] = Field(
default=None,
description="The block document defining infrastructure to use this flow run.",
)
infrastructure_pid: Optional[str] = Field(
default=None,
description="The id of the flow run as returned by an infrastructure block.",
)
created_by: Optional[CreatedBy] = Field(
default=None,
description="Optional information about the creator of this flow run.",
)
work_queue_id: Optional[UUID] = Field(
default=None, description="The id of the run's work pool queue."
)
work_pool_id: Optional[UUID] = Field(
default=None, description="The work pool with which the queue is associated."
)
work_pool_name: Optional[str] = Field(
default=None,
description="The name of the flow run's work pool.",
examples=["my-work-pool"],
)
state: Optional[State] = Field(
default=None,
description="The state of the flow run.",
examples=["State(type=StateType.COMPLETED)"],
)
job_variables: Optional[dict[str, Any]] = Field(
default=None,
description="Job variables for the flow run.",
)
# These are server-side optimizations and should not be present on client models
# TODO: Deprecate these fields
state_type: Optional[StateType] = Field(
default=None, description="The type of the current flow run state."
)
state_name: Optional[str] = Field(
default=None, description="The name of the current flow run state."
)
def __eq__(self, other: Any) -> bool:
"""
Check for "equality" to another flow run schema
Estimates times are rolling and will always change with repeated queries for
a flow run so we ignore them during equality checks.
"""
if isinstance(other, FlowRun):
exclude_fields = {"estimated_run_time", "estimated_start_time_delta"}
return self.model_dump(exclude=exclude_fields) == other.model_dump(
exclude=exclude_fields
)
return super().__eq__(other)
@field_validator("name", mode="before")
@classmethod
def set_default_name(cls, name: Optional[str]) -> str:
return get_or_create_run_name(name)
| FlowRun |
python | astropy__astropy | astropy/modeling/functional_models.py | {
"start": 50067,
"end": 53832
} | class ____(Fittable2DModel):
"""
Two-dimensional Lorentzian model.
Parameters
----------
amplitude : float or `~astropy.units.Quantity`.
Peak value.
x_0 : float or `~astropy.units.Quantity`.
Position of the peak in x.
y_0 : float or `~astropy.units.Quantity`.
Position of the peak in y.
fwhm : float or `~astropy.units.Quantity`.
Full width at half maximum (FWHM).
See Also
--------
Lorentz1D, Gaussian2D, Moffat2D
Notes
-----
The ``x``, ``y``, ``x_0``, ``y_0``, and ``fwhm`` inputs must have
compatible units or as unitless numbers.
Model formula:
.. math::
f(x, y) = \\frac{A \\gamma^{2}}{\\gamma^{2}
+ \\left(x - x_{0}\\right)^2 + \\left(y - y_{0}\\right)^{2}}
where :math:`\\gamma` is the half width at half maximum (HWHM), which
is half the FWHM.
The area under the `Lorentz2D` profile is infinite, therefore this
model profile cannot be normalized to sum to 1.
Examples
--------
.. plot::
:include-source:
import numpy as np
import matplotlib.pyplot as plt
from astropy.modeling.models import Lorentz2D
plt.figure()
model = Lorentz2D(x_0=12, y_0=12, fwhm=3)
yy, xx = np.mgrid[:25, :25]
data = model(xx, yy)
plt.imshow(data)
plt.show()
"""
amplitude = Parameter(default=1, description="Peak value")
x_0 = Parameter(default=0, description="Position of the peak in x")
y_0 = Parameter(default=0, description="Position of the peak in y")
fwhm = Parameter(default=1, description="Full width at half maximum")
@staticmethod
def evaluate(x, y, amplitude, x_0, y_0, fwhm):
"""Two dimensional Lorentzian model function."""
return amplitude * (
((fwhm / 2.0) ** 2) / ((x - x_0) ** 2 + (y - y_0) ** 2 + (fwhm / 2.0) ** 2)
)
@staticmethod
def fit_deriv(x, y, amplitude, x_0, y_0, fwhm):
"""Two dimensional Lorentzian model derivative with respect to parameters."""
gamma = fwhm / 2.0
r2 = (x - x_0) ** 2 + (y - y_0) ** 2
denom = gamma**2 + r2
denom2 = denom**2
d_amplitude = gamma**2 / denom
d_x_0 = amplitude * gamma**2 * 2 * (x - x_0) / denom2
d_y_0 = amplitude * gamma**2 * 2 * (y - y_0) / denom2
d_fwhm = (amplitude * (fwhm / 2) * r2) / denom2
return [d_amplitude, d_x_0, d_y_0, d_fwhm]
def bounding_box(self, factor=25):
"""Tuple defining the default ``bounding_box`` limits,
``(x_low, x_high), (y_low, y_high)``.
Parameters
----------
factor : float
The multiple of FWHM used to define the limits.
Default is chosen to include most (99%) of the
area under the curve, while still showing the
central feature of interest.
"""
x0 = self.x_0
y0 = self.y_0
dx = factor * self.fwhm
return ((x0 - dx, x0 + dx), (y0 - dx, y0 + dx))
@property
def input_units(self):
x_unit = self.x_0.input_unit
y_unit = self.y_0.input_unit
if x_unit is None and y_unit is None:
return None
return {self.inputs[0]: x_unit, self.inputs[1]: y_unit}
def _parameter_units_for_data_units(self, inputs_unit, outputs_unit):
if inputs_unit[self.inputs[0]] != inputs_unit[self.inputs[1]]:
raise UnitsError("Units of 'x' and 'y' inputs should match")
return {
"x_0": inputs_unit[self.inputs[0]],
"y_0": inputs_unit[self.inputs[0]],
"fwhm": inputs_unit[self.inputs[0]],
"amplitude": outputs_unit[self.outputs[0]],
}
| Lorentz2D |
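A numeric sanity check of the FWHM convention in the notes above: at a radial offset of fwhm/2 (the HWHM) from the peak, the profile drops to exactly half the amplitude:

from astropy.modeling.models import Lorentz2D

val = Lorentz2D.evaluate(x=1.5, y=0.0, amplitude=2.0, x_0=0.0, y_0=0.0, fwhm=3.0)
print(val)  # 1.0 == amplitude / 2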
python | ray-project__ray | rllib/env/vector/vector_multi_agent_env.py | {
"start": 264,
"end": 2910
} | class ____:
metadata: Dict[str, Any] = {}
spec: Optional[EnvSpec] = None
render_mode: Optional[str] = None
closed: bool = False
envs: Optional[List] = None
    # TODO (simon, sven): We could think about enabling different
    # spaces for different envs (e.g. different high/lows). In that
    # case we would actually need "batched" spaces here, not a
    # single one that holds for all sub-envs.
single_observation_spaces: Optional[Dict[str, gym.Space]] = None
single_action_spaces: Optional[Dict[str, gym.Space]] = None
# Note, the proper `gym` spaces are needed for the connector pipeline.
single_observation_space: Optional[gym.spaces.Dict] = None
single_action_space: Optional[gym.spaces.Dict] = None
num_envs: int
_np_random: Optional[np.random.Generator] = None
_np_random_seed: Optional[int] = None
    # @OldAPIStack: use `observation_spaces` and `action_spaces` instead.
observation_space: Optional[gym.Space] = None
action_space: Optional[gym.Space] = None
# TODO (simon): Add docstrings, when final design is clear.
def reset(
self, *, seed: Optional[int] = None, options: Optional[Dict[str, Any]] = None
) -> Tuple[ArrayType, ArrayType]:
# Set random generators with the provided seeds.
if seed is not None:
self._np_random, self._np_random_seed = seeding.np_random(seed)
def step(
self, actions: ArrayType
) -> Tuple[ArrayType, ArrayType, ArrayType, ArrayType, ArrayType]:
raise NotImplementedError(f"{self.__str__()} step function is not implemented.")
def render(self) -> Optional[Tuple[RenderFrame, ...]]:
raise NotImplementedError(
f"{self.__str__()} render function is not implemented."
)
def close(self, **kwargs: Any):
# If already closed, there is nothing more to do.
if self.closed:
return
# Otherwise close environments gracefully.
self.close_extras(**kwargs)
self.closed = True
def close_extras(self, **kwargs: Any):
# Users must not implement this.
pass
@property
def unwrapped(self):
return self
def __del__(self):
        # Close environments, if necessary, when deleting instances.
if not getattr(self, "closed", True):
self.close()
def __repr__(self):
if self.spec is None:
return f"{self.__class__.__name__}(num_envs={self.num_envs})"
else:
return (
f"{self.__class__.__name__}({self.spec.id}, num_envs={self.num_envs})"
)
| VectorMultiAgentEnv |
python | getsentry__sentry | src/sentry/migrations/0001_squashed_0904_onboarding_task_project_id_idx.py | {
"start": 2032,
"end": 383721
} | class ____(CheckedMigration):
# This flag is used to mark that a migration shouldn't be automatically run in production.
# This should only be used for operations where it's safe to run the migration after your
# code has deployed. So this should not be used for most operations that alter the schema
# of a table.
# Here are some things that make sense to mark as post deployment:
# - Large data migrations. Typically we want these to be run manually so that they can be
# monitored and not block the deploy for a long period of time while they run.
# - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
# run this outside deployments so that we don't block them. Note that while adding an index
# is a schema change, it's completely safe to run the operation after the code has deployed.
# Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
is_post_deployment = True
replaces = [
("sentry", "0001_squashed_0484_break_org_member_user_fk"),
("sentry", "0485_remove_scheduled_job"),
("sentry", "0486_integer_pr_comment_issue_list"),
("sentry", "0487_add_indexes_to_bundles"),
("sentry", "0488_add_orgauthtoken"),
("sentry", "0489_index_checkin_timeout"),
("sentry", "0490_add_is_test_to_org"),
("sentry", "0491_remove_orgmemmap_unique_constraints"),
("sentry", "0492_pickle_to_json_sentry_groupedmessage"),
("sentry", "0493_pickle_to_json_sentry_activity"),
("sentry", "0494_add_traceid_checkin"),
("sentry", "0495_add_date_last_modified_to_artifact_bundle"),
("sentry", "0496_update_userid_state"),
("sentry", "0497_add_comment_reactions_column"),
("sentry", "0498_typed_bitfield"),
("sentry", "0499_typed_bitfield_revert"),
("sentry", "0500_set_none_date_last_modified_to_date_uploaded"),
("sentry", "0501_typed_bitfield_remove_labels"),
("sentry", "0502_savedsearch_update_me_myteams"),
("sentry", "0503_alter_notification_actor_nullable"),
("sentry", "0504_add_artifact_bundle_index"),
("sentry", "0505_debugfile_date_accessed"),
("sentry", "0506_null_boolean_fields"),
("sentry", "0507_add_oidc_scopes"),
("sentry", "0507_delete_pending_deletion_rules"),
("sentry", "0508_index_checkin_monitorenvironment"),
("sentry", "0508_merging_migrations"),
("sentry", "0509_merging_migrations"),
("sentry", "0510_index_checkin_traceid"),
("sentry", "0511_pickle_to_json_sentry_rawevent"),
("sentry", "0512_add_proguard_release_association"),
("sentry", "0513_django_jsonfield"),
("sentry", "0514_migrate_priority_saved_searches"),
("sentry", "0515_slugify_invalid_monitors"),
("sentry", "0516_switch_pagerduty_silo"),
("sentry", "0517_backfill_pagerdutyservices_into_org_integrations"),
("sentry", "0518_cleanup_bundles_indexes"),
("sentry", "0519_remove_repo_name_constraint"),
("sentry", "0520_add_flat_file_index_table"),
("sentry", "0521_migrate_world_map_widgets"),
("sentry", "0522_migrate_discover_savedquery_worldmaps"),
("sentry", "0523_add_new_index_to_groupedmessage"),
("sentry", "0524_flip_checkin_index"),
("sentry", "0525_add_next_checkin_latest"),
("sentry", "0526_pr_comment_type_column"),
("sentry", "0527_backfill_next_checkin_latest"),
("sentry", "0528_truncate_flat_index"),
("sentry", "0529_remove_pagerduty_service"),
("sentry", "0530_new_notification_tables"),
("sentry", "0531_add_notification_uuid_to_incident_activity"),
("sentry", "0532_denormalize_team_and_user_x_actor"),
("sentry", "0533_make_flatfile_unique_again"),
("sentry", "0534_add_notification_uuid_to_rule_fire_history"),
("sentry", "0535_add_created_date_to_outbox_model"),
("sentry", "0536_backfill_tombstones"),
("sentry", "0537_backfill_xactor_team_and_user_ids"),
("sentry", "0538_remove_name_data_from_rule"),
("sentry", "0539_add_last_state_change_monitorenv"),
("sentry", "0540_add_release_threshold_table"),
("sentry", "0541_add_replicated_auth_models"),
("sentry", "0542_rm_flatfile_file"),
("sentry", "0543_add_team_id_to_groupsubscription"),
("sentry", "0544_remove_groupsubscription_columns"),
("sentry", "0545_add_last_verified_auth_ident_replica"),
("sentry", "0546_backfill_fix_bad_xactors"),
("sentry", "0547_add_commitfilechange_language_column"),
("sentry", "0548_add_is_unclaimed_boolean_to_user"),
("sentry", "0549_re_add_groupsubscription_columns"),
("sentry", "0550_migrate_no_action_dupe_issue_alerts"),
("sentry", "0551_drop_xactor_actor"),
("sentry", "0552_create_neglectedalert_table"),
("sentry", "0553_add_new_index_to_groupedmessage_table"),
("sentry", "0554_add_team_replica"),
("sentry", "0555_set_neglectedrule_email_date_columns_nullable"),
("sentry", "0556_organizationmapping_replicate_require_2fa"),
("sentry", "0557_threshold_related_name"),
("sentry", "0558_add_organization_member_team_replica"),
("sentry", "0559_custom_dynamic_sampling_rule"),
("sentry", "0560_add_monitorincident_table"),
("sentry", "0561_backfill_new_notification_tables"),
("sentry", "0562_drop_xactor_actor_from_state"),
("sentry", "0563_commitfilechange_drop_language_column"),
("sentry", "0564_commitfilechange_delete_language_column"),
("sentry", "0565_fix_diff_env_dupe_alerts"),
("sentry", "0566_remove_cron_missed_margins_zero"),
("sentry", "0567_add_slug_reservation_model"),
("sentry", "0568_monitors_fix_next_checkin_latest"),
("sentry", "0569_dashboard_widgets_indicator"),
("sentry", "0570_repository_add_languages_column"),
("sentry", "0571_add_hybrid_cloud_foreign_key_to_slug_reservation"),
("sentry", "0572_sentry_remove_unused_eventuser_index"),
("sentry", "0573_add_first_seen_index_groupedmessage"),
("sentry", "0574_backfill_weekly_report_settings"),
("sentry", "0575_incident_date_added_index"),
("sentry", "0576_add_missing_org_integration_scope"),
("sentry", "0577_drop_latest_incident_index"),
("sentry", "0578_add_query_and_users_to_custom_dynamic_sampling_rules"),
("sentry", "0579_index_incident_trigger"),
("sentry", "0580_threhsold_window_positive_integer"),
("sentry", "0581_add_user_and_team_to_alert_rules"),
("sentry", "0582_add_status_indexes_checkins"),
("sentry", "0583_add_early_adopter_to_organization_mapping"),
("sentry", "0584_apitoken_add_name_and_last_chars"),
("sentry", "0585_add_orgmember_partnership_restricted_flag"),
("sentry", "0586_add_has_feedbacks_flag"),
("sentry", "0587_remove_unused_neglectedrule_rows"),
("sentry", "0588_add_relocation_models"),
("sentry", "0589_add_commit_date_added_indices"),
("sentry", "0590_add_metadata_to_sentry_app"),
("sentry", "0591_remove_relocation_hybrid_cloud_foreign_keys"),
("sentry", "0592_delete_relocation_hybrid_cloud_foreign_keys"),
("sentry", "0593_add_notification_flag_to_dynamic_sampling_custom_rule"),
("sentry", "0594_trivial_but_dangerous_2"),
("sentry", "0595_trivial_but_dangerous_3"),
("sentry", "0596_trivial_but_dangerous_4"),
("sentry", "0597_trivial_but_dangerous_5"),
("sentry", "0598_trivial_but_dangerous_1"),
("sentry", "0599_add_import_chunk"),
("sentry", "0600_eventattachment_metadata"),
("sentry", "0601_add_has_sourcemaps_project_flag"),
("sentry", "0602_import_chunk_unique_together"),
("sentry", "0603_add_dangerous_but_trivial_index"),
("sentry", "0604_remove_dangerous_but_trivial_index"),
("sentry", "0605_addremove_dangerous_but_trivial_index"),
("sentry", "0606_update_user_to_optional_organization_slug_reservation"),
("sentry", "0607_drop_externalactor_actorid"),
("sentry", "0608_notification_setting_db_constraint"),
("sentry", "0609_remove_notification_setting_model"),
("sentry", "0610_remove_notification_setting_table"),
("sentry", "0611_add_regression_group_model"),
("sentry", "0612_expand_relocation_model"),
("sentry", "0613_drop_eventuser_table_part_1"),
("sentry", "0614_drop_eventuser_table_part_2"),
("sentry", "0615_add_dashboard_widget_query_on_demand_table"),
("sentry", "0616_drop_event_user_id_from_userreport_table_step_1"),
("sentry", "0617_monitor_boolean_fields_muted_disabled"),
("sentry", "0618_drop_event_user_id_from_userreport_table_step_2"),
("sentry", "0619_monitors_migrate_is_muted"),
("sentry", "0620_add_has_new_feedbacks_flag"),
("sentry", "0621_set_muted_monitors_to_active"),
("sentry", "0622_add_has_custom_metrics_flag"),
("sentry", "0623_increase_regression_fingerprint_length"),
("sentry", "0624_add_is_muted_monitorenvironment"),
("sentry", "0625_change_rule_label_type_to_char256"),
("sentry", "0626_add_member_project_creation_bitfield"),
("sentry", "0627_change_to_sentry_slug"),
("sentry", "0628_better_menv_latest_index"),
("sentry", "0629_eventattachment_index"),
("sentry", "0630_better_monitor_latest_index"),
("sentry", "0631_add_priority_columns_to_groupedmessage"),
("sentry", "0632_apitoken_backfill_last_chars"),
("sentry", "0633_add_priority_locked_at_to_groupedmessage"),
("sentry", "0634_backfill_github_webhook_outbox_shard_ids"),
("sentry", "0635_groupenvironment_index"),
("sentry", "0636_monitor_incident_env_resolving_index"),
("sentry", "0637_remove_pr_comment_pr_id_constraint"),
("sentry", "0638_add_date_added_to_dashboard_widget_on_demand"),
("sentry", "0639_add_spec_version_to_dashboard_on_demand"),
("sentry", "0640_index_together"),
("sentry", "0641_backfill_group_attributes"),
("sentry", "0642_index_together_release"),
("sentry", "0643_add_date_modified_col_dashboard_widget_query"),
("sentry", "0644_backfill_priority_for_groups"),
("sentry", "0645_backfill_add_uuid_to_all_rule_actions"),
("sentry", "0646_create_notification_message_table"),
("sentry", "0647_apitoken_add_hashed_columns"),
("sentry", "0648_monitor_env_break_env_fk"),
("sentry", "0649_add_index_for_group_priority"),
("sentry", "0650_create_sentryshot"),
("sentry", "0651_enable_activated_alert_rules"),
("sentry", "0652_alert_rule_activation_condition"),
("sentry", "0653_apitoken_add_token_type"),
("sentry", "0654_rename_priority_sort_to_trends"),
("sentry", "0655_apitoken_increase_token_length"),
("sentry", "0656_add_discover_dataset_split_dashboard"),
("sentry", "0657_add_status_column_for_alert_rule_trigger_action"),
("sentry", "0658_projectkey_usecase"),
("sentry", "0659_artifactbundleindex_cleanup"),
("sentry", "0660_fix_cron_monitor_invalid_orgs"),
("sentry", "0661_artifactbundleindex_cleanup_step2"),
("sentry", "0662_monitor_drop_last_state_change"),
("sentry", "0663_artifactbundleindex_cleanup_step3"),
("sentry", "0664_create_new_broken_monitor_detection_table"),
("sentry", "0665_monitor_drop_last_state_change_db"),
("sentry", "0666_monitor_incident_default_grouphash"),
("sentry", "0667_drop_django_team_org_role_column"),
("sentry", "0668_add_active_monitor_incident_index"),
("sentry", "0669_alert_rule_activation"),
("sentry", "0670_monitor_incident_cleanup_duplicates"),
("sentry", "0671_enforce_unqiue_active_incidents"),
("sentry", "0672_backfill_ukraine_timezone_name"),
("sentry", "0673_add_env_muted_to_broken_detection"),
("sentry", "0674_monitor_clear_missed_timeout_as_error"),
("sentry", "0675_dashboard_widget_query_rename_priority_sort_to_trends"),
("sentry", "0676_apitoken_hashed_indexes"),
("sentry", "0677_unpickle_project_options_again"),
("sentry", "0678_add_is_hidden_dashboard_widget_query"),
("sentry", "0679_add_query_sub_fk_to_aar_activations"),
("sentry", "0680_unpickle_options_again"),
("sentry", "0681_unpickle_authenticator_again"),
("sentry", "0682_monitors_constrain_to_project_id_slug"),
("sentry", "0683_reprocessing_datetime_indexes"),
("sentry", "0684_monitor_check_in_config_nullable"),
("sentry", "0685_alert_rule_conditons_rename_singular"),
("sentry", "0686_remove_config_from_checkin_state_operation"),
("sentry", "0687_alert_rule_project_backfill_migration"),
("sentry", "0688_add_project_flag_high_priority_alerts"),
("sentry", "0689_drop_config_from_cron_checkin"),
("sentry", "0690_remove_project_team_avatar"),
("sentry", "0691_remove_project_team_avatar_model"),
("sentry", "0692_backfill_group_priority_again"),
("sentry", "0693_add_monitors_ownership_actor_id"),
("sentry", "0694_db_index_alert_rule_actions"),
("sentry", "0695_add_monitors_ownership_owner_user_id_team_id"),
("sentry", "0696_remove_monitor_owner_actor_id"),
("sentry", "0697_remove_monitor_owner_actor_id_db"),
("sentry", "0698_remove_file_id_from_control_avatars"),
("sentry", "0699_update_monitor_owner_team_id_cascsade"),
("sentry", "0700_drop_fileid_controlavatar"),
("sentry", "0701_backfill_alertrule_user_team"),
("sentry", "0702_alert_rule_project_backfill_migration_2"),
("sentry", "0703_add_team_user_to_rule"),
("sentry", "0704_backfill_rule_user_team"),
("sentry", "0705_grouphistory_add_userteam"),
("sentry", "0706_grouphistory_userteam_backfill"),
("sentry", "0707_alert_rule_activations_incidents_fk"),
("sentry", "0708_rule_remove_owner_state"),
("sentry", "0709_alertrule_remove_owner_state"),
("sentry", "0710_grouphistory_remove_actor_state"),
("sentry", "0711_backfill_group_attributes_to_self_hosted"),
("sentry", "0712_create_tombstone_compound_indexes"),
("sentry", "0713_team_remove_actor_state"),
("sentry", "0714_drop_project_team_avatar"),
("sentry", "0715_remove_actormodel_constraints"),
("sentry", "0716_remove_actormodel"),
("sentry", "0717_query_subscription_timebox"),
("sentry", "0718_delete_timebox_columns"),
("sentry", "0719_querysubscription_timebox_column_deletion_db"),
("sentry", "0720_remove_actor_columns"),
("sentry", "0721_delete_sentryfunctions"),
("sentry", "0722_drop_sentryfunctions"),
("sentry", "0723_project_template_models"),
("sentry", "0724_discover_saved_query_dataset"),
("sentry", "0725_create_sentry_groupsearchview_table"),
("sentry", "0726_apitoken_backfill_hashes"),
("sentry", "0727_add_description_alertrule"),
("sentry", "0728_incident_subscription_fk"),
("sentry", "0729_backfill_groupsearchviews_with_pinned_searches"),
("sentry", "0730_add_subscription_fk_to_incident"),
("sentry", "0731_add_insight_project_flags"),
("sentry", "0732_add_span_attribute_extraction_rules"),
("sentry", "0733_relocation_provenance"),
("sentry", "0734_rm_reprocessing_step1"),
("sentry", "0735_sunset_appstore_connect_integration"),
("sentry", "0736_rm_reprocessing_step2"),
("sentry", "0737_add_discover_saved_query_dataset_source"),
("sentry", "0738_rm_reprocessing_step3"),
("sentry", "0739_backfill_group_info_to_group_attributes"),
("sentry", "0740_one_relocation_file_kind_per_relocation"),
("sentry", "0741_metric_alert_anomaly_detection"),
("sentry", "0742_backfill_alertrule_detection_type"),
("sentry", "0743_backfill_broken_monitor_notification_setting_option"),
("sentry", "0744_add_dataset_source_field_to_dashboards"),
("sentry", "0745_add_prevent_superuser_access_bitflag"),
("sentry", "0746_add_bitflags_to_hybrid_cloud"),
("sentry", "0747_create_datasecrecywaiver_table"),
("sentry", "0748_create_grouphashmetadata_table"),
("sentry", "0749_disable_member_invite"),
("sentry", "0750_disable_member_invite_in_hybrid_cloud"),
("sentry", "0751_grouphashmetadata_use_one_to_one_field_for_grouphash"),
("sentry", "0752_fix_substatus_for_unresolved_groups"),
("sentry", "0753_fix_substatus_for_ignored_groups"),
("sentry", "0754_extend_broadcast_model"),
("sentry", "0755_remove_fk_constraints_spanattributeextraction"),
("sentry", "0756_grouprelease_represented_in_django"),
("sentry", "0757_add_scopes_to_apiapplication"),
("sentry", "0758_remove_spanattributeextraction_models"),
("sentry", "0759_remove_spanattributeextraction_tables"),
("sentry", "0760_remove_appstore_connect_integration_tables"),
("sentry", "0761_add_substatus_constraint_to_groups"),
("sentry", "0762_drop_substatus_constraint_to_groups"),
("sentry", "0763_add_created_by_to_broadcasts"),
("sentry", "0764_migrate_bad_status_substatus_rows"),
("sentry", "0765_add_org_to_api_auth"),
("sentry", "0766_fix_substatus_for_pending_merge"),
("sentry", "0767_add_selected_aggregate_to_dashboards_widget_query"),
("sentry", "0768_fix_old_group_first_seen_dates"),
("sentry", "0769_add_seer_fields_to_grouphash_metadata"),
("sentry", "0770_increase_project_slug_max_length"),
("sentry", "0771_add_grouping_config_to_grouphash_metadata"),
("sentry", "0772_backfill_grouphash_metadata_grouping_config"),
("sentry", "0773_make_group_score_nullable"),
("sentry", "0774_drop_group_score_in_state_only"),
("sentry", "0775_add_dashboard_permissions_model"),
("sentry", "0776_drop_group_score_in_database"),
("sentry", "0777_add_related_name_to_dashboard_permissions"),
("sentry", "0778_userreport_comments_max_length"),
("sentry", "0779_remove_groups_from_group_inbox"),
("sentry", "0780_create_sentry_rollback_models"),
("sentry", "0781_add_hash_basis_to_grouphash_metadata"),
("sentry", "0782_align_deletedproject_slug_length"),
("sentry", "0783_remove_release_project_id"),
("sentry", "0784_remove_broadcasts_cta_column"),
("sentry", "0785_add_new_field_to_dashboard_permissions"),
("sentry", "0786_drop_broadcasts_cta_column"),
("sentry", "0787_make_dashboard_perms_col_nullable"),
("sentry", "0788_remove__dashboard_perms_col"),
("sentry", "0789_add_unique_constraint_to_rollbackorganization"),
("sentry", "0790_delete_dashboard_perms_col"),
("sentry", "0791_add_hashing_metadata_to_grouphash_metadata"),
("sentry", "0792_add_unique_index_apiauthorization"),
("sentry", "0793_remove_db_constraint_alert_rule_exclusion"),
("sentry", "0794_rm_excluded_included_projects_alertrule"),
("sentry", "0795_drop_included_excluded_projects"),
("sentry", "0796_rm_excluded_projects_triggers"),
("sentry", "0797_drop_excluded_project_triggers"),
("sentry", "0798_add_favorite_dashboard_col"),
("sentry", "0799_cron_incident_index"),
("sentry", "0800_rm_incidentseen_incidentsubscription"),
("sentry", "0801_drop_incidentseen_incidentsubscription"),
("sentry", "0802_remove_grouping_auto_update_option"),
("sentry", "0803_delete_unused_metricskeyindexer_pt1"),
("sentry", "0804_delete_metrics_key_indexer_pt2"),
("sentry", "0805_add_alert_and_member_invite_scopes_to_sentry_apps"),
("sentry", "0806_remove_monitor_attachment_id_pt1"),
("sentry", "0807_remove_monitor_attachment_id_pt2"),
("sentry", "0808_change_grouphash_metadata_seer_matched_grouphash_deletion_config"),
("sentry", "0809_delete_auth_provider_default_teams"),
("sentry", "0810_add_project_has_flag"),
("sentry", "0811_fully_delete_auth_provider_default_teams"),
("sentry", "0812_rm_activation_incident"),
("sentry", "0813_rm_alertruleactivation_models"),
("sentry", "0814_drop_alertactivations"),
("sentry", "0816_add_timestamp_to_group_tombstone"),
("sentry", "0818_enforce_check_in_environment_not_null"),
("sentry", "0819_alert_rule_snuba_query_non_null"),
("sentry", "0820_snuba_query_non_none"),
("sentry", "0821_create_groupsearchview_page_filter_columns"),
("sentry", "0822_alert_rule_always_organization"),
("sentry", "0823_projectcodeowners_raw_never_null"),
("sentry", "0824_projectcodeowners_schema_non_null"),
("sentry", "0825_remove_notificationmessage_unique_ag_constraint"),
("sentry", "0826_make_sentryapp_uuid_unique"),
("sentry", "0827_projectteam_non_null"),
("sentry", "0828_add_platform_to_grouphash_metadata"),
("sentry", "0829_add_additional_timestamps_to_checkins"),
("sentry", "0830_add_external_id_to_project"),
("sentry", "0831_add_index_external_id_organization_to_project"),
("sentry", "0832_make_grouphash_metadata_date_added_nullable"),
("sentry", "0833_add_relocationtransfer_models"),
("sentry", "0834_add_index_on_authidentity_last_synced"),
("sentry", "0835_add_schema_version_to_grouphash_metadata"),
("sentry", "0836_create_groupsearchviewstarred_table"),
("sentry", "0837_create_groupsearchviewlastseen_table"),
("sentry", "0838_backfill_groupsearchview_positions_to_gsvstarred"),
("sentry", "0839_add_visibility_column_to_groupsearchview"),
("sentry", "0840_savedsearch_type_non_null"),
("sentry", "0841_backfill_desynced_starred_views"),
("sentry", "0842_create_organization_member_invite_table"),
("sentry", "0843_make_groupsearchview_postition_nullable_for_deletion"),
("sentry", "0844_remove_project_cascade_in_create_first_project"),
("sentry", "0845_safe_drop_groupsearchviewposition"),
("sentry", "0846_hard_drop_groupsearchview_position"),
("sentry", "0847_remove_duplicate_and_unused_indexes"),
("sentry", "0848_crons_post_migration_constraints"),
("sentry", "0849_monitor_checkin_unknown"),
("sentry", "0850_crons_drop_location"),
("sentry", "0851_new_group_nullable"),
("sentry", "0852_delete_new_groups_column"),
("sentry", "0853_add_group_open_periods"),
("sentry", "0854_add_project_sdk_model"),
("sentry", "0855_give_monitor_type_a_db_default"),
("sentry", "0856_monitors_remove_type_column_state"),
("sentry", "0857_update_group_open_periods_constraint"),
("sentry", "0858_backfill_groupsearchviews_with_org_visibility"),
("sentry", "0859_monitors_remove_type_column_db"),
("sentry", "0860_add_new_groupopenperiod_constraint"),
("sentry", "0861_monitors_remove_location"),
("sentry", "0862_monitors_remove_location_db"),
("sentry", "0863_update_organization_member_invite_model"),
("sentry", "0864_move_monitors"),
("sentry", "0865_file_offsets"),
("sentry", "0866_grouptype_index"),
("sentry", "0867_fix_drift_default_to_db_default"),
("sentry", "0868_delete_group_open_periods"),
("sentry", "0869_fix_drift_db_default_pt2"),
("sentry", "0870_delete_non_member_views"),
("sentry", "0871_fix_some_drift"),
("sentry", "0872_fix_drift_deleted_columns"),
("sentry", "0873_update_groupsearchview_visibility_default"),
("sentry", "0874_positive_integer_drift"),
("sentry", "0875_integer_drift_group_1"),
("sentry", "0876_integer_drift_group_2"),
("sentry", "0877_integer_drift_release"),
("sentry", "0878_backfill_open_periods"),
("sentry", "0879_add_seer_fields_to_group"),
("sentry", "0880_orgauthtoken_bigint"),
("sentry", "0881_delete_single_prioritized_groupsearchviews"),
("sentry", "0882_projectoptions_idx_on_key"),
("sentry", "0883_delete_incident_snapshot_tables_pt1"),
("sentry", "0884_delete_incident_snapshot_tables_pt2"),
("sentry", "0885_remove_project_integrations_table"),
("sentry", "0886_django_arrayfield_scope_list"),
("sentry", "0887_environment_deleted_column"),
("sentry", "0888_groupopenperiod_gist_index_in_code"),
("sentry", "0889_remove_index_on_key_field"),
("sentry", "0890_remove_index_on_group_release_last_seen"),
("sentry", "0891_remove_project_integrations_table_physical"),
("sentry", "0892_sentry_app_json_field"),
("sentry", "0893_rulesnooze_added_with_timezone"),
("sentry", "0894_split_discover_dataset_saved_queries"),
("sentry", "0895_relocation_provenance_smallint"),
("sentry", "0896_org_level_access_not_null"),
("sentry", "0897_rm_extraneous_groupedmessage_index"),
("sentry", "0898_groupedmessage_wrong_int_type"),
("sentry", "0899_organization_slug_upper_idx"),
("sentry", "0900_group_link_group_id_no_index"),
("sentry", "0901_org_slug_wrong_index_name"),
("sentry", "0902_detection_type_match_size"),
("sentry", "0903_missing_indexes_in_state"),
("sentry", "0904_onboarding_task_project_id_idx"),
]
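    # Squash bookkeeping: on a fresh database Django applies this one migration
    # in place of every migration listed in `replaces` above.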
initial = True
    checked = False  # Initial migration on an empty database: exempt from the lock-safety checks, so it may take locks
dependencies = []
operations = [
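        # Custom user model backing authentication. db_table="auth_user" reuses
        # the table name of Django's stock user model, and `name` is stored in
        # the legacy "first_name" column.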
migrations.CreateModel(
name="User",
fields=[
("password", models.CharField(max_length=128)),
("last_login", models.DateTimeField(blank=True, null=True)),
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("username", models.CharField(max_length=128, unique=True)),
("name", models.CharField(blank=True, db_column="first_name", max_length=200)),
("email", models.EmailField(blank=True, max_length=75)),
("is_staff", models.BooleanField(default=False)),
("is_active", models.BooleanField(default=True)),
("is_unclaimed", models.BooleanField(db_default=False, default=False)),
("is_superuser", models.BooleanField(default=False)),
("is_managed", models.BooleanField(default=False)),
("is_sentry_app", models.BooleanField(default=None, null=True)),
("is_password_expired", models.BooleanField(default=False)),
("last_password_change", models.DateTimeField(null=True)),
(
"flags",
bitfield.models.BitField(["newsletter_consent_prompt"], default=0, null=True),
),
("session_nonce", models.CharField(max_length=12, null=True)),
("date_joined", models.DateTimeField(default=django.utils.timezone.now)),
("last_active", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("avatar_type", models.PositiveSmallIntegerField(db_default=0, default=0)),
(
"avatar_url",
models.CharField(db_default=None, default=None, max_length=120, null=True),
),
],
options={
"verbose_name": "user",
"verbose_name_plural": "users",
"db_table": "auth_user",
},
managers=[
("objects", sentry.users.models.user.UserManager(cache_fields=["pk"])),
],
),
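        # Metric alert rule definition. `user_id` is a HybridCloudForeignKey,
        # i.e. a cross-silo reference enforced in application code rather than
        # by a database-level constraint.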
migrations.CreateModel(
name="AlertRule",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
("name", models.TextField()),
("status", models.SmallIntegerField(db_default=0, default=0)),
("threshold_type", models.SmallIntegerField(null=True)),
("resolve_threshold", models.FloatField(null=True)),
("threshold_period", models.IntegerField()),
("comparison_delta", models.IntegerField(null=True)),
("date_modified", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"monitor_type",
models.IntegerField(
db_default=sentry.incidents.models.alert_rule.AlertRuleMonitorTypeInt[
"CONTINUOUS"
],
default=sentry.incidents.models.alert_rule.AlertRuleMonitorTypeInt[
"CONTINUOUS"
],
),
),
("description", models.CharField(max_length=1000, null=True)),
(
"detection_type",
models.CharField(db_default="static", default="static", max_length=32),
),
("sensitivity", models.CharField(null=True)),
("seasonality", models.CharField(null=True)),
],
options={
"db_table": "sentry_alertrule",
"base_manager_name": "objects_with_snapshots",
"default_manager_name": "objects_with_snapshots",
},
managers=[
("objects_with_snapshots", django.db.models.manager.Manager()),
],
),
migrations.CreateModel(
name="ApiKey",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"scopes",
bitfield.models.BitField(
[
"project:read",
"project:write",
"project:admin",
"project:releases",
"team:read",
"team:write",
"team:admin",
"event:read",
"event:write",
"event:admin",
"org:read",
"org:write",
"org:admin",
"member:read",
"member:write",
"member:admin",
"org:integrations",
"alerts:read",
"alerts:write",
"member:invite",
],
default=None,
),
),
(
"scope_list",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(), default=list, size=None
),
),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE"
),
),
("label", models.CharField(blank=True, default="Default", max_length=64)),
("key", models.CharField(max_length=32, unique=True)),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, default=0
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("allowed_origins", models.TextField(blank=True, null=True)),
],
options={
"db_table": "sentry_apikey",
},
),
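        # Uploaded release artifact bundles (e.g. source maps). `bundle_id`
        # defaults to an all-zero sentinel UUID until a real id is attached.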
migrations.CreateModel(
name="ArtifactBundle",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
(
"bundle_id",
models.UUIDField(db_index=True, default="00000000-00000000-00000000-00000000"),
),
("artifact_count", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("indexing_state", models.IntegerField(default=None, null=True)),
(
"date_added",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("date_uploaded", models.DateTimeField(default=django.utils.timezone.now)),
("date_last_modified", models.DateTimeField(null=True)),
],
options={
"db_table": "sentry_artifactbundle",
},
),
migrations.CreateModel(
name="AuthProvider",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE", unique=True
),
),
("provider", models.CharField(max_length=128)),
("config", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"sync_time",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
("last_sync", models.DateTimeField(null=True)),
(
"default_role",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=50),
),
("default_global_access", models.BooleanField(default=True)),
("flags", bitfield.models.BitField(["allow_unlinked", "scim_enabled"], default=0)),
],
options={
"db_table": "sentry_authprovider",
},
),
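        # Control-silo counterparts of the File/FileBlob models created further
        # below: the same chunked blob storage scheme, for files owned by the
        # control silo.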
migrations.CreateModel(
name="ControlFile",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("name", models.TextField()),
("type", models.CharField(max_length=64)),
(
"timestamp",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("headers", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
("size", sentry.db.models.fields.bounded.WrappingU32IntegerField(null=True)),
("checksum", models.CharField(db_index=True, max_length=40, null=True)),
],
options={
"db_table": "sentry_controlfile",
},
bases=(models.Model, sentry.models.files.abstractfile._Parent),
),
migrations.CreateModel(
name="ControlFileBlob",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("path", models.TextField(null=True)),
("size", sentry.db.models.fields.bounded.WrappingU32IntegerField(null=True)),
("checksum", models.CharField(max_length=40, unique=True)),
(
"timestamp",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
],
options={
"db_table": "sentry_controlfileblob",
},
bases=(models.Model, sentry.models.files.abstractfileblob._Parent),
),
migrations.CreateModel(
name="ControlOption",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("key", models.CharField(max_length=128, unique=True)),
("last_updated", models.DateTimeField(default=django.utils.timezone.now)),
(
"last_updated_by",
models.CharField(db_default="unknown", default="unknown", max_length=16),
),
("value", sentry.db.models.fields.picklefield.PickledObjectField(editable=False)),
],
options={
"db_table": "sentry_controloption",
},
bases=(sentry.backup.mixins.OverwritableConfigMixin, models.Model),
),
migrations.CreateModel(
name="ControlRelocationTransfer",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(auto_now=True)),
("date_added", models.DateTimeField(auto_now_add=True)),
("relocation_uuid", sentry.db.models.fields.uuid.UUIDField(max_length=32)),
("org_slug", models.CharField()),
("requesting_region", models.CharField()),
("exporting_region", models.CharField()),
("state", models.CharField(default="request")),
(
"scheduled_for",
models.DateTimeField(default=django.utils.timezone.now, null=True),
),
("public_key", models.BinaryField(null=True)),
],
options={
"db_table": "sentry_controlrelocationtransfer",
},
),
migrations.CreateModel(
name="Dashboard",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("title", models.CharField(max_length=255)),
(
"created_by_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, on_delete="CASCADE"
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"visits",
sentry.db.models.fields.bounded.BoundedBigIntegerField(default=1, null=True),
),
(
"last_visited",
models.DateTimeField(default=django.utils.timezone.now, null=True),
),
("filters", sentry.db.models.fields.jsonfield.JSONField(null=True)),
],
options={
"db_table": "sentry_dashboard",
},
),
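        # The Deleted* models are audit tombstones: they record who deleted an
        # organization/project/team, when, and why, after the original row is
        # gone.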
migrations.CreateModel(
name="DeletedOrganization",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("actor_label", models.CharField(max_length=64, null=True)),
("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
("actor_key", models.CharField(max_length=32, null=True)),
("ip_address", models.GenericIPAddressField(null=True, unpack_ipv4=True)),
("date_deleted", models.DateTimeField(default=django.utils.timezone.now)),
("date_created", models.DateTimeField(null=True)),
("reason", models.TextField(blank=True, null=True)),
("name", models.CharField(max_length=64, null=True)),
("slug", models.CharField(max_length=50, null=True)),
],
options={
"db_table": "sentry_deletedorganization",
},
),
migrations.CreateModel(
name="DeletedProject",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("actor_label", models.CharField(max_length=64, null=True)),
("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
("actor_key", models.CharField(max_length=32, null=True)),
("ip_address", models.GenericIPAddressField(null=True, unpack_ipv4=True)),
("date_deleted", models.DateTimeField(default=django.utils.timezone.now)),
("date_created", models.DateTimeField(null=True)),
("reason", models.TextField(blank=True, null=True)),
("slug", models.CharField(max_length=100, null=True)),
("name", models.CharField(max_length=200, null=True)),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
("organization_name", models.CharField(max_length=64, null=True)),
("organization_slug", models.CharField(max_length=50, null=True)),
("platform", models.CharField(max_length=64, null=True)),
],
options={
"db_table": "sentry_deletedproject",
},
),
migrations.CreateModel(
name="DeletedTeam",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("actor_label", models.CharField(max_length=64, null=True)),
("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
("actor_key", models.CharField(max_length=32, null=True)),
("ip_address", models.GenericIPAddressField(null=True, unpack_ipv4=True)),
("date_deleted", models.DateTimeField(default=django.utils.timezone.now)),
("date_created", models.DateTimeField(null=True)),
("reason", models.TextField(blank=True, null=True)),
("name", models.CharField(max_length=64, null=True)),
("slug", models.CharField(max_length=50, null=True)),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
("organization_name", models.CharField(max_length=64, null=True)),
("organization_slug", models.CharField(max_length=50, null=True)),
],
options={
"db_table": "sentry_deletedteam",
},
),
migrations.CreateModel(
name="DocIntegration",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("name", models.CharField(max_length=64)),
(
"slug",
sentry.db.models.fields.slug.SentrySlugField(
db_index=False, max_length=64, unique=True
),
),
("author", models.CharField(max_length=255)),
("description", models.TextField()),
("url", models.URLField()),
("popularity", models.PositiveSmallIntegerField(default=1, null=True)),
("is_draft", models.BooleanField(default=True)),
("metadata", sentry.db.models.fields.jsonfield.JSONField(null=True)),
],
options={
"db_table": "sentry_docintegration",
},
),
migrations.CreateModel(
name="Email",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("email", sentry.db.models.fields.citext.CIEmailField(max_length=75, unique=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
],
options={
"db_table": "sentry_email",
},
),
migrations.CreateModel(
name="Environment",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("organization_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("name", models.CharField(max_length=64)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
],
options={
"db_table": "sentry_environment",
},
),
migrations.CreateModel(
name="ExportedData",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
(
"file_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, null=True
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("date_finished", models.DateTimeField(null=True)),
("date_expired", models.DateTimeField(db_index=True, null=True)),
("query_type", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("query_info", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
],
options={
"db_table": "sentry_exporteddata",
},
),
migrations.CreateModel(
name="File",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("name", models.TextField()),
("type", models.CharField(max_length=64)),
(
"timestamp",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("headers", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
("size", sentry.db.models.fields.bounded.WrappingU32IntegerField(null=True)),
("checksum", models.CharField(db_index=True, max_length=40, null=True)),
("path", models.TextField(null=True)),
],
options={
"db_table": "sentry_file",
},
bases=(models.Model, sentry.models.files.abstractfile._Parent),
),
migrations.CreateModel(
name="FileBlob",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("path", models.TextField(null=True)),
("size", sentry.db.models.fields.bounded.WrappingU32IntegerField(null=True)),
("checksum", models.CharField(max_length=40, unique=True)),
(
"timestamp",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
],
options={
"db_table": "sentry_fileblob",
},
bases=(models.Model, sentry.models.files.abstractfileblob._Parent),
),
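        # The issue model. Issues are historically "grouped messages", hence
        # db_table="sentry_groupedmessage" and legacy column aliases such as
        # `culprit` being stored in the "view" column.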
migrations.CreateModel(
name="Group",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("logger", models.CharField(blank=True, db_index=True, default="", max_length=64)),
(
"level",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
blank=True, db_index=True, default=40
),
),
("message", models.TextField()),
(
"culprit",
models.CharField(blank=True, db_column="view", max_length=200, null=True),
),
(
"num_comments",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
default=0, null=True
),
),
("platform", models.CharField(max_length=64, null=True)),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, default=0
),
),
("substatus", sentry.db.models.fields.bounded.BoundedIntegerField(null=True)),
(
"times_seen",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, default=1
),
),
(
"last_seen",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
(
"first_seen",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("resolved_at", models.DateTimeField(db_index=True, null=True)),
("active_at", models.DateTimeField(db_index=True, null=True)),
(
"time_spent_total",
sentry.db.models.fields.bounded.BoundedIntegerField(default=0),
),
(
"time_spent_count",
sentry.db.models.fields.bounded.BoundedIntegerField(default=0),
),
("is_public", models.BooleanField(default=False, null=True)),
(
"data",
sentry.db.models.fields.gzippeddict.GzippedDictField(blank=True, null=True),
),
("short_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
(
"type",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_default=1, db_index=True, default=1
),
),
("priority", models.PositiveIntegerField(null=True)),
("priority_locked_at", models.DateTimeField(null=True)),
("seer_fixability_score", models.FloatField(null=True)),
("seer_autofix_last_triggered", models.DateTimeField(null=True)),
],
options={
"verbose_name": "grouped message",
"verbose_name_plural": "grouped messages",
"db_table": "sentry_groupedmessage",
"permissions": (("can_view", "Can view"),),
},
),
migrations.CreateModel(
name="GroupSearchView",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("name", models.TextField(max_length=128)),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, on_delete="CASCADE"
),
),
("visibility", models.CharField(db_default="organization", max_length=16)),
("query", models.TextField()),
(
"query_sort",
models.CharField(
default=sentry.models.savedsearch.SortOptions["DATE"], max_length=16
),
),
("is_all_projects", models.BooleanField(db_default=False)),
("environments", sentry.db.models.fields.array.ArrayField(default=list, null=True)),
("time_filters", models.JSONField(db_default={"period": "14d"})),
],
options={
"db_table": "sentry_groupsearchview",
},
),
migrations.CreateModel(
name="Option",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("key", models.CharField(max_length=128, unique=True)),
("last_updated", models.DateTimeField(default=django.utils.timezone.now)),
(
"last_updated_by",
models.CharField(db_default="unknown", default="unknown", max_length=16),
),
("value", sentry.db.models.fields.picklefield.PickledObjectField(editable=False)),
],
options={
"db_table": "sentry_option",
},
bases=(sentry.backup.mixins.OverwritableConfigMixin, models.Model),
),
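        # Control-silo mapping of organizations: mirrors each organization's
        # slug, status, and feature flags so they can be resolved without a
        # round trip to the owning region.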
migrations.CreateModel(
name="OrganizationMapping",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, unique=True
),
),
("slug", models.SlugField(unique=True)),
("name", models.CharField(max_length=64)),
("date_created", models.DateTimeField(default=django.utils.timezone.now)),
("customer_id", models.CharField(db_index=True, max_length=255, null=True)),
("verified", models.BooleanField(default=False)),
("idempotency_key", models.CharField(max_length=48)),
("region_name", models.CharField(max_length=48)),
("status", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
("allow_joinleave", models.BooleanField(db_default=False, default=False)),
("enhanced_privacy", models.BooleanField(db_default=False, default=False)),
("require_2fa", models.BooleanField(db_default=False, default=False)),
("early_adopter", models.BooleanField(db_default=False, default=False)),
("disable_shared_issues", models.BooleanField(db_default=False, default=False)),
(
"disable_new_visibility_features",
models.BooleanField(db_default=False, default=False),
),
(
"require_email_verification",
models.BooleanField(db_default=False, default=False),
),
("codecov_access", models.BooleanField(db_default=False, default=False)),
(
"disable_member_project_creation",
models.BooleanField(db_default=False, default=False),
),
("prevent_superuser_access", models.BooleanField(db_default=False, default=False)),
("disable_member_invite", models.BooleanField(db_default=False, default=False)),
],
options={
"db_table": "sentry_organizationmapping",
},
),
migrations.CreateModel(
name="RegionRelocationTransfer",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(auto_now=True)),
("date_added", models.DateTimeField(auto_now_add=True)),
("relocation_uuid", sentry.db.models.fields.uuid.UUIDField(max_length=32)),
("org_slug", models.CharField()),
("requesting_region", models.CharField()),
("exporting_region", models.CharField()),
("state", models.CharField(default="request")),
(
"scheduled_for",
models.DateTimeField(default=django.utils.timezone.now, null=True),
),
],
options={
"db_table": "sentry_regionrelocationtransfer",
},
),
migrations.CreateModel(
name="Relay",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("relay_id", models.CharField(max_length=64, unique=True)),
("public_key", models.CharField(max_length=200)),
("first_seen", models.DateTimeField(default=None, null=True)),
("last_seen", models.DateTimeField(default=None, null=True)),
("is_internal", models.BooleanField(default=None, null=True)),
],
options={
"db_table": "sentry_relay",
},
bases=(sentry.backup.mixins.OverwritableConfigMixin, models.Model),
),
migrations.CreateModel(
name="SentryShot",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("uuid", models.UUIDField(db_index=True, default=uuid.uuid4, editable=False)),
("sentry_url", models.URLField()),
("component_identifier", models.CharField()),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE"
),
),
(
"date_added",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
],
options={
"db_table": "sentry_sentryshot",
},
),
migrations.CreateModel(
name="ServiceHook",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("guid", models.CharField(max_length=32, null=True, unique=True)),
(
"application_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.ApiApplication", db_index=True, null=True, on_delete="CASCADE"
),
),
("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True)),
(
"installation_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.SentryAppInstallation",
db_index=True,
null=True,
on_delete="CASCADE",
),
),
(
"project_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, null=True
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, null=True
),
),
("url", models.URLField(max_length=512)),
(
"secret",
models.TextField(default=sentry.sentry_apps.models.servicehook.generate_secret),
),
("events", sentry.db.models.fields.array.ArrayField(null=True)),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, default=0
),
),
("version", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
],
options={
"db_table": "sentry_servicehook",
},
),
migrations.CreateModel(
name="UserRole",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("name", models.CharField(max_length=32, unique=True)),
("permissions", sentry.db.models.fields.array.ArrayField(null=True)),
],
options={
"db_table": "sentry_userrole",
},
bases=(sentry.backup.mixins.OverwritableConfigMixin, models.Model),
),
migrations.CreateModel(
name="AlertRuleActivity",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
("type", models.IntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"alert_rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.alertrule"
),
),
(
"previous_alert_rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="previous_alert_rule",
to="sentry.alertrule",
),
),
],
options={
"db_table": "sentry_alertruleactivity",
},
),
migrations.CreateModel(
name="AlertRuleTrigger",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("label", models.TextField()),
("threshold_type", models.SmallIntegerField(null=True)),
("alert_threshold", models.FloatField()),
("resolve_threshold", models.FloatField(null=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"alert_rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.alertrule"
),
),
],
options={
"db_table": "sentry_alertruletrigger",
},
),
migrations.CreateModel(
name="AlertRuleTriggerAction",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"integration_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Integration",
blank=True,
db_index=True,
null=True,
on_delete="CASCADE",
),
),
(
"sentry_app_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.SentryApp",
blank=True,
db_index=True,
null=True,
on_delete="CASCADE",
),
),
("type", models.SmallIntegerField()),
("target_type", models.SmallIntegerField()),
("target_identifier", models.TextField(null=True)),
("target_display", models.TextField(null=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("sentry_app_config", sentry.db.models.fields.jsonfield.JSONField(null=True)),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_default=0, default=0
),
),
(
"alert_rule_trigger",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.alertruletrigger"
),
),
],
options={
"db_table": "sentry_alertruletriggeraction",
},
),
migrations.CreateModel(
name="ApiApplication",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"client_id",
models.CharField(
default=sentry.models.apiapplication.generate_token,
max_length=64,
unique=True,
),
),
(
"client_secret",
models.TextField(default=sentry.models.apiapplication.generate_token),
),
(
"name",
models.CharField(
blank=True,
default=sentry.models.apiapplication.generate_name,
max_length=64,
),
),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, default=0
),
),
("allowed_origins", models.TextField(blank=True, null=True)),
("redirect_uris", models.TextField()),
("homepage_url", models.URLField(null=True)),
("privacy_url", models.URLField(null=True)),
("terms_url", models.URLField(null=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("scopes", sentry.db.models.fields.array.ArrayField(null=True)),
("requires_org_level_access", models.BooleanField(db_default=False, default=False)),
(
"owner",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"db_table": "sentry_apiapplication",
},
),
migrations.CreateModel(
name="ApiGrant",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"code",
models.CharField(
db_index=True, default=sentry.models.apigrant.generate_code, max_length=64
),
),
(
"expires_at",
models.DateTimeField(
db_index=True, default=sentry.models.apigrant.default_expiration
),
),
("redirect_uri", models.CharField(max_length=255)),
(
"scopes",
bitfield.models.BitField(
[
"project:read",
"project:write",
"project:admin",
"project:releases",
"team:read",
"team:write",
"team:admin",
"event:read",
"event:write",
"event:admin",
"org:read",
"org:write",
"org:admin",
"member:read",
"member:write",
"member:admin",
"openid",
"profile",
"email",
],
default=None,
),
),
(
"scope_list",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(), default=list, size=None
),
),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, null=True, on_delete="CASCADE"
),
),
(
"application",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.apiapplication"
),
),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"db_table": "sentry_apigrant",
},
),
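        # API tokens keep both plaintext (`token`/`refresh_token`) and hashed
        # columns; the hashed variants were introduced and backfilled by the
        # replaced 0647/0726 migrations listed above.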
migrations.CreateModel(
name="ApiToken",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"scopes",
bitfield.models.BitField(
[
"project:read",
"project:write",
"project:admin",
"project:releases",
"team:read",
"team:write",
"team:admin",
"event:read",
"event:write",
"event:admin",
"org:read",
"org:write",
"org:admin",
"member:read",
"member:write",
"member:admin",
"org:integrations",
"alerts:read",
"alerts:write",
"member:invite",
],
default=None,
),
),
(
"scope_list",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(), default=list, size=None
),
),
(
"scoping_organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, null=True, on_delete="CASCADE"
),
),
("name", models.CharField(max_length=255, null=True)),
(
"token",
models.CharField(
default=sentry.models.apitoken.generate_token, max_length=71, unique=True
),
),
("hashed_token", models.CharField(max_length=128, null=True, unique=True)),
("token_type", models.CharField(max_length=7, null=True)),
("token_last_characters", models.CharField(max_length=4, null=True)),
(
"refresh_token",
models.CharField(
default=sentry.models.apitoken.generate_token,
max_length=71,
null=True,
unique=True,
),
),
("hashed_refresh_token", models.CharField(max_length=128, null=True, unique=True)),
(
"expires_at",
models.DateTimeField(
default=sentry.models.apitoken.default_expiration, null=True
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"application",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.apiapplication",
),
),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"db_table": "sentry_apitoken",
},
),
migrations.CreateModel(
name="AuthIdentityReplica",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"auth_identity_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.AuthIdentity", db_index=True, on_delete="CASCADE", unique=True
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, on_delete="CASCADE"
),
),
(
"auth_provider_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.AuthProvider", db_index=True, on_delete="CASCADE"
),
),
("ident", models.CharField(max_length=128)),
("data", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
(
"last_verified",
models.DateTimeField(
db_default=django.db.models.functions.datetime.Now(),
default=django.utils.timezone.now,
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
],
options={
"db_table": "sentry_authidentityreplica",
"unique_together": {("auth_provider_id", "ident"), ("auth_provider_id", "user_id")},
},
),
migrations.CreateModel(
name="Broadcast",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("upstream_id", models.CharField(blank=True, max_length=32, null=True)),
("title", models.CharField(max_length=64)),
("message", models.CharField(max_length=256)),
("link", models.URLField(blank=True, null=True)),
("is_active", models.BooleanField(db_index=True, default=True)),
(
"date_expires",
models.DateTimeField(
blank=True, default=sentry.models.broadcast.default_expiration, null=True
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("media_url", models.URLField(blank=True, null=True)),
("category", models.CharField(blank=True, max_length=32, null=True)),
(
"created_by_id",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"db_table": "sentry_broadcast",
},
),
migrations.CreateModel(
name="CommitAuthor",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("name", models.CharField(max_length=128, null=True)),
("email", models.CharField(max_length=200)),
("external_id", models.CharField(max_length=164, null=True)),
],
options={
"db_table": "sentry_commitauthor",
"unique_together": {
("organization_id", "email"),
("organization_id", "external_id"),
},
},
),
migrations.CreateModel(
name="Commit",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("repository_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("key", models.CharField(max_length=64)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("message", models.TextField(null=True)),
(
"author",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.commitauthor",
),
),
],
options={
"db_table": "sentry_commit",
},
),
migrations.CreateModel(
name="CommitFileChange",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("filename", models.TextField()),
("type", models.CharField(max_length=1)),
(
"commit",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.commit"
),
),
],
options={
"db_table": "sentry_commitfilechange",
},
),
migrations.CreateModel(
name="ControlFileBlobIndex",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("offset", sentry.db.models.fields.bounded.WrappingU32IntegerField()),
(
"blob",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.PROTECT, to="sentry.controlfileblob"
),
),
(
"file",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.controlfile"
),
),
],
options={
"db_table": "sentry_controlfileblobindex",
},
),
migrations.AddField(
model_name="controlfile",
name="blobs",
field=models.ManyToManyField(
through="sentry.ControlFileBlobIndex", to="sentry.controlfileblob"
),
),
migrations.CreateModel(
name="ControlFileBlobOwner",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
(
"blob",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.controlfileblob"
),
),
],
options={
"db_table": "sentry_controlfileblobowner",
},
),
migrations.CreateModel(
name="ControlImportChunk",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"import_uuid",
sentry.db.models.fields.uuid.UUIDField(db_index=True, max_length=32),
),
("model", models.CharField(db_index=True, max_length=64)),
("min_ordinal", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("max_ordinal", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("min_source_pk", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("max_source_pk", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"min_inserted_pk",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
(
"max_inserted_pk",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
("inserted_map", models.JSONField(default=dict)),
("existing_map", models.JSONField(default=dict)),
("overwrite_map", models.JSONField(default=dict)),
("inserted_identifiers", models.JSONField(default=dict)),
],
options={
"db_table": "sentry_controlimportchunk",
"unique_together": {("import_uuid", "model", "min_ordinal")},
},
),
migrations.CreateModel(
name="ControlImportChunkReplica",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"import_uuid",
sentry.db.models.fields.uuid.UUIDField(db_index=True, max_length=32),
),
("model", models.CharField(db_index=True, max_length=64)),
("min_ordinal", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("max_ordinal", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("min_source_pk", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("max_source_pk", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"min_inserted_pk",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
(
"max_inserted_pk",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
("inserted_map", models.JSONField(default=dict)),
("existing_map", models.JSONField(default=dict)),
("overwrite_map", models.JSONField(default=dict)),
("inserted_identifiers", models.JSONField(default=dict)),
],
options={
"db_table": "sentry_controlimportchunkreplica",
"unique_together": {("import_uuid", "model", "min_ordinal")},
},
),
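        # Outbox for hybrid-cloud delivery: mutations are recorded here and
        # drained asynchronously per region, with composite indexes ordered for
        # shard-by-shard processing.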
migrations.CreateModel(
name="ControlOutbox",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("shard_scope", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("shard_identifier", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("category", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("object_identifier", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("payload", sentry.db.models.fields.jsonfield.JSONField(null=True)),
("scheduled_from", models.DateTimeField(default=django.utils.timezone.now)),
(
"scheduled_for",
models.DateTimeField(
default=datetime.datetime(2016, 8, 1, 0, 0, tzinfo=datetime.UTC)
),
),
(
"date_added",
models.DateTimeField(
db_default=django.db.models.functions.datetime.Now(),
default=django.utils.timezone.now,
editable=False,
),
),
("region_name", models.CharField(max_length=48)),
],
options={
"db_table": "sentry_controloutbox",
"indexes": [
models.Index(
fields=[
"region_name",
"shard_scope",
"shard_identifier",
"category",
"object_identifier",
],
name="sentry_cont_region__1c1c72_idx",
),
models.Index(
fields=["region_name", "shard_scope", "shard_identifier", "scheduled_for"],
name="sentry_cont_region__0c4512_idx",
),
models.Index(
fields=["region_name", "shard_scope", "shard_identifier", "id"],
name="sentry_cont_region__a95d82_idx",
),
],
},
),
migrations.CreateModel(
name="ControlTombstone",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("table_name", models.CharField(max_length=48)),
("object_identifier", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("created_at", models.DateTimeField(default=django.utils.timezone.now)),
],
options={
"db_table": "sentry_controltombstone",
"indexes": [
models.Index(
fields=["table_name", "object_identifier"],
name="sentry_cont_table_n_940c6d_idx",
)
],
},
),
migrations.CreateModel(
name="DashboardFavoriteUser",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(auto_now=True)),
("date_added", models.DateTimeField(auto_now_add=True)),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, on_delete="CASCADE"
),
),
(
"dashboard",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.dashboard"
),
),
],
options={
"db_table": "sentry_dashboardfavoriteuser",
},
),
migrations.CreateModel(
name="DashboardPermissions",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("is_editable_by_everyone", models.BooleanField(db_default=True, default=True)),
(
"dashboard",
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE,
related_name="permissions",
to="sentry.dashboard",
),
),
],
options={
"db_table": "sentry_dashboardpermissions",
},
),
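        # Dashboard widgets. `discover_widget_split` and `dataset_source` carry
        # the Discover dataset-split state added by the replaced 0656 and 0744
        # migrations.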
migrations.CreateModel(
name="DashboardWidget",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("order", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("title", models.CharField(max_length=255)),
("description", models.CharField(max_length=255, null=True)),
("thresholds", sentry.db.models.fields.jsonfield.JSONField(null=True)),
("interval", models.CharField(max_length=10, null=True)),
("display_type", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"widget_type",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
("limit", models.IntegerField(null=True)),
("detail", sentry.db.models.fields.jsonfield.JSONField(null=True)),
(
"discover_widget_split",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
(
"dataset_source",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_default=0, default=0
),
),
(
"dashboard",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.dashboard"
),
),
],
options={
"db_table": "sentry_dashboardwidget",
},
),
migrations.CreateModel(
name="DashboardWidgetQuery",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("name", models.CharField(max_length=255)),
("fields", sentry.db.models.fields.array.ArrayField(null=True)),
("conditions", models.TextField()),
(
"aggregates",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(), null=True, size=None
),
),
(
"columns",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(), null=True, size=None
),
),
(
"field_aliases",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(), null=True, size=None
),
),
("orderby", models.TextField(default="")),
("order", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"date_modified",
models.DateTimeField(
db_default=django.db.models.functions.datetime.Now(),
default=django.utils.timezone.now,
),
),
("is_hidden", models.BooleanField(db_default=False, default=False)),
("selected_aggregate", models.IntegerField(null=True)),
(
"widget",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.dashboardwidget"
),
),
],
options={
"db_table": "sentry_dashboardwidgetquery",
},
),
migrations.CreateModel(
name="DashboardWidgetQueryOnDemand",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("spec_hashes", sentry.db.models.fields.array.ArrayField(null=True)),
("spec_version", models.IntegerField(null=True)),
("extraction_state", models.CharField(max_length=30)),
("date_modified", models.DateTimeField(default=django.utils.timezone.now)),
(
"date_added",
models.DateTimeField(
db_default=django.db.models.functions.datetime.Now(),
default=django.utils.timezone.now,
),
),
(
"dashboard_widget_query",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="sentry.dashboardwidgetquery",
),
),
],
options={
"db_table": "sentry_dashboardwidgetqueryondemand",
},
),
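        # Lookup table mapping a (debug_id, source_file_type) pair to the
        # ArtifactBundle that contains it, scoped by organization_id.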
migrations.CreateModel(
name="DebugIdArtifactBundle",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("debug_id", models.UUIDField()),
("source_file_type", models.IntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"artifact_bundle",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.artifactbundle"
),
),
],
options={
"db_table": "sentry_debugidartifactbundle",
},
),
migrations.CreateModel(
name="DocIntegrationAvatar",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("ident", models.CharField(db_index=True, max_length=32, unique=True)),
(
"control_file_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True, unique=True),
),
("avatar_type", models.PositiveSmallIntegerField(db_default=0, default=0)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"doc_integration",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="avatar",
to="sentry.docintegration",
),
),
],
options={
"db_table": "sentry_docintegrationavatar",
},
),
migrations.CreateModel(
name="EventAttachment",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"group_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, null=True
),
),
("event_id", models.CharField(db_index=True, max_length=32)),
("type", models.CharField(db_index=True, max_length=64)),
("name", models.TextField()),
("content_type", models.TextField(null=True)),
("size", sentry.db.models.fields.bounded.BoundedIntegerField(null=True)),
("sha1", models.CharField(max_length=40, null=True)),
(
"date_added",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
(
"file_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, null=True
),
),
("blob_path", models.TextField(null=True)),
],
options={
"db_table": "sentry_eventattachment",
"indexes": [
models.Index(
fields=["project_id", "date_added"], name="sentry_even_project_62b83b_idx"
),
models.Index(
fields=["project_id", "event_id"], name="sentry_even_project_974f7b_idx"
),
],
},
),
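        # Chunks of an ExportedData payload: each row points at a FileBlob by id and
        # records its offset within the assembled export.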
migrations.CreateModel(
name="ExportedDataBlob",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("blob_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True)),
("offset", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"data_export",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.exporteddata"
),
),
],
options={
"db_table": "sentry_exporteddatablob",
},
),
migrations.AddField(
model_name="artifactbundle",
name="file",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.file"
),
),
migrations.AddField(
model_name="file",
name="blob",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="legacy_blob",
to="sentry.fileblob",
),
),
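        # FileBlobIndex maps each FileBlob chunk to its offset within a File; the
        # blob FK uses PROTECT so a chunk cannot be deleted while still referenced.
        # File.blobs (added below) is the many-to-many view through this table.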
migrations.CreateModel(
name="FileBlobIndex",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("offset", sentry.db.models.fields.bounded.WrappingU32IntegerField()),
(
"blob",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.PROTECT, to="sentry.fileblob"
),
),
(
"file",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.file"
),
),
],
options={
"db_table": "sentry_fileblobindex",
},
),
migrations.AddField(
model_name="file",
name="blobs",
field=models.ManyToManyField(through="sentry.FileBlobIndex", to="sentry.fileblob"),
),
migrations.CreateModel(
name="FileBlobOwner",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
(
"blob",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.fileblob"
),
),
],
options={
"db_table": "sentry_fileblobowner",
},
),
migrations.CreateModel(
name="Activity",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("type", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("ident", models.CharField(max_length=64, null=True)),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
("datetime", models.DateTimeField(default=django.utils.timezone.now)),
("data", sentry.db.models.fields.gzippeddict.GzippedDictField(null=True)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.group"
),
),
],
options={
"db_table": "sentry_activity",
},
),
migrations.CreateModel(
name="GroupCommitResolution",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("group_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"commit_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
(
"datetime",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
],
options={
"db_table": "sentry_groupcommitresolution",
"unique_together": {("group_id", "commit_id")},
},
),
migrations.CreateModel(
name="GroupHash",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("hash", models.CharField(max_length=32)),
(
"group_tombstone_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, null=True
),
),
("state", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.group"
),
),
],
options={
"db_table": "sentry_grouphash",
},
),
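        # One-to-one companion to GroupHash (related_name "_metadata") recording the
        # grouping config used and Seer similarity results; seer_matched_grouphash
        # points back at another GroupHash and is nulled out on delete.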
migrations.CreateModel(
name="GroupHashMetadata",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("schema_version", models.CharField(null=True)),
("platform", models.CharField(null=True)),
("latest_grouping_config", models.CharField(null=True)),
("hash_basis", models.CharField(null=True)),
("hashing_metadata", sentry.db.models.fields.jsonfield.JSONField(null=True)),
("seer_date_sent", models.DateTimeField(null=True)),
("seer_event_sent", models.CharField(max_length=32, null=True)),
("seer_model", models.CharField(null=True)),
("seer_match_distance", models.FloatField(null=True)),
(
"grouphash",
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE,
related_name="_metadata",
to="sentry.grouphash",
),
),
(
"seer_matched_grouphash",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="seer_matchees",
to="sentry.grouphash",
),
),
],
options={
"db_table": "sentry_grouphashmetadata",
},
),
migrations.CreateModel(
name="GroupMeta",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("key", models.CharField(max_length=64)),
("value", models.TextField()),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.group"
),
),
],
options={
"db_table": "sentry_groupmeta",
},
),
migrations.CreateModel(
name="GroupRedirect",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
("group_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True)),
(
"previous_group_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(unique=True),
),
(
"previous_short_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
("previous_project_slug", models.SlugField(null=True)),
],
options={
"db_table": "sentry_groupredirect",
"unique_together": {
("organization_id", "previous_short_id", "previous_project_slug")
},
},
),
migrations.CreateModel(
name="GroupRelease",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"project_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("group_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"release_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
),
("environment", models.CharField(default="", max_length=64)),
("first_seen", models.DateTimeField(default=django.utils.timezone.now)),
("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
],
options={
"db_table": "sentry_grouprelease",
"indexes": [
models.Index(
fields=["group_id", "first_seen"], name="sentry_grou_group_i_6eaff8_idx"
),
models.Index(
fields=["group_id", "-last_seen"], name="sentry_grou_group_i_b6e502_idx"
),
],
"unique_together": {("group_id", "release_id", "environment")},
},
),
migrations.CreateModel(
name="GroupSnooze",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("until", models.DateTimeField(null=True)),
("count", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True)),
("window", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True)),
(
"user_count",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
(
"user_window",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
("state", sentry.db.models.fields.jsonfield.JSONField(null=True)),
(
"actor_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.group", unique=True
),
),
],
options={
"db_table": "sentry_groupsnooze",
},
),
migrations.CreateModel(
name="IdentityProvider",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("type", models.CharField(max_length=64)),
("config", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("external_id", models.CharField(max_length=64, null=True)),
],
options={
"db_table": "sentry_identityprovider",
"unique_together": {("type", "external_id")},
},
),
migrations.CreateModel(
name="Identity",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("external_id", models.TextField()),
("data", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
("status", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0)),
("scopes", sentry.db.models.fields.array.ArrayField(null=True)),
("date_verified", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
(
"idp",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.identityprovider"
),
),
],
options={
"db_table": "sentry_identity",
},
),
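        # Incident models for alert rules: Incident keeps a PROTECT FK to its
        # AlertRule, and IncidentTrigger (below) joins incidents to alert rule
        # triggers, surfaced as the "triggered_incidents" many-to-many.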
migrations.CreateModel(
name="Incident",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("identifier", models.IntegerField()),
(
"detection_uuid",
sentry.db.models.fields.uuid.UUIDField(db_index=True, max_length=32, null=True),
),
("status", models.PositiveSmallIntegerField(default=1)),
("status_method", models.PositiveSmallIntegerField(default=3)),
("type", models.PositiveSmallIntegerField()),
("title", models.TextField()),
("date_started", models.DateTimeField(default=django.utils.timezone.now)),
("date_detected", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("date_closed", models.DateTimeField(null=True)),
(
"alert_rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.PROTECT, to="sentry.alertrule"
),
),
],
options={
"db_table": "sentry_incident",
},
),
migrations.CreateModel(
name="IncidentActivity",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="CASCADE"
),
),
("type", models.IntegerField()),
("value", models.TextField(null=True)),
("previous_value", models.TextField(null=True)),
("comment", models.TextField(null=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("notification_uuid", models.UUIDField(null=True)),
(
"incident",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.incident"
),
),
],
options={
"db_table": "sentry_incidentactivity",
},
),
migrations.CreateModel(
name="IncidentTrigger",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("status", models.SmallIntegerField()),
("date_modified", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"alert_rule_trigger",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.alertruletrigger"
),
),
(
"incident",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_index=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.incident",
),
),
],
options={
"db_table": "sentry_incidenttrigger",
},
),
migrations.AddField(
model_name="alertruletrigger",
name="triggered_incidents",
field=models.ManyToManyField(
related_name="triggers", through="sentry.IncidentTrigger", to="sentry.incident"
),
),
migrations.CreateModel(
name="Integration",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("provider", models.CharField(max_length=64)),
("external_id", models.CharField(max_length=64)),
("name", models.CharField(max_length=200)),
("metadata", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
default=0, null=True
),
),
],
options={
"db_table": "sentry_integration",
"unique_together": {("provider", "external_id")},
},
),
migrations.CreateModel(
name="IntegrationExternalProject",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_integration_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("name", models.CharField(max_length=128)),
("external_id", models.CharField(max_length=64)),
("resolved_status", models.CharField(max_length=64)),
("unresolved_status", models.CharField(max_length=64)),
],
options={
"db_table": "sentry_integrationexternalproject",
"unique_together": {("organization_integration_id", "external_id")},
},
),
migrations.CreateModel(
name="IntegrationFeature",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("target_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"target_type",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0),
),
("user_description", models.TextField(null=True)),
("feature", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
],
options={
"db_table": "sentry_integrationfeature",
"unique_together": {("target_id", "target_type", "feature")},
},
),
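        # Note: the db_table here is "sentry_latestrelease", which does not match
        # the model name; rows are unique per (repository_id, environment_id).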
migrations.CreateModel(
name="LatestRepoReleaseEnvironment",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("repository_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("environment_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("release_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("deploy_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
("commit_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
],
options={
"db_table": "sentry_latestrelease",
"unique_together": {("repository_id", "environment_id")},
},
),
migrations.CreateModel(
name="LostPasswordHash",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("hash", models.CharField(max_length=32)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
unique=True,
),
),
],
options={
"db_table": "sentry_lostpasswordhash",
},
),
migrations.CreateModel(
name="NotificationSettingOption",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("scope_type", models.CharField(max_length=32)),
("scope_identifier", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"team_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Team", db_index=True, null=True, on_delete="CASCADE"
),
),
("type", models.CharField(max_length=32)),
("value", models.CharField(max_length=32)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"db_table": "sentry_notificationsettingoption",
},
),
migrations.CreateModel(
name="NotificationSettingProvider",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("scope_type", models.CharField(max_length=32)),
("scope_identifier", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"team_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Team", db_index=True, null=True, on_delete="CASCADE"
),
),
("type", models.CharField(max_length=32)),
("value", models.CharField(max_length=32)),
("provider", models.CharField(max_length=32)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"db_table": "sentry_notificationsettingprovider",
},
),
migrations.CreateModel(
name="Organization",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("name", models.CharField(max_length=64)),
("slug", sentry.db.models.fields.slug.SentryOrgSlugField(unique=True)),
("status", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("default_role", models.CharField(default="member", max_length=32)),
("is_test", models.BooleanField(db_default=False, default=False)),
(
"flags",
bitfield.models.BitField(
[
"allow_joinleave",
"enhanced_privacy",
"disable_shared_issues",
"early_adopter",
"require_2fa",
"disable_new_visibility_features",
"require_email_verification",
"codecov_access",
"disable_member_project_creation",
"prevent_superuser_access",
"disable_member_invite",
],
default=1,
),
),
],
options={
"db_table": "sentry_organization",
"indexes": [
sentry.db.models.indexes.IndexWithPostgresNameLimits(
django.db.models.functions.text.Upper("slug"),
name="sentry_organization_slug_upper_idx",
)
],
},
),
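        # With Organization created, the operations below define org-scoped models
        # and attach "organization" foreign keys (via AddField) to models declared
        # earlier in this migration, e.g. Incident, GroupSearchView, and Dashboard.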
migrations.CreateModel(
name="NotificationAction",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"integration_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Integration",
blank=True,
db_index=True,
null=True,
on_delete="CASCADE",
),
),
(
"sentry_app_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.SentryApp",
blank=True,
db_index=True,
null=True,
on_delete="CASCADE",
),
),
("type", models.SmallIntegerField()),
("target_type", models.SmallIntegerField()),
("target_identifier", models.TextField(null=True)),
("target_display", models.TextField(null=True)),
("trigger_type", models.SmallIntegerField()),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_notificationaction",
},
),
migrations.AddField(
model_name="incident",
name="organization",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
migrations.CreateModel(
name="GroupSearchViewStarred",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(auto_now=True)),
("date_added", models.DateTimeField(auto_now_add=True)),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, on_delete="CASCADE"
),
),
("position", models.PositiveSmallIntegerField()),
(
"group_search_view",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.groupsearchview"
),
),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_groupsearchviewstarred",
},
),
migrations.CreateModel(
name="GroupSearchViewLastVisited",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(auto_now=True)),
("date_added", models.DateTimeField(auto_now_add=True)),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, on_delete="CASCADE"
),
),
("last_visited", models.DateTimeField(default=django.utils.timezone.now)),
(
"group_search_view",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.groupsearchview"
),
),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_groupsearchviewlastvisited",
},
),
migrations.AddField(
model_name="groupsearchview",
name="organization",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
migrations.CreateModel(
name="FeatureAdoption",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("feature_id", models.PositiveIntegerField()),
("date_completed", models.DateTimeField(default=django.utils.timezone.now)),
("complete", models.BooleanField(default=False)),
("applicable", models.BooleanField(default=True)),
("data", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_featureadoption",
},
),
migrations.CreateModel(
name="ExternalIssue",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"integration_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Integration", db_index=True, on_delete="CASCADE"
),
),
("key", models.CharField(max_length=256)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("title", models.TextField(null=True)),
("description", models.TextField(null=True)),
("metadata", sentry.db.models.fields.jsonfield.JSONField(null=True)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.organization",
),
),
],
options={
"db_table": "sentry_externalissue",
},
),
migrations.AddField(
model_name="exporteddata",
name="organization",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
migrations.CreateModel(
name="DiscoverSavedQuery",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"created_by_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
("name", models.CharField(max_length=255)),
("query", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
("version", models.IntegerField(null=True)),
("date_created", models.DateTimeField(auto_now_add=True)),
("date_updated", models.DateTimeField(auto_now=True)),
(
"visits",
sentry.db.models.fields.bounded.BoundedBigIntegerField(default=1, null=True),
),
(
"last_visited",
models.DateTimeField(default=django.utils.timezone.now, null=True),
),
("is_homepage", models.BooleanField(blank=True, null=True)),
(
"dataset",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_default=0, default=0
),
),
(
"dataset_source",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_default=0, default=0
),
),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_discoversavedquery",
},
),
migrations.CreateModel(
name="DataSecrecyWaiver",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("access_start", models.DateTimeField(default=django.utils.timezone.now)),
("access_end", models.DateTimeField(default=django.utils.timezone.now)),
(
"zendesk_tickets",
sentry.db.models.fields.array.ArrayField(default=list, null=True),
),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="sentry.organization",
unique=True,
),
),
],
options={
"db_table": "sentry_datasecrecywaiver",
},
),
migrations.CreateModel(
name="DashboardTombstone",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"slug",
sentry.db.models.fields.slug.SentrySlugField(db_index=False, max_length=255),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_dashboardtombstone",
},
),
migrations.AddField(
model_name="dashboard",
name="organization",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
migrations.CreateModel(
name="CustomDynamicSamplingRule",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("is_active", models.BooleanField(default=True)),
("is_org_level", models.BooleanField(default=False)),
("rule_id", models.IntegerField(default=0)),
("condition", models.TextField()),
("sample_rate", models.FloatField(default=0.0)),
("start_date", models.DateTimeField(default=django.utils.timezone.now)),
("end_date", models.DateTimeField()),
("num_samples", models.IntegerField()),
("condition_hash", models.CharField(max_length=40)),
("query", models.TextField(null=True)),
(
"created_by_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", blank=True, db_index=True, null=True, on_delete="CASCADE"
),
),
("notification_sent", models.BooleanField(blank=True, null=True)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_customdynamicsamplingrule",
},
),
migrations.CreateModel(
name="AuthProviderReplica",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"auth_provider_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.AuthProvider", db_index=True, on_delete="CASCADE", unique=True
),
),
("provider", models.CharField(max_length=128)),
("config", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
(
"default_role",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=50),
),
("default_global_access", models.BooleanField(default=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("scim_enabled", models.BooleanField()),
("allow_unlinked", models.BooleanField()),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="sentry.organization",
unique=True,
),
),
],
options={
"db_table": "sentry_authproviderreplica",
},
),
migrations.AddField(
model_name="alertrule",
name="organization",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
migrations.CreateModel(
name="OrganizationAvatar",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("ident", models.CharField(db_index=True, max_length=32, unique=True)),
(
"file_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True, unique=True),
),
("avatar_type", models.PositiveSmallIntegerField(default=0)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="avatar",
to="sentry.organization",
unique=True,
),
),
],
options={
"db_table": "sentry_organizationavatar",
},
),
migrations.CreateModel(
name="OrganizationIntegration",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE"
),
),
("config", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
(
"default_auth_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, null=True
),
),
("status", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0)),
("grace_period_end", models.DateTimeField(blank=True, db_index=True, null=True)),
(
"integration",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.integration"
),
),
],
options={
"db_table": "sentry_organizationintegration",
},
),
migrations.CreateModel(
name="OrganizationMember",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", blank=True, db_index=True, null=True, on_delete="CASCADE"
),
),
("email", models.EmailField(blank=True, max_length=75, null=True)),
("role", models.CharField(default="member", max_length=32)),
(
"flags",
bitfield.models.BitField(
[
"sso:linked",
"sso:invalid",
"member-limit:restricted",
"idp:provisioned",
"idp:role-restricted",
"partnership:restricted",
],
default=0,
),
),
("token", models.CharField(blank=True, max_length=64, null=True, unique=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("token_expires_at", models.DateTimeField(default=None, null=True)),
("has_global_access", models.BooleanField(default=True)),
(
"inviter_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", blank=True, db_index=True, null=True, on_delete="SET_NULL"
),
),
("invite_status", models.PositiveSmallIntegerField(default=0, null=True)),
(
"type",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
blank=True, default=50
),
),
("user_is_active", models.BooleanField(db_default=True, default=True)),
("user_email", models.CharField(blank=True, max_length=75, null=True)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="member_set",
to="sentry.organization",
),
),
],
options={
"db_table": "sentry_organizationmember",
},
),
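        # OrganizationMemberInvite mirrors the OrganizationMember bit flags above
        # (sso:linked, idp:provisioned, ...) as individual boolean columns, and
        # generates its token and expiry via module-level defaults.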
migrations.CreateModel(
name="OrganizationMemberInvite",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(auto_now=True)),
("date_added", models.DateTimeField(auto_now_add=True)),
(
"inviter_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", blank=True, db_index=True, null=True, on_delete="SET_NULL"
),
),
("invite_status", models.PositiveSmallIntegerField(default=0)),
("email", models.EmailField(max_length=75)),
("role", models.CharField(default="member", max_length=32)),
("organization_member_team_data", models.JSONField(default=list)),
(
"token",
models.CharField(
default=sentry.models.organizationmemberinvite.generate_token,
max_length=64,
unique=True,
),
),
(
"token_expires_at",
models.DateTimeField(
default=sentry.models.organizationmemberinvite.default_expiration
),
),
("sso_linked", models.BooleanField(default=False)),
("sso_invalid", models.BooleanField(default=False)),
("member_limit_restricted", models.BooleanField(default=False)),
("idp_provisioned", models.BooleanField(db_default=False, default=False)),
("idp_role_restricted", models.BooleanField(default=False)),
("partnership_restricted", models.BooleanField(default=False)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="invite_set",
to="sentry.organization",
),
),
(
"organization_member",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organizationmember"
),
),
],
options={
"db_table": "sentry_organizationmemberinvite",
},
),
migrations.CreateModel(
name="OrganizationMemberMapping",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE"
),
),
(
"organizationmember_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, null=True
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("role", models.CharField(default="member", max_length=32)),
("email", models.EmailField(blank=True, max_length=75, null=True)),
("invite_status", models.PositiveSmallIntegerField(default=0, null=True)),
(
"inviter",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="inviter_orgmembermapping_set",
to=settings.AUTH_USER_MODEL,
),
),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="orgmembermapping_set",
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"db_table": "sentry_organizationmembermapping",
},
),
migrations.CreateModel(
name="OrganizationMemberTeamReplica",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedAutoField(
primary_key=True, serialize=False
),
),
(
"team_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Team", db_index=True, on_delete="CASCADE"
),
),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE"
),
),
(
"organizationmember_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
(
"organizationmemberteam_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("is_active", models.BooleanField()),
("role", models.CharField(blank=True, max_length=32, null=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
],
options={
"db_table": "sentry_organizationmember_teamsreplica",
"unique_together": {("team_id", "organizationmember_id", "organization_id")},
},
),
migrations.CreateModel(
name="OrganizationOption",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("key", models.CharField(max_length=64)),
("value", sentry.db.models.fields.picklefield.PickledObjectField(editable=False)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_organizationoptions",
},
),
migrations.CreateModel(
name="OrganizationSlugReservation",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("slug", models.SlugField(unique=True)),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.organization", db_index=True, on_delete="CASCADE"
),
),
(
"user_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, null=True
),
),
("region_name", models.CharField(max_length=48)),
(
"reservation_type",
sentry.db.models.fields.bounded.BoundedBigIntegerField(default=0),
),
(
"date_added",
models.DateTimeField(default=django.utils.timezone.now, editable=False),
),
],
options={
"db_table": "sentry_organizationslugreservation",
"unique_together": {("organization_id", "reservation_type")},
},
),
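        # OrgAuthToken stores only the hashed token plus its last four characters
        # (token_last_characters); scope_list entries are validated by
        # sentry.models.orgauthtoken.validate_scope_list.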
migrations.CreateModel(
name="OrgAuthToken",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE"
),
),
("token_hashed", models.TextField(unique=True)),
("token_last_characters", models.CharField(max_length=4, null=True)),
("name", models.CharField(max_length=255)),
(
"scope_list",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(),
default=list,
size=None,
validators=[sentry.models.orgauthtoken.validate_scope_list],
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("date_last_used", models.DateTimeField(blank=True, null=True)),
(
"project_last_used_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Project", blank=True, db_index=True, null=True, on_delete="SET_NULL"
),
),
("date_deactivated", models.DateTimeField(blank=True, null=True)),
(
"created_by",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"db_table": "sentry_orgauthtoken",
},
),
migrations.CreateModel(
name="PerfStringIndexer",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("string", models.CharField(max_length=200)),
("organization_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"last_seen",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("retention_days", models.IntegerField(default=90)),
(
"use_case_id",
models.CharField(
db_default="performance", default="performance", max_length=120
),
),
],
options={
"db_table": "sentry_perfstringindexer",
"constraints": [
models.UniqueConstraint(
fields=("string", "organization_id", "use_case_id"),
name="perf_unique_org_string_usecase",
)
],
},
),
migrations.CreateModel(
name="Project",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("slug", sentry.db.models.fields.slug.SentrySlugField(max_length=100, null=True)),
("name", models.CharField(max_length=200)),
("forced_color", models.CharField(blank=True, max_length=6, null=True)),
("public", models.BooleanField(default=False)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, default=0
),
),
("first_event", models.DateTimeField(null=True)),
("external_id", models.CharField(max_length=256, null=True)),
(
"flags",
bitfield.models.BitField(
[
"has_releases",
"has_issue_alerts_targeting",
"has_transactions",
"has_alert_filters",
"has_sessions",
"has_profiles",
"has_replays",
"has_feedbacks",
"has_new_feedbacks",
"spike_protection_error_currently_active",
"spike_protection_transaction_currently_active",
"spike_protection_attachment_currently_active",
"has_minified_stack_trace",
"has_cron_monitors",
"has_cron_checkins",
"has_sourcemaps",
"has_custom_metrics",
"has_high_priority_alerts",
"has_insights_http",
"has_insights_db",
"has_insights_assets",
"has_insights_app_start",
"has_insights_screen_load",
"has_insights_vitals",
"has_insights_caches",
"has_insights_queues",
"has_insights_llm_monitoring",
"has_flags",
],
default=10,
null=True,
),
),
("platform", models.CharField(max_length=64, null=True)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_project",
},
),
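        # As with Organization above, the remaining operations wire earlier models
        # to Project, mostly through explicit join tables (IncidentProject,
        # DashboardProject, ...) exposed as ManyToManyField(through=...).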
migrations.CreateModel(
name="PlatformExternalIssue",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("service_type", models.CharField(max_length=64)),
("display_name", models.TextField()),
("web_url", models.URLField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
db_index=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.group",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
],
options={
"db_table": "sentry_platformexternalissue",
},
),
migrations.CreateModel(
name="OrganizationOnboardingTask",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
("status", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("completion_seen", models.DateTimeField(null=True)),
("date_completed", models.DateTimeField(default=django.utils.timezone.now)),
("data", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
("task", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="sentry.project",
),
),
],
options={
"db_table": "sentry_organizationonboardingtask",
},
),
migrations.CreateModel(
name="NotificationActionProject",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"action",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.notificationaction"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_notificationactionproject",
},
),
migrations.AddField(
model_name="notificationaction",
name="projects",
field=models.ManyToManyField(
through="sentry.NotificationActionProject", to="sentry.project"
),
),
migrations.CreateModel(
name="IncidentProject",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"incident",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.incident"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
db_index=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
],
options={
"db_table": "sentry_incidentproject",
},
),
migrations.AddField(
model_name="incident",
name="projects",
field=models.ManyToManyField(
related_name="incidents", through="sentry.IncidentProject", to="sentry.project"
),
),
migrations.CreateModel(
name="GroupTombstone",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"previous_group_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(unique=True),
),
(
"level",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
blank=True, default=40
),
),
("message", models.TextField()),
("culprit", models.CharField(blank=True, max_length=200, null=True)),
(
"data",
sentry.db.models.fields.gzippeddict.GzippedDictField(blank=True, null=True),
),
(
"actor_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_grouptombstone",
},
),
migrations.CreateModel(
name="GroupShare",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"uuid",
models.CharField(
default=sentry.models.groupshare.default_uuid, max_length=32, unique=True
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="CASCADE"
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.group", unique=True
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_groupshare",
},
),
migrations.CreateModel(
name="GroupSeen",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=False, on_delete="CASCADE"
),
),
("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.group"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_groupseen",
},
),
migrations.CreateModel(
name="GroupSearchViewProject",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(auto_now=True)),
("date_added", models.DateTimeField(auto_now_add=True)),
(
"group_search_view",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.groupsearchview"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_groupsearchviewproject",
},
),
migrations.AddField(
model_name="groupsearchview",
name="projects",
field=models.ManyToManyField(
through="sentry.GroupSearchViewProject", to="sentry.project"
),
),
migrations.CreateModel(
name="GroupOpenPeriod",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(auto_now=True)),
("date_added", models.DateTimeField(auto_now_add=True)),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
("date_started", models.DateTimeField(default=django.utils.timezone.now)),
("date_ended", models.DateTimeField(null=True)),
("data", models.JSONField(default=dict)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.group"
),
),
(
"resolution_activity",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="sentry.activity",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_groupopenperiod",
},
),
migrations.CreateModel(
name="GroupLink",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"linked_type",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=1),
),
("linked_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"relationship",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=2),
),
("data", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
(
"datetime",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
db_index=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.group",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
],
options={
"db_table": "sentry_grouplink",
},
),
migrations.CreateModel(
name="GroupInbox",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("reason", models.PositiveSmallIntegerField(default=0)),
("reason_details", sentry.db.models.fields.jsonfield.JSONField(null=True)),
(
"date_added",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.group",
unique=True,
),
),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.organization",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
],
options={
"db_table": "sentry_groupinbox",
},
),
migrations.AddField(
model_name="grouphash",
name="project",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
migrations.CreateModel(
name="GroupEmailThread",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("email", models.EmailField(max_length=75)),
("msgid", models.CharField(max_length=100)),
("date", models.DateTimeField(db_index=True, default=django.utils.timezone.now)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="groupemail_set",
to="sentry.group",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="groupemail_set",
to="sentry.project",
),
),
],
options={
"db_table": "sentry_groupemailthread",
},
),
migrations.CreateModel(
name="GroupBookmark",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, on_delete="CASCADE"
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="bookmark_set",
to="sentry.group",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="bookmark_set",
to="sentry.project",
),
),
],
options={
"db_table": "sentry_groupbookmark",
},
),
migrations.AddField(
model_name="group",
name="project",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
migrations.CreateModel(
name="EnvironmentProject",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("is_hidden", models.BooleanField(null=True)),
(
"environment",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.environment"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_environmentproject",
},
),
migrations.AddField(
model_name="environment",
name="projects",
field=models.ManyToManyField(through="sentry.EnvironmentProject", to="sentry.project"),
),
migrations.CreateModel(
name="DiscoverSavedQueryProject",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"discover_saved_query",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.discoversavedquery"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_discoversavedqueryproject",
},
),
migrations.AddField(
model_name="discoversavedquery",
name="projects",
field=models.ManyToManyField(
through="sentry.DiscoverSavedQueryProject", to="sentry.project"
),
),
migrations.CreateModel(
name="DashboardProject",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"dashboard",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.dashboard"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_dashboardproject",
},
),
migrations.AddField(
model_name="dashboard",
name="projects",
field=models.ManyToManyField(through="sentry.DashboardProject", to="sentry.project"),
),
migrations.CreateModel(
name="CustomDynamicSamplingRuleProject",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"custom_dynamic_sampling_rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="sentry.customdynamicsamplingrule",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_customdynamicsamplingruleproject",
},
),
migrations.AddField(
model_name="customdynamicsamplingrule",
name="projects",
field=models.ManyToManyField(
related_name="custom_dynamic_sampling_rules",
through="sentry.CustomDynamicSamplingRuleProject",
to="sentry.project",
),
),
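        # Counter keeps exactly one row per project (unique=True on the project FK);
        # its value is a per-project monotonic sequence, used e.g. to hand out
        # issue short IDs.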
migrations.CreateModel(
name="Counter",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("value", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
unique=True,
),
),
],
options={
"db_table": "sentry_projectcounter",
},
),
migrations.CreateModel(
name="AlertRuleProjects",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"alert_rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_index=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.alertrule",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_alertruleprojects",
},
),
migrations.AddField(
model_name="alertrule",
name="projects",
field=models.ManyToManyField(
related_name="alert_rule_projects",
through="sentry.AlertRuleProjects",
to="sentry.project",
),
),
migrations.AddField(
model_name="activity",
name="project",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
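        # ProjectArtifactBundle references its organization and project by raw
        # integer IDs rather than FK fields; only artifact_bundle is a real foreign
        # key, presumably to keep FK maintenance off this high-volume table.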
migrations.CreateModel(
name="ProjectArtifactBundle",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"artifact_bundle",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.artifactbundle"
),
),
],
options={
"db_table": "sentry_projectartifactbundle",
},
),
migrations.CreateModel(
name="ProjectBookmark",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, on_delete="CASCADE"
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
blank=True,
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
],
options={
"db_table": "sentry_projectbookmark",
},
),
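        # ProjectDebugFile keeps its historical names: the table is
        # "sentry_projectdsymfile" and debug_id is stored in db_column="uuid",
        # apparently left over from when only Apple dSYM debug files were supported.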
migrations.CreateModel(
name="ProjectDebugFile",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("checksum", models.CharField(db_index=True, max_length=40, null=True)),
("object_name", models.TextField()),
("cpu_name", models.CharField(max_length=40)),
(
"project_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, null=True
),
),
("debug_id", models.CharField(db_column="uuid", max_length=64)),
("code_id", models.CharField(max_length=64, null=True)),
("data", sentry.db.models.fields.jsonfield.JSONField(null=True)),
(
"date_accessed",
models.DateTimeField(
db_default=django.db.models.functions.datetime.Now(),
default=django.utils.timezone.now,
),
),
(
"file",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.file"
),
),
],
options={
"db_table": "sentry_projectdsymfile",
},
),
migrations.CreateModel(
name="ProguardArtifactRelease",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("organization_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("release_name", models.CharField(max_length=250)),
("proguard_uuid", models.UUIDField(db_index=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"project_debug_file",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.projectdebugfile"
),
),
],
options={
"db_table": "sentry_proguardartifactrelease",
},
),
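        # ProjectKey holds the credentials behind a project DSN: the unique
        # public/secret key pair, a BitField of roles ("store", "api"), and optional
        # per-key rate limits (rate_limit_count events per rate_limit_window, the
        # latter conventionally in seconds).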
migrations.CreateModel(
name="ProjectKey",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("label", models.CharField(blank=True, max_length=64, null=True)),
("public_key", models.CharField(max_length=32, null=True, unique=True)),
("secret_key", models.CharField(max_length=32, null=True, unique=True)),
("roles", bitfield.models.BitField(["store", "api"], default=1)),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, default=0
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"rate_limit_count",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
(
"rate_limit_window",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
("data", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
("use_case", models.CharField(db_default="user", default="user", max_length=32)),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="key_set",
to="sentry.project",
),
),
],
options={
"db_table": "sentry_projectkey",
},
),
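        # ProjectOption is a per-project key/value store; value is a legacy
        # pickle-backed PickledObjectField rather than JSON.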
migrations.CreateModel(
name="ProjectOption",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("key", models.CharField(max_length=64)),
("value", sentry.db.models.fields.picklefield.PickledObjectField(editable=False)),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_projectoptions",
},
),
migrations.CreateModel(
name="ProjectOwnership",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("raw", models.TextField(null=True)),
("schema", sentry.db.models.fields.jsonfield.JSONField(null=True)),
("fallthrough", models.BooleanField(default=True)),
("auto_assignment", models.BooleanField(default=True)),
("date_created", models.DateTimeField(default=django.utils.timezone.now)),
("last_updated", models.DateTimeField(default=django.utils.timezone.now)),
("is_active", models.BooleanField(default=True)),
("codeowners_auto_sync", models.BooleanField(default=True, null=True)),
(
"suspect_committer_auto_assignment",
models.BooleanField(db_default=False, default=False),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
unique=True,
),
),
],
options={
"db_table": "sentry_projectownership",
},
),
migrations.CreateModel(
name="ProjectPlatform",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("platform", models.CharField(max_length=64)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
],
options={
"db_table": "sentry_projectplatform",
"unique_together": {("project_id", "platform")},
},
),
migrations.CreateModel(
name="ProjectRedirect",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("redirect_slug", models.SlugField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_projectredirect",
},
),
migrations.CreateModel(
name="ProjectSDK",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_added", models.DateTimeField(auto_now_add=True)),
("date_updated", models.DateTimeField(auto_now=True)),
("event_type", sentry.db.models.fields.bounded.BoundedIntegerField()),
("sdk_name", models.CharField()),
("sdk_version", models.CharField()),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
),
migrations.CreateModel(
name="ProjectTemplate",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("name", models.CharField(max_length=200)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_projecttemplate",
},
),
migrations.AddField(
model_name="project",
name="template",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.projecttemplate"
),
),
migrations.CreateModel(
name="ProjectTemplateOption",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("key", models.CharField(max_length=64)),
("value", sentry.db.models.fields.picklefield.PickledObjectField(editable=False)),
(
"project_template",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="options",
to="sentry.projecttemplate",
),
),
],
options={
"db_table": "sentry_projecttemplateoption",
},
),
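        # ProjectTransactionThreshold is the project-wide performance threshold
        # (at most one row per project via the unique project FK); "metric"
        # (default 1) selects which measurement the threshold applies to.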
migrations.CreateModel(
name="ProjectTransactionThreshold",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("threshold", models.IntegerField()),
("metric", models.PositiveSmallIntegerField(default=1)),
(
"edited_by_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
unique=True,
),
),
],
options={
"db_table": "sentry_projecttransactionthreshold",
},
),
migrations.CreateModel(
name="ProjectTransactionThresholdOverride",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("transaction", models.CharField(max_length=200)),
("threshold", models.IntegerField()),
("metric", models.PositiveSmallIntegerField(default=1)),
(
"edited_by_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
],
options={
"db_table": "sentry_projecttransactionthresholdoverride",
},
),
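        # PromptsActivity records which in-app prompt ("feature") a user has
        # acknowledged or dismissed, deduplicated by the
        # (user_id, feature, organization_id, project_id) unique_together.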
migrations.CreateModel(
name="PromptsActivity",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
(
"project_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, on_delete="CASCADE"
),
),
("feature", models.CharField(max_length=64)),
("data", sentry.db.models.fields.jsonfield.JSONField(default={})),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
],
options={
"db_table": "sentry_promptsactivity",
"unique_together": {("user_id", "feature", "organization_id", "project_id")},
},
),
migrations.CreateModel(
name="PullRequest",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("repository_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("key", models.CharField(max_length=64)),
(
"date_added",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("title", models.TextField(null=True)),
("message", models.TextField(null=True)),
("merge_commit_sha", models.CharField(db_index=True, max_length=64, null=True)),
(
"author",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.commitauthor",
),
),
],
options={
"db_table": "sentry_pull_request",
},
),
migrations.CreateModel(
name="PullRequestComment",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("external_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("created_at", models.DateTimeField()),
("updated_at", models.DateTimeField()),
(
"group_ids",
django.contrib.postgres.fields.ArrayField(
base_field=sentry.db.models.fields.bounded.BoundedBigIntegerField(),
size=None,
),
),
("reactions", sentry.db.models.fields.jsonfield.JSONField(null=True)),
(
"comment_type",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_default=0, default=0
),
),
(
"pull_request",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.pullrequest"
),
),
],
options={
"db_table": "sentry_pullrequest_comment",
},
),
migrations.CreateModel(
name="PullRequestCommit",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"commit",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.commit"
),
),
(
"pull_request",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.pullrequest"
),
),
],
options={
"db_table": "sentry_pullrequest_commit",
},
),
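        # QuerySubscription mirrors a subscription registered with Snuba (the
        # ClickHouse-backed query service); subscription_id is assigned by Snuba,
        # which is presumably why it is nullable yet unique.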
migrations.CreateModel(
name="QuerySubscription",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("type", models.TextField()),
("status", models.SmallIntegerField(db_index=True, default=0)),
("subscription_id", models.TextField(null=True, unique=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"date_updated",
models.DateTimeField(default=django.utils.timezone.now, null=True),
),
("query_extra", models.TextField(null=True)),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
],
options={
"db_table": "sentry_querysubscription",
},
),
migrations.AddField(
model_name="incident",
name="subscription",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="sentry.querysubscription",
),
),
migrations.CreateModel(
name="RecentSearch",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=False, on_delete="CASCADE"
),
),
("type", models.PositiveSmallIntegerField()),
("query", models.TextField()),
("query_hash", models.CharField(max_length=32)),
("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_recentsearch",
},
),
migrations.CreateModel(
name="RegionImportChunk",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"import_uuid",
sentry.db.models.fields.uuid.UUIDField(db_index=True, max_length=32),
),
("model", models.CharField(db_index=True, max_length=64)),
("min_ordinal", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("max_ordinal", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("min_source_pk", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("max_source_pk", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"min_inserted_pk",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
(
"max_inserted_pk",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
("inserted_map", models.JSONField(default=dict)),
("existing_map", models.JSONField(default=dict)),
("overwrite_map", models.JSONField(default=dict)),
("inserted_identifiers", models.JSONField(default=dict)),
],
options={
"db_table": "sentry_regionimportchunk",
"unique_together": {("import_uuid", "model", "min_ordinal")},
},
),
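        # RegionOutbox is the region-silo half of a transactional outbox: rows are
        # written in the same transaction as the change they describe and drained
        # asynchronously; the three shard_* indexes support coalescing and in-order
        # draining per (shard_scope, shard_identifier).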
migrations.CreateModel(
name="RegionOutbox",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("shard_scope", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("shard_identifier", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("category", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("object_identifier", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("payload", sentry.db.models.fields.jsonfield.JSONField(null=True)),
("scheduled_from", models.DateTimeField(default=django.utils.timezone.now)),
(
"scheduled_for",
models.DateTimeField(
default=datetime.datetime(2016, 8, 1, 0, 0, tzinfo=datetime.UTC)
),
),
(
"date_added",
models.DateTimeField(
db_default=django.db.models.functions.datetime.Now(),
default=django.utils.timezone.now,
editable=False,
),
),
],
options={
"db_table": "sentry_regionoutbox",
"indexes": [
models.Index(
fields=["shard_scope", "shard_identifier", "category", "object_identifier"],
name="sentry_regi_shard_s_bfff84_idx",
),
models.Index(
fields=["shard_scope", "shard_identifier", "scheduled_for"],
name="sentry_regi_shard_s_cd9995_idx",
),
models.Index(
fields=["shard_scope", "shard_identifier", "id"],
name="sentry_regi_shard_s_e7412f_idx",
),
],
},
),
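        # RegionScheduledDeletion is structurally identical to ScheduledDeletion
        # (defined further below); deletions appear to be queued in whichever silo
        # owns the target model.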
migrations.CreateModel(
name="RegionScheduledDeletion",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"guid",
models.CharField(
default=sentry.deletions.models.scheduleddeletion.default_guid,
max_length=32,
unique=True,
),
),
("app_label", models.CharField(max_length=64)),
("model_name", models.CharField(max_length=64)),
("object_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"date_scheduled",
models.DateTimeField(
default=sentry.deletions.models.scheduleddeletion.default_date_schedule
),
),
("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
("data", sentry.db.models.fields.jsonfield.JSONField(default={})),
("in_progress", models.BooleanField(default=False)),
],
options={
"db_table": "sentry_regionscheduleddeletion",
"unique_together": {("app_label", "model_name", "object_id")},
},
),
migrations.CreateModel(
name="RegionTombstone",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("table_name", models.CharField(max_length=48)),
("object_identifier", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("created_at", models.DateTimeField(default=django.utils.timezone.now)),
],
options={
"db_table": "sentry_regiontombstone",
"indexes": [
models.Index(
fields=["table_name", "object_identifier"],
name="sentry_regi_table_n_cd667a_idx",
)
],
},
),
migrations.CreateModel(
name="RegressionGroup",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_added", models.DateTimeField(auto_now_add=True)),
("date_updated", models.DateTimeField(auto_now=True)),
("date_regressed", models.DateTimeField()),
("date_resolved", models.DateTimeField(null=True)),
("version", models.IntegerField()),
("active", models.BooleanField(default=True)),
(
"project_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("type", sentry.db.models.fields.bounded.BoundedIntegerField()),
("fingerprint", models.CharField(max_length=64)),
("baseline", models.FloatField()),
("regressed", models.FloatField()),
],
options={
"indexes": [
models.Index(
fields=["type", "project_id", "fingerprint", "active"],
name="sentry_regr_type_3a29e7_idx",
)
],
"unique_together": {("type", "project_id", "fingerprint", "version")},
},
),
migrations.CreateModel(
name="RelayUsage",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("relay_id", models.CharField(max_length=64)),
("version", models.CharField(default="0.0.1", max_length=32)),
("first_seen", models.DateTimeField(default=django.utils.timezone.now)),
("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
("public_key", models.CharField(db_index=True, max_length=200, null=True)),
],
options={
"db_table": "sentry_relayusage",
"unique_together": {("relay_id", "version")},
},
bases=(sentry.backup.mixins.OverwritableConfigMixin, models.Model),
),
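        # Release denormalizes the parsed version string into major/minor/patch/
        # revision, prerelease and build_code/build_number columns so that
        # semantic-version ordering can happen in SQL rather than in Python.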
migrations.CreateModel(
name="Release",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
default=0, null=True
),
),
("version", models.CharField(max_length=250)),
("ref", models.CharField(blank=True, max_length=250, null=True)),
("url", models.URLField(blank=True, null=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("date_started", models.DateTimeField(blank=True, null=True)),
("date_released", models.DateTimeField(blank=True, null=True)),
("data", sentry.db.models.fields.jsonfield.JSONField(default={})),
(
"owner_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", blank=True, db_index=True, null=True, on_delete="SET_NULL"
),
),
(
"commit_count",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
default=0, null=True
),
),
(
"last_commit_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
("authors", sentry.db.models.fields.array.ArrayField(null=True)),
(
"total_deploys",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
default=0, null=True
),
),
(
"last_deploy_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
("package", models.TextField(null=True)),
("major", models.BigIntegerField(null=True)),
("minor", models.BigIntegerField(null=True)),
("patch", models.BigIntegerField(null=True)),
("revision", models.BigIntegerField(null=True)),
("prerelease", models.TextField(null=True)),
("build_code", models.TextField(null=True)),
("build_number", models.BigIntegerField(null=True)),
("user_agent", models.TextField(null=True)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_release",
},
),
migrations.CreateModel(
name="GroupResolution",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"current_release_version",
models.CharField(blank=True, max_length=250, null=True),
),
("type", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True)),
(
"actor_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
(
"datetime",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("status", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.group", unique=True
),
),
(
"release",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.release"
),
),
],
options={
"db_table": "sentry_groupresolution",
},
),
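        # GroupEnvironment (like several hot-path tables in this migration) declares
        # its FKs with db_constraint=False: the ORM still joins and cascades, but no
        # FOREIGN KEY constraint is created in Postgres.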
migrations.CreateModel(
name="GroupEnvironment",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"first_seen",
models.DateTimeField(
db_index=True, default=django.utils.timezone.now, null=True
),
),
(
"environment",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.environment",
),
),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.group",
),
),
(
"first_release",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.DO_NOTHING,
to="sentry.release",
),
),
],
options={
"db_table": "sentry_groupenvironment",
},
),
migrations.AddField(
model_name="group",
name="first_release",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.PROTECT, to="sentry.release"
),
),
migrations.CreateModel(
name="Distribution",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("name", models.CharField(max_length=64)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"release",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.release"
),
),
],
options={
"db_table": "sentry_distribution",
},
),
migrations.CreateModel(
name="Deploy",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
(
"environment_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
),
(
"date_finished",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("date_started", models.DateTimeField(blank=True, null=True)),
("name", models.CharField(blank=True, max_length=64, null=True)),
("url", models.URLField(blank=True, null=True)),
("notified", models.BooleanField(db_index=True, default=False, null=True)),
(
"release",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.release"
),
),
],
options={
"db_table": "sentry_deploy",
},
),
migrations.CreateModel(
name="ReleaseActivity",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("type", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("data", models.JSONField(default=dict)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"release",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.release"
),
),
],
options={
"db_table": "sentry_releaseactivity",
},
),
migrations.CreateModel(
name="ReleaseArtifactBundle",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("release_name", models.CharField(max_length=250)),
("dist_name", models.CharField(default="", max_length=64)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"artifact_bundle",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.artifactbundle"
),
),
],
options={
"db_table": "sentry_releaseartifactbundle",
},
),
migrations.CreateModel(
name="ReleaseCommit",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
("order", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
(
"commit",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.commit"
),
),
(
"release",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.release"
),
),
],
options={
"db_table": "sentry_releasecommit",
},
),
migrations.CreateModel(
name="ReleaseEnvironment",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
("first_seen", models.DateTimeField(default=django.utils.timezone.now)),
(
"last_seen",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
(
"environment",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.environment",
),
),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.organization",
),
),
(
"release",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.release",
),
),
],
options={
"db_table": "sentry_environmentrelease",
},
),
migrations.CreateModel(
name="ReleaseFile",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
(
"release_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("ident", models.CharField(max_length=40)),
("name", models.TextField()),
(
"dist_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, null=True
),
),
(
"artifact_count",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
default=1, null=True
),
),
(
"file",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.file"
),
),
],
options={
"db_table": "sentry_releasefile",
},
),
migrations.CreateModel(
name="ReleaseHeadCommit",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("repository_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
(
"commit",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.commit"
),
),
(
"release",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.release"
),
),
],
options={
"db_table": "sentry_releaseheadcommit",
},
),
migrations.CreateModel(
name="ReleaseProject",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"new_groups",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
default=0, null=True
),
),
("adopted", models.DateTimeField(blank=True, null=True)),
("unadopted", models.DateTimeField(blank=True, null=True)),
("first_seen_transaction", models.DateTimeField(blank=True, null=True)),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
(
"release",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.release"
),
),
],
options={
"db_table": "sentry_release_project",
},
),
migrations.AddField(
model_name="release",
name="projects",
field=models.ManyToManyField(
related_name="releases", through="sentry.ReleaseProject", to="sentry.project"
),
),
migrations.CreateModel(
name="ReleaseProjectEnvironment",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"new_issues_count",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0),
),
("first_seen", models.DateTimeField(default=django.utils.timezone.now)),
(
"last_seen",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
(
"last_deploy_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, null=True
),
),
("adopted", models.DateTimeField(blank=True, null=True)),
("unadopted", models.DateTimeField(blank=True, null=True)),
(
"environment",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.environment"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
(
"release",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.release"
),
),
],
options={
"db_table": "sentry_releaseprojectenvironment",
},
),
migrations.CreateModel(
name="ReleaseThreshold",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("threshold_type", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("trigger_type", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("value", models.IntegerField()),
("window_in_seconds", models.PositiveIntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"environment",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.environment",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="release_thresholds",
to="sentry.project",
),
),
],
options={
"abstract": False,
},
),
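        # Relocation models a self-serve import job. The CheckConstraints encode
        # "NULL, or strictly greater than the current step": a scheduled pause or
        # cancel may only target a step that has not run yet.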
migrations.CreateModel(
name="Relocation",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("creator_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("owner_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
(
"uuid",
sentry.db.models.fields.uuid.UUIDField(
db_index=True,
default=sentry.relocation.models.relocation.default_guid,
max_length=32,
unique=True,
),
),
("step", models.SmallIntegerField(default=None)),
(
"provenance",
models.SmallIntegerField(
db_default=sentry.relocation.models.relocation.Relocation.Provenance[
"SELF_HOSTED"
],
default=sentry.relocation.models.relocation.Relocation.Provenance[
"SELF_HOSTED"
],
),
),
("status", models.SmallIntegerField(default=0)),
("scheduled_pause_at_step", models.SmallIntegerField(default=None, null=True)),
("scheduled_cancel_at_step", models.SmallIntegerField(default=None, null=True)),
("want_org_slugs", models.JSONField(default=list)),
("want_usernames", models.JSONField(null=True)),
("latest_notified", models.SmallIntegerField(default=None, null=True)),
("latest_unclaimed_emails_sent_at", models.DateTimeField(default=None, null=True)),
("latest_task", models.CharField(default="", max_length=64)),
("latest_task_attempts", models.SmallIntegerField(default=0)),
("failure_reason", models.CharField(default=None, max_length=256, null=True)),
],
options={
"db_table": "sentry_relocation",
"constraints": [
models.CheckConstraint(
condition=models.Q(
("scheduled_pause_at_step__gt", models.F("step")),
("scheduled_pause_at_step__isnull", True),
_connector="OR",
),
name="scheduled_pause_at_step_greater_than_current_step",
),
models.CheckConstraint(
condition=models.Q(
("scheduled_cancel_at_step__gt", models.F("step")),
("scheduled_cancel_at_step__isnull", True),
_connector="OR",
),
name="scheduled_cancel_at_step_greater_than_current_step",
),
],
},
),
migrations.CreateModel(
name="RelocationFile",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("kind", models.SmallIntegerField()),
(
"file",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.file"
),
),
(
"relocation",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.relocation"
),
),
],
options={
"db_table": "sentry_relocationfile",
},
),
migrations.CreateModel(
name="RelocationValidation",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("status", models.SmallIntegerField(default=0)),
("attempts", models.SmallIntegerField(default=0)),
(
"relocation",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.relocation"
),
),
],
options={
"db_table": "sentry_relocationvalidation",
},
),
migrations.CreateModel(
name="RelocationValidationAttempt",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
("status", models.SmallIntegerField(default=0)),
(
"build_id",
sentry.db.models.fields.uuid.UUIDField(
db_index=True, max_length=32, unique=True
),
),
(
"relocation",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.relocation"
),
),
(
"relocation_validation",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="sentry.relocationvalidation",
),
),
],
options={
"db_table": "sentry_relocationvalidationattempt",
},
),
migrations.CreateModel(
name="Repository",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("name", models.CharField(max_length=200)),
("url", models.URLField(null=True)),
("provider", models.CharField(max_length=64, null=True)),
("external_id", models.CharField(max_length=64, null=True)),
("config", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, default=0
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"integration_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, null=True
),
),
("languages", sentry.db.models.fields.array.ArrayField(null=True)),
],
options={
"db_table": "sentry_repository",
"unique_together": {("organization_id", "provider", "external_id")},
},
),
migrations.CreateModel(
name="RepositoryProjectPathConfig",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"organization_integration_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.OrganizationIntegration", db_index=True, on_delete="CASCADE"
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("integration_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("stack_root", models.TextField()),
("source_root", models.TextField()),
("default_branch", models.TextField(null=True)),
("automatically_generated", models.BooleanField(db_default=False, default=False)),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
(
"repository",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.repository"
),
),
],
options={
"db_table": "sentry_repositoryprojectpathconfig",
},
),
migrations.CreateModel(
name="ProjectCodeOwners",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("raw", models.TextField()),
("schema", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
(
"repository_project_path_config",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.PROTECT,
to="sentry.repositoryprojectpathconfig",
unique=True,
),
),
],
options={
"db_table": "sentry_projectcodeowners",
},
),
migrations.CreateModel(
name="RollbackOrganization",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(auto_now=True)),
("date_added", models.DateTimeField(auto_now_add=True)),
("data", models.JSONField(default=None, null=True)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_rollbackorganization",
},
),
migrations.CreateModel(
name="RollbackUser",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(auto_now=True)),
("date_added", models.DateTimeField(auto_now_add=True)),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, on_delete="CASCADE"
),
),
("uuid", models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
("share_uuid", models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
("data", models.JSONField(default=None, null=True)),
("share_data", models.JSONField(default=None, null=True)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_rollbackuser",
},
),
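        # Rule is the legacy issue-alert rule; its conditions and actions live in
        # the "data" GzippedDictField (a zlib-compressed serialized dict) instead of
        # normalized columns.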
migrations.CreateModel(
name="Rule",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"environment_id",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
),
("label", models.CharField(max_length=256)),
("data", sentry.db.models.fields.gzippeddict.GzippedDictField()),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, default=0
),
),
(
"source",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_default=sentry.models.rule.RuleSource["ISSUE"],
default=sentry.models.rule.RuleSource["ISSUE"],
),
),
(
"owner_user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
],
options={
"db_table": "sentry_rule",
},
),
migrations.CreateModel(
name="NeglectedRule",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("disable_date", models.DateTimeField()),
("opted_out", models.BooleanField(default=False)),
("sent_initial_email_date", models.DateTimeField(null=True)),
("sent_final_email_date", models.DateTimeField(null=True)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
(
"rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.rule"
),
),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="GroupRuleStatus",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("status", models.PositiveSmallIntegerField(default=0)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("last_active", models.DateTimeField(null=True)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.group"
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
(
"rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.rule"
),
),
],
options={
"db_table": "sentry_grouprulestatus",
},
),
migrations.CreateModel(
name="RuleActivity",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
("type", models.IntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.rule"
),
),
],
options={
"db_table": "sentry_ruleactivity",
},
),
migrations.CreateModel(
name="RuleFireHistory",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("event_id", sentry.db.models.fields.text.CharField(max_length=32, null=True)),
(
"date_added",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("notification_uuid", models.UUIDField(null=True)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.group",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
(
"rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.rule"
),
),
],
options={
"db_table": "sentry_rulefirehistory",
},
),
migrations.CreateModel(
name="NotificationMessage",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("error_details", sentry.db.models.fields.jsonfield.JSONField(null=True)),
("error_code", models.IntegerField(db_index=True, null=True)),
("message_identifier", sentry.db.models.fields.text.CharField(null=True)),
(
"rule_action_uuid",
sentry.db.models.fields.text.CharField(db_index=True, null=True),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("open_period_start", models.DateTimeField(null=True)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.group"
),
),
(
"incident",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.incident"
),
),
(
"parent_notification_message",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.notificationmessage",
),
),
(
"trigger_action",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.alertruletriggeraction",
),
),
(
"rule_fire_history",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.rulefirehistory",
),
),
],
options={
"db_table": "sentry_notificationmessage",
},
),
migrations.CreateModel(
name="RuleSnooze",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="CASCADE"
),
),
(
"owner_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
("until", models.DateTimeField(db_index=True, null=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"alert_rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.alertrule",
),
),
(
"rule",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.rule"
),
),
],
options={
"db_table": "sentry_rulesnooze",
},
),
migrations.CreateModel(
name="SavedSearch",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("type", models.PositiveSmallIntegerField(default=0)),
("name", models.CharField(max_length=128)),
("query", models.TextField()),
(
"sort",
sentry.db.models.fields.text.CharField(
default=sentry.models.savedsearch.SortOptions["DATE"],
max_length=16,
null=True,
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("is_global", models.BooleanField(db_index=True, default=False, null=True)),
(
"owner_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="CASCADE"
),
),
("visibility", models.CharField(default="owner", max_length=16)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.organization",
),
),
],
options={
"db_table": "sentry_savedsearch",
},
),
migrations.CreateModel(
name="ScheduledDeletion",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"guid",
models.CharField(
default=sentry.deletions.models.scheduleddeletion.default_guid,
max_length=32,
unique=True,
),
),
("app_label", models.CharField(max_length=64)),
("model_name", models.CharField(max_length=64)),
("object_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"date_scheduled",
models.DateTimeField(
default=sentry.deletions.models.scheduleddeletion.default_date_schedule
),
),
("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
("data", sentry.db.models.fields.jsonfield.JSONField(default={})),
("in_progress", models.BooleanField(default=False)),
],
options={
"db_table": "sentry_scheduleddeletion",
"unique_together": {("app_label", "model_name", "object_id")},
},
),
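        # SentryApp stores its API scopes twice: a legacy BitField ("scopes") and a
        # Postgres text array ("scope_list"); the array appears to be the newer
        # canonical form, since a BitField caps out at 64 flags.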
migrations.CreateModel(
name="SentryApp",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"scopes",
bitfield.models.BitField(
[
"project:read",
"project:write",
"project:admin",
"project:releases",
"team:read",
"team:write",
"team:admin",
"event:read",
"event:write",
"event:admin",
"org:read",
"org:write",
"org:admin",
"member:read",
"member:write",
"member:admin",
"org:integrations",
"alerts:read",
"alerts:write",
"member:invite",
],
default=None,
),
),
(
"scope_list",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(), default=list, size=None
),
),
("date_deleted", models.DateTimeField(blank=True, null=True)),
(
"owner_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE"
),
),
("name", models.TextField()),
(
"slug",
sentry.db.models.fields.slug.SentrySlugField(
db_index=False, max_length=64, unique=True
),
),
("author", models.TextField(null=True)),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, default=0
),
),
(
"uuid",
models.CharField(
default=sentry.sentry_apps.models.sentry_app.default_uuid,
max_length=64,
unique=True,
),
),
("redirect_url", models.URLField(null=True)),
("webhook_url", models.URLField(max_length=512, null=True)),
("is_alertable", models.BooleanField(default=False)),
("verify_install", models.BooleanField(default=True)),
("events", sentry.db.models.fields.array.ArrayField(null=True)),
("overview", models.TextField(null=True)),
("schema", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_published", models.DateTimeField(blank=True, null=True)),
("creator_label", models.TextField(null=True)),
("popularity", models.PositiveSmallIntegerField(default=1, null=True)),
("metadata", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
(
"application",
models.OneToOneField(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="sentry_app",
to="sentry.apiapplication",
),
),
(
"creator_user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
(
"proxy_user",
models.OneToOneField(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="sentry_app",
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"db_table": "sentry_sentryapp",
},
),
migrations.CreateModel(
name="SentryAppAvatar",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("ident", models.CharField(db_index=True, max_length=32, unique=True)),
(
"control_file_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True, unique=True),
),
("avatar_type", models.PositiveSmallIntegerField(default=0)),
("color", models.BooleanField(default=False)),
(
"sentry_app",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="avatar",
to="sentry.sentryapp",
),
),
],
options={
"db_table": "sentry_sentryappavatar",
},
),
migrations.CreateModel(
name="SentryAppComponent",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"uuid",
sentry.db.models.fields.uuid.UUIDField(
editable=False, max_length=32, unique=True
),
),
("type", models.CharField(max_length=64)),
("schema", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
(
"sentry_app",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="components",
to="sentry.sentryapp",
),
),
],
options={
"db_table": "sentry_sentryappcomponent",
},
),
migrations.CreateModel(
name="SentryAppInstallation",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_deleted", models.DateTimeField(blank=True, null=True)),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE"
),
),
(
"uuid",
models.CharField(
default=sentry.sentry_apps.models.sentry_app_installation.default_uuid,
max_length=64,
),
),
(
"status",
sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
db_index=True, default=0
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
(
"api_grant",
models.OneToOneField(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="sentry_app_installation",
to="sentry.apigrant",
),
),
(
"api_token",
models.OneToOneField(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="sentry_app_installation",
to="sentry.apitoken",
),
),
(
"sentry_app",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="installations",
to="sentry.sentryapp",
),
),
],
options={
"db_table": "sentry_sentryappinstallation",
},
),
migrations.CreateModel(
name="SentryAppInstallationForProvider",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE"
),
),
("provider", models.CharField(max_length=64)),
(
"sentry_app_installation",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="sentry.sentryappinstallation",
),
),
],
options={
"db_table": "sentry_sentryappinstallationforprovider",
},
),
migrations.CreateModel(
name="SentryAppInstallationToken",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"api_token",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.apitoken"
),
),
(
"sentry_app_installation",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="sentry.sentryappinstallation",
),
),
],
options={
"db_table": "sentry_sentryappinstallationtoken",
},
),
migrations.CreateModel(
name="ServiceHookProject",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"project_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
(
"service_hook",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.servicehook"
),
),
],
options={
"db_table": "sentry_servicehookproject",
},
),
migrations.CreateModel(
name="SnubaQuery",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("type", models.SmallIntegerField()),
("dataset", models.TextField()),
("query", models.TextField()),
("aggregate", models.TextField()),
("time_window", models.IntegerField()),
("resolution", models.IntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"environment",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.environment",
),
),
],
options={
"db_table": "sentry_snubaquery",
},
),
migrations.AddField(
model_name="querysubscription",
name="snuba_query",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="subscriptions",
to="sentry.snubaquery",
),
),
migrations.AddField(
model_name="alertrule",
name="snuba_query",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.snubaquery", unique=True
),
),
migrations.CreateModel(
name="SnubaQueryEventType",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("type", models.SmallIntegerField()),
(
"snuba_query",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.snubaquery"
),
),
],
options={
"db_table": "sentry_snubaqueryeventtype",
},
),
migrations.CreateModel(
name="StringIndexer",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("string", models.CharField(max_length=200)),
("organization_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"last_seen",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("retention_days", models.IntegerField(default=90)),
],
options={
"db_table": "sentry_stringindexer",
"constraints": [
models.UniqueConstraint(
fields=("string", "organization_id"), name="unique_org_string"
)
],
},
),
migrations.CreateModel(
name="Team",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("slug", sentry.db.models.fields.slug.SentrySlugField()),
("name", models.CharField(max_length=64)),
("status", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0)),
("idp_provisioned", models.BooleanField(db_default=False, default=False)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
],
options={
"db_table": "sentry_team",
},
),
migrations.AddField(
model_name="rule",
name="owner_team",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.SET_NULL, to="sentry.team"
),
),
migrations.CreateModel(
name="ProjectTeam",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
),
),
(
"team",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.team"
),
),
],
options={
"db_table": "sentry_projectteam",
},
),
migrations.AddField(
model_name="project",
name="teams",
field=models.ManyToManyField(
related_name="teams", through="sentry.ProjectTeam", to="sentry.team"
),
),
migrations.CreateModel(
name="OrganizationMemberTeam",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedAutoField(
primary_key=True, serialize=False
),
),
("is_active", models.BooleanField(default=True)),
("role", models.CharField(blank=True, max_length=32, null=True)),
(
"organizationmember",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organizationmember"
),
),
(
"team",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.team"
),
),
],
options={
"db_table": "sentry_organizationmember_teams",
},
),
migrations.AddField(
model_name="organizationmember",
name="teams",
field=models.ManyToManyField(
blank=True, through="sentry.OrganizationMemberTeam", to="sentry.team"
),
),
migrations.CreateModel(
name="OrganizationAccessRequest",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"requester_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="CASCADE"
),
),
(
"member",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organizationmember"
),
),
(
"team",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.team"
),
),
],
options={
"db_table": "sentry_organizationaccessrequest",
},
),
migrations.CreateModel(
name="GroupSubscription",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="CASCADE"
),
),
("is_active", models.BooleanField(default=True)),
("reason", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="subscription_set",
to="sentry.group",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="subscription_set",
to="sentry.project",
),
),
(
"team",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.team"
),
),
],
options={
"db_table": "sentry_groupsubscription",
},
),
migrations.CreateModel(
name="GroupOwner",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("type", models.PositiveSmallIntegerField()),
("context", sentry.db.models.fields.jsonfield.JSONField(null=True)),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="CASCADE"
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.group",
),
),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.organization",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
(
"team",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.team"
),
),
],
options={
"db_table": "sentry_groupowner",
},
),
migrations.CreateModel(
name="GroupHistory",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="SET_NULL"
),
),
("status", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0)),
("prev_history_date", models.DateTimeField(null=True)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.group",
),
),
(
"prev_history",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.grouphistory",
),
),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.organization",
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.project",
),
),
(
"release",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.release",
),
),
(
"team",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.SET_NULL, to="sentry.team"
),
),
],
options={
"db_table": "sentry_grouphistory",
},
),
migrations.CreateModel(
name="GroupAssignee",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="CASCADE"
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"group",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="assignee_set",
to="sentry.group",
unique=True,
),
),
(
"project",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="assignee_set",
to="sentry.project",
),
),
(
"team",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="sentry_assignee_set",
to="sentry.team",
),
),
],
options={
"db_table": "sentry_groupasignee",
},
),
migrations.CreateModel(
name="ExternalActor",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"user_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.User", db_index=True, null=True, on_delete="CASCADE"
),
),
(
"integration_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Integration", db_index=True, on_delete="CASCADE"
),
),
("provider", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("external_name", models.TextField()),
("external_id", models.TextField(null=True)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
(
"team",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.team"
),
),
],
options={
"db_table": "sentry_externalactor",
},
),
migrations.CreateModel(
name="DashboardPermissionsTeam",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"permissions",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="sentry.dashboardpermissions",
),
),
(
"team",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.team"
),
),
],
options={
"db_table": "sentry_dashboardpermissionsteam",
},
),
migrations.AddField(
model_name="dashboardpermissions",
name="teams_with_edit_access",
field=models.ManyToManyField(
blank=True, through="sentry.DashboardPermissionsTeam", to="sentry.team"
),
),
migrations.AddField(
model_name="alertrule",
name="team",
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True, on_delete=django.db.models.deletion.SET_NULL, to="sentry.team"
),
),
migrations.CreateModel(
name="TeamKeyTransaction",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("transaction", models.CharField(max_length=200)),
(
"organization",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.organization"
),
),
(
"project_team",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.projectteam",
),
),
],
options={
"db_table": "sentry_performanceteamkeytransaction",
},
),
migrations.CreateModel(
name="TeamReplica",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"team_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Team", db_index=True, on_delete="CASCADE"
),
),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE"
),
),
("slug", models.SlugField()),
("name", models.CharField(max_length=64)),
("status", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
("org_role", models.CharField(max_length=32, null=True)),
],
options={
"db_table": "sentry_teamreplica",
"unique_together": {("organization_id", "slug")},
},
),
migrations.CreateModel(
name="UserAvatar",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("ident", models.CharField(db_index=True, max_length=32, unique=True)),
(
"control_file_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True, unique=True),
),
("avatar_type", models.PositiveSmallIntegerField(default=0)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="avatar",
to=settings.AUTH_USER_MODEL,
unique=True,
),
),
],
options={
"db_table": "sentry_useravatar",
},
),
migrations.CreateModel(
name="UserEmail",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("email", models.EmailField(max_length=75)),
(
"validation_hash",
models.CharField(
default=sentry.utils.security.hash.get_secure_token, max_length=32
),
),
("date_hash_added", models.DateTimeField(default=django.utils.timezone.now)),
("is_verified", models.BooleanField(default=False)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="emails",
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"db_table": "sentry_useremail",
},
),
migrations.CreateModel(
name="UserIP",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("ip_address", models.GenericIPAddressField()),
("country_code", models.CharField(max_length=16, null=True)),
("region_code", models.CharField(max_length=16, null=True)),
("first_seen", models.DateTimeField(default=django.utils.timezone.now)),
("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"db_table": "sentry_userip",
},
),
migrations.CreateModel(
name="UserOption",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"project_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Project", db_index=True, null=True, on_delete="CASCADE"
),
),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, null=True, on_delete="CASCADE"
),
),
("key", models.CharField(max_length=64)),
("value", sentry.db.models.fields.picklefield.PickledObjectField(editable=False)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"db_table": "sentry_useroption",
},
),
migrations.CreateModel(
name="UserPermission",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("permission", models.CharField(max_length=32)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"db_table": "sentry_userpermission",
},
bases=(sentry.backup.mixins.OverwritableConfigMixin, models.Model),
),
migrations.CreateModel(
name="UserReport",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"project_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
(
"group_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, null=True
),
),
("event_id", models.CharField(max_length=32)),
(
"environment_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(
db_index=True, null=True
),
),
("name", models.CharField(max_length=128)),
("email", models.EmailField(max_length=75)),
("comments", models.TextField(max_length=4096)),
(
"date_added",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
],
options={
"db_table": "sentry_userreport",
"indexes": [
models.Index(
fields=["project_id", "event_id"], name="sentry_user_project_cbfd59_idx"
),
models.Index(
fields=["project_id", "date_added"], name="sentry_user_project_b8faaf_idx"
),
],
"unique_together": {("project_id", "event_id")},
},
),
migrations.CreateModel(
name="UserRoleUser",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
(
"role",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.userrole"
),
),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"db_table": "sentry_userrole_users",
},
),
migrations.AddField(
model_name="userrole",
name="users",
field=models.ManyToManyField(
through="sentry.UserRoleUser", to=settings.AUTH_USER_MODEL
),
),
migrations.CreateModel(
name="ApiAuthorization",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"scopes",
bitfield.models.BitField(
[
"project:read",
"project:write",
"project:admin",
"project:releases",
"team:read",
"team:write",
"team:admin",
"event:read",
"event:write",
"event:admin",
"org:read",
"org:write",
"org:admin",
"member:read",
"member:write",
"member:admin",
"org:integrations",
"alerts:read",
"alerts:write",
"member:invite",
],
default=None,
),
),
(
"scope_list",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(), default=list, size=None
),
),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, null=True, on_delete="CASCADE"
),
),
(
"application",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.apiapplication",
),
),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"db_table": "sentry_apiauthorization",
"constraints": [
models.UniqueConstraint(
condition=models.Q(("organization_id__isnull", True)),
fields=("user", "application"),
name="apiauthorization_user_app",
),
models.UniqueConstraint(
condition=models.Q(("organization_id__isnull", False)),
fields=("user", "application", "organization_id"),
name="apiauthorization_user_app_org",
),
],
},
),
migrations.CreateModel(
name="ArtifactBundleIndex",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
),
("url", models.TextField()),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"artifact_bundle",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.artifactbundle"
),
),
],
options={
"db_table": "sentry_artifactbundleindex",
"indexes": [
models.Index(
fields=["url", "artifact_bundle"], name="sentry_arti_url_7e628a_idx"
)
],
},
),
migrations.CreateModel(
name="AssistantActivity",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("guide_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("viewed_ts", models.DateTimeField(null=True)),
("dismissed_ts", models.DateTimeField(null=True)),
("useful", models.BooleanField(null=True)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"db_table": "sentry_assistant_activity",
"unique_together": {("user", "guide_id")},
},
),
migrations.CreateModel(
name="AuditLogEntry",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
(
"organization_id",
sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
"sentry.Organization", db_index=True, on_delete="CASCADE"
),
),
("actor_label", models.CharField(blank=True, max_length=64, null=True)),
(
"target_object",
sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
),
("event", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("ip_address", models.GenericIPAddressField(null=True, unpack_ipv4=True)),
("data", sentry.db.models.fields.gzippeddict.GzippedDictField()),
("datetime", models.DateTimeField(default=django.utils.timezone.now)),
(
"actor",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="audit_actors",
to=settings.AUTH_USER_MODEL,
),
),
(
"actor_key",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="sentry.apikey",
),
),
(
"target_user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="audit_targets",
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"db_table": "sentry_auditlogentry",
"indexes": [
models.Index(
fields=["organization_id", "datetime"],
name="sentry_audi_organiz_c8bd18_idx",
),
models.Index(
fields=["organization_id", "event", "datetime"],
name="sentry_audi_organiz_588b1e_idx",
),
],
},
),
migrations.CreateModel(
name="Authenticator",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedAutoField(
primary_key=True, serialize=False
),
),
("created_at", models.DateTimeField(default=django.utils.timezone.now)),
("last_used_at", models.DateTimeField(null=True)),
("type", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
("config", sentry.users.models.authenticator.AuthenticatorConfig(editable=False)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"verbose_name": "authenticator",
"verbose_name_plural": "authenticators",
"db_table": "auth_authenticator",
"unique_together": {("user", "type")},
},
),
migrations.CreateModel(
name="AuthIdentity",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("ident", models.CharField(max_length=128)),
("data", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
("last_verified", models.DateTimeField(default=django.utils.timezone.now)),
("last_synced", models.DateTimeField(default=django.utils.timezone.now)),
("date_added", models.DateTimeField(default=django.utils.timezone.now)),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
(
"auth_provider",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.authprovider"
),
),
],
options={
"db_table": "sentry_authidentity",
"indexes": [
models.Index(fields=["last_synced"], name="auth_identity_last_synced_idx")
],
"unique_together": {("auth_provider", "ident"), ("auth_provider", "user")},
},
),
migrations.CreateModel(
name="BroadcastSeen",
fields=[
(
"id",
sentry.db.models.fields.bounded.BoundedBigAutoField(
primary_key=True, serialize=False
),
),
("date_seen", models.DateTimeField(default=django.utils.timezone.now)),
(
"broadcast",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="sentry.broadcast"
),
),
(
"user",
sentry.db.models.fields.foreignkey.FlexibleForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"db_table": "sentry_broadcastseen",
"unique_together": {("broadcast", "user")},
},
),
migrations.AddIndex(
model_name="commit",
index=models.Index(
fields=["repository_id", "date_added"], name="sentry_comm_reposit_da31f2_idx"
),
),
migrations.AddIndex(
model_name="commit",
index=models.Index(
fields=["author", "date_added"], name="sentry_comm_author__131211_idx"
),
),
migrations.AddIndex(
model_name="commit",
index=models.Index(
fields=["organization_id", "date_added"], name="sentry_comm_organiz_7be514_idx"
),
),
migrations.AlterUniqueTogether(
name="commit",
unique_together={("repository_id", "key")},
),
migrations.AlterUniqueTogether(
name="commitfilechange",
unique_together={("commit", "filename")},
),
migrations.AlterUniqueTogether(
name="controlfileblobindex",
unique_together={("file", "blob", "offset")},
),
migrations.AlterUniqueTogether(
name="controlfileblobowner",
unique_together={("blob", "organization_id")},
),
migrations.AlterUniqueTogether(
name="dashboardfavoriteuser",
unique_together={("user_id", "dashboard")},
),
migrations.AlterUniqueTogether(
name="dashboardwidget",
unique_together={("dashboard", "order")},
),
migrations.AlterUniqueTogether(
name="dashboardwidgetquery",
unique_together={("widget", "order")},
),
migrations.AddIndex(
model_name="debugidartifactbundle",
index=models.Index(
fields=["debug_id", "artifact_bundle"], name="sentry_debu_debug_i_8c6c44_idx"
),
),
migrations.AlterUniqueTogether(
name="exporteddatablob",
unique_together={("data_export", "blob_id", "offset")},
),
migrations.AlterUniqueTogether(
name="fileblobindex",
unique_together={("file", "blob", "offset")},
),
migrations.AlterUniqueTogether(
name="fileblobowner",
unique_together={("blob", "organization_id")},
),
migrations.AlterUniqueTogether(
name="groupmeta",
unique_together={("group", "key")},
),
migrations.AlterUniqueTogether(
name="identity",
unique_together={("idp", "external_id"), ("idp", "user")},
),
migrations.AddIndex(
model_name="incidenttrigger",
index=models.Index(
fields=["alert_rule_trigger", "incident_id"], name="sentry_inci_alert_r_33da01_idx"
),
),
migrations.AlterUniqueTogether(
name="incidenttrigger",
unique_together={("incident", "alert_rule_trigger")},
),
migrations.AlterUniqueTogether(
name="alertruletrigger",
unique_together={("alert_rule", "label")},
),
migrations.AddConstraint(
model_name="notificationsettingoption",
constraint=models.CheckConstraint(
condition=models.Q(
models.Q(("team_id__isnull", False), ("user_id__isnull", True)),
models.Q(("team_id__isnull", True), ("user_id__isnull", False)),
_connector="OR",
),
name="notification_setting_option_team_or_user_check",
),
),
migrations.AlterUniqueTogether(
name="notificationsettingoption",
unique_together={("scope_type", "scope_identifier", "user_id", "team_id", "type")},
),
migrations.AddConstraint(
model_name="notificationsettingprovider",
constraint=models.CheckConstraint(
condition=models.Q(
models.Q(("team_id__isnull", False), ("user_id__isnull", True)),
models.Q(("team_id__isnull", True), ("user_id__isnull", False)),
_connector="OR",
),
name="notification_setting_provider_team_or_user_check",
),
),
migrations.AlterUniqueTogether(
name="notificationsettingprovider",
unique_together={
("scope_type", "scope_identifier", "user_id", "team_id", "provider", "type")
},
),
migrations.AddConstraint(
model_name="groupsearchviewstarred",
constraint=models.UniqueConstraint(
deferrable=django.db.models.constraints.Deferrable["DEFERRED"],
fields=("user_id", "organization_id", "position"),
name="sentry_groupsearchviewstarred_unique_view_position_per_org_user",
),
),
migrations.AddConstraint(
model_name="groupsearchviewlastvisited",
constraint=models.UniqueConstraint(
fields=("user_id", "organization_id", "group_search_view_id"),
name="sentry_groupsearchviewlastvisited_unique_last_visited_per_org_user_view",
),
),
migrations.AlterUniqueTogether(
name="featureadoption",
unique_together={("organization", "feature_id")},
),
migrations.AlterUniqueTogether(
name="externalissue",
unique_together={("organization", "integration_id", "key")},
),
migrations.AlterUniqueTogether(
name="dashboardtombstone",
unique_together={("organization", "slug")},
),
migrations.AlterUniqueTogether(
name="organizationintegration",
unique_together={("organization_id", "integration")},
),
migrations.AlterUniqueTogether(
name="organizationmemberinvite",
unique_together={("organization", "email")},
),
migrations.AddIndex(
model_name="organizationmembermapping",
index=models.Index(
fields=["organization_id", "user"], name="sentry_orga_organiz_ae9fe7_idx"
),
),
migrations.AddIndex(
model_name="organizationmembermapping",
index=models.Index(
fields=["organization_id", "email"], name="sentry_orga_organiz_7de26b_idx"
),
),
migrations.AlterUniqueTogether(
name="organizationmembermapping",
unique_together={("organization_id", "organizationmember_id")},
),
migrations.AlterUniqueTogether(
name="organizationoption",
unique_together={("organization", "key")},
),
migrations.AlterUniqueTogether(
name="platformexternalissue",
unique_together={("group", "service_type")},
),
migrations.AlterUniqueTogether(
name="organizationonboardingtask",
unique_together={("organization", "task")},
),
migrations.AlterUniqueTogether(
name="incidentproject",
unique_together={("project", "incident")},
),
migrations.AlterUniqueTogether(
name="groupseen",
unique_together={("user_id", "group")},
),
migrations.AlterUniqueTogether(
name="groupsearchviewproject",
unique_together={("group_search_view", "project")},
),
migrations.AddIndex(
model_name="groupopenperiod",
index=models.Index(
fields=["group", "date_started"], name="sentry_grou_group_i_4bffd0_idx"
),
),
# would be nice but it doesn't support hints :(
# django.contrib.postgres.operations.BtreeGistExtension(),
SafeRunSQL(
sql="CREATE EXTENSION IF NOT EXISTS btree_gist;",
reverse_sql="",
hints={"tables": ["sentry_groupopenperiod"]},
),
migrations.AddConstraint(
model_name="groupopenperiod",
constraint=django.contrib.postgres.constraints.ExclusionConstraint(
expressions=[
(models.F("group"), "="),
(
sentry.models.groupopenperiod.TsTzRange(
"date_started",
"date_ended",
django.contrib.postgres.fields.ranges.RangeBoundary(),
),
"&&",
),
],
name="exclude_overlapping_start_end",
),
),
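        # The constraint above is a Postgres exclusion constraint: no two rows
        # may share the same group AND have overlapping
        # [date_started, date_ended) ranges. The btree_gist extension created
        # just before it supplies GiST operator support for the scalar
        # `group = group` part of the index, which plain GiST cannot handle.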
migrations.AddIndex(
model_name="grouplink",
index=models.Index(
fields=["project", "linked_id", "linked_type", "group"],
name="sentry_grou_project_dd3e95_idx",
),
),
migrations.AlterUniqueTogether(
name="grouplink",
unique_together={("group", "linked_type", "linked_id")},
),
migrations.AddIndex(
model_name="groupinbox",
index=models.Index(
fields=["project", "date_added"], name="sentry_grou_project_a9fe16_idx"
),
),
migrations.AlterUniqueTogether(
name="grouphash",
unique_together={("project", "hash")},
),
migrations.AddIndex(
model_name="groupemailthread",
index=models.Index(
fields=["date", "project", "id"], name="sentry_grou_date_d4eb5a_idx"
),
),
migrations.AlterUniqueTogether(
name="groupemailthread",
unique_together={("email", "group"), ("email", "msgid")},
),
migrations.AlterUniqueTogether(
name="groupbookmark",
unique_together={("project", "user_id", "group")},
),
migrations.AlterUniqueTogether(
name="environmentproject",
unique_together={("project", "environment")},
),
migrations.AlterUniqueTogether(
name="environment",
unique_together={("organization_id", "name")},
),
migrations.AlterUniqueTogether(
name="discoversavedqueryproject",
unique_together={("project", "discover_saved_query")},
),
migrations.AddConstraint(
model_name="discoversavedquery",
constraint=models.UniqueConstraint(
condition=models.Q(("is_homepage", True)),
fields=("organization", "created_by_id", "is_homepage"),
name="unique_user_homepage_query",
),
),
migrations.AlterUniqueTogether(
name="dashboardproject",
unique_together={("project", "dashboard")},
),
migrations.AlterUniqueTogether(
name="dashboard",
unique_together={("organization", "title")},
),
migrations.AlterUniqueTogether(
name="customdynamicsamplingruleproject",
unique_together={("custom_dynamic_sampling_rule", "project")},
),
migrations.AddIndex(
model_name="customdynamicsamplingrule",
index=models.Index(
condition=models.Q(("is_active", True)), fields=["organization"], name="org_idx"
),
),
migrations.AddIndex(
model_name="customdynamicsamplingrule",
index=models.Index(
condition=models.Q(("is_active", True)), fields=["end_date"], name="end_date_idx"
),
),
migrations.AddIndex(
model_name="customdynamicsamplingrule",
index=models.Index(
condition=models.Q(("is_active", True)),
fields=["condition_hash"],
name="condition_hash_idx",
),
),
migrations.AlterUniqueTogether(
name="alertruleprojects",
unique_together={("alert_rule", "project")},
),
migrations.AddIndex(
model_name="activity",
index=models.Index(
fields=["project", "datetime"], name="sentry_acti_project_cd8457_idx"
),
),
migrations.AddIndex(
model_name="projectartifactbundle",
index=models.Index(
fields=["project_id", "artifact_bundle"], name="sentry_proj_project_f73d36_idx"
),
),
migrations.AlterUniqueTogether(
name="projectbookmark",
unique_together={("project", "user_id")},
),
migrations.AddIndex(
model_name="projectdebugfile",
index=models.Index(
fields=["project_id", "debug_id"], name="sentry_proj_project_c586ac_idx"
),
),
migrations.AddIndex(
model_name="projectdebugfile",
index=models.Index(
fields=["project_id", "code_id"], name="sentry_proj_project_9b5950_idx"
),
),
migrations.AlterUniqueTogether(
name="proguardartifactrelease",
unique_together={("project_id", "release_name", "proguard_uuid")},
),
migrations.AddIndex(
model_name="projectoption",
index=models.Index(fields=["key"], name="sentry_proj_key_2426c1_idx"),
),
migrations.AlterUniqueTogether(
name="projectoption",
unique_together={("project", "key")},
),
migrations.AlterUniqueTogether(
name="projectredirect",
unique_together={("organization", "redirect_slug")},
),
migrations.AlterUniqueTogether(
name="projectsdk",
unique_together={("project", "event_type", "sdk_name")},
),
migrations.AddConstraint(
model_name="projecttemplate",
constraint=models.UniqueConstraint(
fields=("name", "organization"), name="unique_projecttemplate_name_per_org"
),
),
migrations.AlterUniqueTogether(
name="projecttemplateoption",
unique_together={("project_template", "key")},
),
migrations.AlterUniqueTogether(
name="projecttransactionthresholdoverride",
unique_together={("project", "transaction")},
),
migrations.AddIndex(
model_name="pullrequest",
index=models.Index(
fields=["repository_id", "date_added"], name="sentry_pull_reposit_c429a4_idx"
),
),
migrations.AddIndex(
model_name="pullrequest",
index=models.Index(
fields=["organization_id", "merge_commit_sha"],
name="sentry_pull_organiz_8aabcf_idx",
),
),
migrations.AlterUniqueTogether(
name="pullrequest",
unique_together={("repository_id", "key")},
),
migrations.AlterUniqueTogether(
name="pullrequestcomment",
unique_together={("pull_request", "comment_type")},
),
migrations.AlterUniqueTogether(
name="pullrequestcommit",
unique_together={("pull_request", "commit")},
),
migrations.AddIndex(
model_name="incident",
index=models.Index(
fields=["alert_rule", "type", "status"], name="sentry_inci_alert_r_24a457_idx"
),
),
migrations.AlterUniqueTogether(
name="incident",
unique_together={("organization", "identifier")},
),
migrations.AlterUniqueTogether(
name="recentsearch",
unique_together={("user_id", "organization", "type", "query_hash")},
),
migrations.AddIndex(
model_name="groupenvironment",
index=models.Index(
fields=["environment", "first_release", "first_seen"],
name="sentry_grou_environ_443bdb_idx",
),
),
migrations.AlterUniqueTogether(
name="groupenvironment",
unique_together={("group", "environment")},
),
migrations.AddIndex(
model_name="group",
index=models.Index(
fields=["project", "first_release"], name="sentry_grou_project_4662d9_idx"
),
),
migrations.AddIndex(
model_name="group",
index=models.Index(fields=["project", "id"], name="sentry_grou_project_41a5ce_idx"),
),
migrations.AddIndex(
model_name="group",
index=models.Index(
fields=["project", "status", "last_seen", "id"],
name="sentry_grou_project_81a5ed_idx",
),
),
migrations.AddIndex(
model_name="group",
index=models.Index(
fields=["project", "status", "type", "last_seen", "id"],
name="sentry_grou_project_17d28d_idx",
),
),
migrations.AddIndex(
model_name="group",
index=models.Index(
fields=["project", "status", "substatus", "last_seen", "id"],
name="sentry_grou_project_5eb75b_idx",
),
),
migrations.AddIndex(
model_name="group",
index=models.Index(
fields=["project", "status", "substatus", "type", "last_seen", "id"],
name="sentry_grou_project_ff3fdf_idx",
),
),
migrations.AddIndex(
model_name="group",
index=models.Index(
fields=["project", "status", "substatus", "id"],
name="sentry_grou_project_5acaf7_idx",
),
),
migrations.AddIndex(
model_name="group",
index=models.Index(
fields=["status", "substatus", "id"], name="sentry_grou_status_48b516_idx"
),
),
migrations.AddIndex(
model_name="group",
index=models.Index(
fields=["status", "substatus", "first_seen"], name="sentry_grou_status_e07f40_idx"
),
),
migrations.AddIndex(
model_name="group",
index=models.Index(
fields=["project", "status", "priority", "last_seen", "id"],
name="sentry_grou_project_4ac709_idx",
),
),
migrations.AlterUniqueTogether(
name="group",
unique_together={("project", "short_id")},
),
migrations.AlterUniqueTogether(
name="distribution",
unique_together={("release", "name")},
),
migrations.AddIndex(
model_name="releaseartifactbundle",
index=models.Index(
fields=["organization_id", "release_name", "dist_name", "artifact_bundle"],
name="sentry_rele_organiz_291018_idx",
),
),
migrations.AlterUniqueTogether(
name="releasecommit",
unique_together={("release", "commit"), ("release", "order")},
),
migrations.AlterUniqueTogether(
name="releaseenvironment",
unique_together={("organization", "release", "environment")},
),
migrations.AddIndex(
model_name="releasefile",
index=models.Index(
fields=["release_id", "name"], name="sentry_rele_release_bff97c_idx"
),
),
migrations.AlterUniqueTogether(
name="releasefile",
unique_together={("release_id", "ident")},
),
migrations.AlterUniqueTogether(
name="releaseheadcommit",
unique_together={("repository_id", "release")},
),
migrations.AddIndex(
model_name="releaseproject",
index=models.Index(
fields=["project", "adopted"], name="sentry_rele_project_a80825_idx"
),
),
migrations.AddIndex(
model_name="releaseproject",
index=models.Index(
fields=["project", "unadopted"], name="sentry_rele_project_2ca122_idx"
),
),
migrations.AddIndex(
model_name="releaseproject",
index=models.Index(
fields=["project", "first_seen_transaction"], name="sentry_rele_project_3143eb_idx"
),
),
migrations.AlterUniqueTogether(
name="releaseproject",
unique_together={("project", "release")},
),
migrations.AddIndex(
model_name="release",
index=models.Index(
fields=["organization", "version"],
name="sentry_release_version_btree",
opclasses=["", "text_pattern_ops"],
),
),
migrations.AddIndex(
model_name="release",
index=sentry.db.models.indexes.IndexWithPostgresNameLimits(
models.F("organization"),
models.F("package"),
models.OrderBy(models.F("major"), descending=True),
models.OrderBy(models.F("minor"), descending=True),
models.OrderBy(models.F("patch"), descending=True),
models.OrderBy(models.F("revision"), descending=True),
models.OrderBy(
models.Case(models.When(prerelease="", then=1), default=0), descending=True
),
models.OrderBy(models.F("prerelease"), descending=True),
name="sentry_release_semver_by_package_idx",
),
),
migrations.AddIndex(
model_name="release",
index=models.Index(
models.F("organization"),
models.OrderBy(models.F("major"), descending=True),
models.OrderBy(models.F("minor"), descending=True),
models.OrderBy(models.F("patch"), descending=True),
models.OrderBy(models.F("revision"), descending=True),
models.OrderBy(
models.Case(models.When(prerelease="", then=1), default=0), descending=True
),
models.OrderBy(models.F("prerelease"), descending=True),
name="sentry_release_semver_idx",
),
),
migrations.AddIndex(
model_name="release",
index=models.Index(
fields=["organization", "build_code"], name="sentry_rele_organiz_ffeeb2_idx"
),
),
migrations.AddIndex(
model_name="release",
index=models.Index(
fields=["organization", "build_number"], name="sentry_rele_organiz_6b035f_idx"
),
),
migrations.AddIndex(
model_name="release",
index=models.Index(
fields=["organization", "date_added"], name="sentry_rele_organiz_4ed947_idx"
),
),
migrations.AddIndex(
model_name="release",
index=models.Index(
fields=["organization", "status"], name="sentry_rele_organiz_6975e7_idx"
),
),
migrations.AlterUniqueTogether(
name="release",
unique_together={("organization", "version")},
),
migrations.AddIndex(
model_name="releaseprojectenvironment",
index=models.Index(
fields=["project", "adopted", "environment"], name="sentry_rele_project_4bea8e_idx"
),
),
migrations.AddIndex(
model_name="releaseprojectenvironment",
index=models.Index(
fields=["project", "unadopted", "environment"],
name="sentry_rele_project_922a6a_idx",
),
),
migrations.AlterUniqueTogether(
name="releaseprojectenvironment",
unique_together={("project", "release", "environment")},
),
migrations.AlterUniqueTogether(
name="relocationfile",
unique_together={("relocation", "file"), ("relocation", "kind")},
),
migrations.AlterUniqueTogether(
name="repositoryprojectpathconfig",
unique_together={("project", "stack_root")},
),
migrations.AddConstraint(
model_name="rollbackorganization",
constraint=models.UniqueConstraint(fields=("organization_id",), name="unique_org"),
),
migrations.AddConstraint(
model_name="rollbackuser",
constraint=models.UniqueConstraint(
fields=("user_id", "organization_id"), name="unique_user_org"
),
),
migrations.AlterUniqueTogether(
name="grouprulestatus",
unique_together={("rule", "group")},
),
migrations.AddIndex(
model_name="rulefirehistory",
index=models.Index(
fields=["rule", "date_added"], name="sentry_rule_rule_id_015b9e_idx"
),
),
migrations.AddConstraint(
model_name="notificationmessage",
constraint=models.UniqueConstraint(
condition=models.Q(
("error_code__isnull", True),
("incident__isnull", False),
("parent_notification_message__isnull", True),
("trigger_action__isnull", False),
),
fields=("incident", "trigger_action"),
name="singular_parent_message_per_incident_and_trigger_action",
),
),
migrations.AddConstraint(
model_name="notificationmessage",
constraint=models.UniqueConstraint(
models.F("rule_fire_history"),
models.F("rule_action_uuid"),
django.db.models.functions.comparison.Coalesce(
"open_period_start",
models.Value(datetime.datetime(1, 1, 1, 0, 0, tzinfo=datetime.UTC)),
),
condition=models.Q(
("error_code__isnull", True), ("parent_notification_message__isnull", True)
),
name="singular_parent_message_per_rule_fire_history_rule_action_open_",
),
),
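        # The Coalesce above folds NULL open_period_start values onto a fixed
        # sentinel (0001-01-01 UTC) so they compare equal in the unique index;
        # Postgres otherwise treats NULLs as distinct, which would allow
        # duplicate parent messages whenever no open period is set.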
migrations.AddConstraint(
model_name="rulesnooze",
constraint=models.CheckConstraint(
condition=models.Q(
models.Q(("alert_rule__isnull", True), ("rule__isnull", False)),
models.Q(("alert_rule__isnull", False), ("rule__isnull", True)),
_connector="OR",
),
name="rule_or_alert_rule",
),
),
migrations.AddConstraint(
model_name="rulesnooze",
constraint=models.UniqueConstraint(
condition=models.Q(("user_id__isnull", True)),
fields=("rule",),
name="unique_rule_user",
),
),
migrations.AddConstraint(
model_name="rulesnooze",
constraint=models.UniqueConstraint(
condition=models.Q(("user_id__isnull", True)),
fields=("alert_rule",),
name="unique_alert_rule_user",
),
),
migrations.AlterUniqueTogether(
name="rulesnooze",
unique_together={("user_id", "alert_rule"), ("user_id", "rule")},
),
migrations.AddConstraint(
model_name="savedsearch",
constraint=models.UniqueConstraint(
condition=models.Q(("visibility", "owner_pinned")),
fields=("organization", "owner_id", "type"),
name="sentry_savedsearch_pinning_constraint",
),
),
migrations.AddConstraint(
model_name="savedsearch",
constraint=models.UniqueConstraint(
condition=models.Q(("is_global", True)),
fields=("is_global", "name"),
name="sentry_savedsearch_organization_id_313a24e907cdef99",
),
),
migrations.AlterUniqueTogether(
name="sentryappinstallationforprovider",
unique_together={("provider", "organization_id")},
),
migrations.AlterUniqueTogether(
name="sentryappinstallationtoken",
unique_together={("sentry_app_installation", "api_token")},
),
migrations.AlterUniqueTogether(
name="servicehookproject",
unique_together={("service_hook", "project_id")},
),
migrations.AlterUniqueTogether(
name="snubaqueryeventtype",
unique_together={("snuba_query", "type")},
),
migrations.AlterUniqueTogether(
name="team",
unique_together={("organization", "slug")},
),
migrations.AddIndex(
model_name="rule",
index=models.Index(
fields=["project", "status", "owner_team"], name="sentry_rule_project_cce776_idx"
),
),
migrations.AddIndex(
model_name="rule",
index=models.Index(
fields=["project", "status", "owner_user_id"], name="sentry_rule_project_6e9491_idx"
),
),
migrations.AddConstraint(
model_name="rule",
constraint=models.CheckConstraint(
condition=models.Q(
models.Q(("owner_team__isnull", False), ("owner_user_id__isnull", True)),
models.Q(("owner_team__isnull", True), ("owner_user_id__isnull", False)),
models.Q(("owner_team__isnull", True), ("owner_user_id__isnull", True)),
_connector="OR",
),
name="rule_owner_user_or_team_check",
),
),
migrations.AlterUniqueTogether(
name="projectteam",
unique_together={("project", "team")},
),
migrations.AlterUniqueTogether(
name="project",
unique_together={("organization", "external_id"), ("organization", "slug")},
),
migrations.AlterUniqueTogether(
name="organizationmemberteam",
unique_together={("team", "organizationmember")},
),
migrations.AlterUniqueTogether(
name="organizationmember",
unique_together={("organization", "email"), ("organization", "user_id")},
),
migrations.AlterUniqueTogether(
name="organizationaccessrequest",
unique_together={("team", "member")},
),
migrations.AddConstraint(
model_name="groupsubscription",
constraint=models.CheckConstraint(
condition=models.Q(
models.Q(("team_id__isnull", False), ("user_id__isnull", True)),
models.Q(("team_id__isnull", True), ("user_id__isnull", False)),
_connector="OR",
),
name="subscription_team_or_user_check",
),
),
migrations.AlterUniqueTogether(
name="groupsubscription",
unique_together={("group", "team"), ("group", "user_id")},
),
migrations.AddIndex(
model_name="groupowner",
index=models.Index(
models.F("type"),
django.db.models.functions.comparison.Cast(
django.db.models.fields.json.KeyTextTransform(
"commitId",
django.db.models.functions.comparison.Cast(
models.F("context"), models.JSONField()
),
),
models.BigIntegerField(),
),
name="groupowner_type_json_commitid",
),
),
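        # Functional index: casts the JSON `context` field to jsonb, extracts
        # its "commitId" key as text, and casts that to bigint, so lookups by
        # (type, commit id) can be served from the index without a dedicated
        # column.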
migrations.AddIndex(
model_name="grouphistory",
index=models.Index(
fields=["project", "status", "release"], name="sentry_grou_project_bbcf30_idx"
),
),
migrations.AddIndex(
model_name="grouphistory",
index=models.Index(fields=["group", "status"], name="sentry_grou_group_i_c61acb_idx"),
),
migrations.AddIndex(
model_name="grouphistory",
index=models.Index(
fields=["project", "date_added"], name="sentry_grou_project_20b3f8_idx"
),
),
migrations.AlterUniqueTogether(
name="groupassignee",
unique_together={("project", "group")},
),
migrations.AddConstraint(
model_name="externalactor",
constraint=models.CheckConstraint(
condition=models.Q(
("user_id__isnull", False), ("team_id__isnull", False), _connector="OR"
),
name="external_actor_team_or_user",
),
),
migrations.AlterUniqueTogether(
name="externalactor",
unique_together={
("organization", "provider", "external_name", "team_id"),
("organization", "provider", "external_name", "user_id"),
},
),
migrations.AlterUniqueTogether(
name="dashboardpermissionsteam",
unique_together={("team", "permissions")},
),
migrations.AlterUniqueTogether(
name="teamkeytransaction",
unique_together={("project_team", "transaction")},
),
migrations.AlterUniqueTogether(
name="useremail",
unique_together={("user", "email")},
),
migrations.AlterUniqueTogether(
name="userip",
unique_together={("user", "ip_address")},
),
migrations.AlterUniqueTogether(
name="useroption",
unique_together={("user", "organization_id", "key"), ("user", "project_id", "key")},
),
migrations.AlterUniqueTogether(
name="userpermission",
unique_together={("user", "permission")},
),
]
| Migration |
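The operations above lean on two recurring constraint idioms: an XOR-style CheckConstraint ("exactly one of team/user") and conditional UniqueConstraints. A minimal sketch of both, assuming Django 5.1+ (where CheckConstraint takes condition=, as this migration does); the model and constraint names are illustrative, not Sentry's:

# Sketch only: a standalone model showing the team-or-user XOR check and a
# partial unique constraint, mirroring the patterns in the migration above.
import django
from django.conf import settings

if not settings.configured:
    settings.configure(INSTALLED_APPS=[], DATABASES={})
    django.setup()

from django.db import models

class ExampleSubscription(models.Model):
    team_id = models.BigIntegerField(null=True)
    user_id = models.BigIntegerField(null=True)

    class Meta:
        app_label = "example"  # illustrative app label
        constraints = [
            # XOR check: exactly one of team_id / user_id is non-NULL.
            models.CheckConstraint(
                condition=models.Q(team_id__isnull=False, user_id__isnull=True)
                | models.Q(team_id__isnull=True, user_id__isnull=False),
                name="example_team_or_user",
            ),
            # Partial uniqueness: at most one row per team, ignoring NULL teams.
            models.UniqueConstraint(
                fields=["team_id"],
                condition=models.Q(team_id__isnull=False),
                name="example_unique_team",
            ),
        ]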
python | langchain-ai__langchain | libs/langchain_v1/langchain/agents/middleware/shell_tool.py | {
"start": 10488,
"end": 11353
} | class ____(BaseModel):
"""Input schema for the persistent shell tool."""
command: str | None = None
"""The shell command to execute."""
restart: bool | None = None
"""Whether to restart the shell session."""
runtime: Annotated[Any, SkipJsonSchema()] = None
"""The runtime for the shell tool.
    Included as a workaround because args_schema does not currently work with
    injected ToolRuntime.
"""
@model_validator(mode="after")
def validate_payload(self) -> _ShellToolInput:
if self.command is None and not self.restart:
msg = "Shell tool requires either 'command' or 'restart'."
raise ValueError(msg)
if self.command is not None and self.restart:
msg = "Specify only one of 'command' or 'restart'."
raise ValueError(msg)
return self
| _ShellToolInput |
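The validator above follows a common mutually-exclusive-fields pattern. A minimal pydantic v2 sketch of the same idea, with an illustrative model name:

# Sketch: mode="after" model validator enforcing exactly one of two fields.
from typing import Optional

from pydantic import BaseModel, model_validator

class ExclusiveInput(BaseModel):
    command: Optional[str] = None
    restart: Optional[bool] = None

    @model_validator(mode="after")
    def _exactly_one(self) -> "ExclusiveInput":
        if self.command is None and not self.restart:
            raise ValueError("requires either 'command' or 'restart'")
        if self.command is not None and self.restart:
            raise ValueError("specify only one of 'command' or 'restart'")
        return self

ExclusiveInput(command="ls")  # ok
ExclusiveInput(restart=True)  # ok
# ExclusiveInput() or ExclusiveInput(command="ls", restart=True) raises
# a ValidationError wrapping the ValueError above.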
python | kamyu104__LeetCode-Solutions | Python/minimum-area-rectangle.py | {
"start": 90,
"end": 992
} | class ____(object):
def minAreaRect(self, points):
"""
:type points: List[List[int]]
:rtype: int
"""
nx = len(set(x for x, y in points))
ny = len(set(y for x, y in points))
p = collections.defaultdict(list)
if nx > ny:
for x, y in points:
p[x].append(y)
else:
for x, y in points:
p[y].append(x)
lookup = {}
result = float("inf")
for x in sorted(p):
p[x].sort()
for j in xrange(len(p[x])):
for i in xrange(j):
y1, y2 = p[x][i], p[x][j]
if (y1, y2) in lookup:
result = min(result, (x-lookup[y1, y2]) * (y2-y1))
lookup[y1, y2] = x
return result if result != float("inf") else 0
# Time: O(n^2)
# Space: O(n)
| Solution |
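The class above is Python 2 code (note xrange) and uses collections.defaultdict, presumably imported near the top of the original file. A self-contained Python 3 restatement of the same sweep, as a sketch, with a small check: group points by the sparser axis, then for each pair of values on the other axis remember the last coordinate where that pair occurred.

import collections

def min_area_rect(points):
    xs = {x for x, _ in points}
    ys = {y for _, y in points}
    p = collections.defaultdict(list)
    if len(xs) > len(ys):
        for x, y in points:
            p[x].append(y)
    else:
        for x, y in points:
            p[y].append(x)
    last = {}
    best = float("inf")
    for k in sorted(p):
        vals = sorted(p[k])
        for j in range(len(vals)):
            for i in range(j):
                pair = (vals[i], vals[j])
                if pair in last:
                    # Same value pair seen at an earlier sweep coordinate:
                    # these four points form an axis-aligned rectangle.
                    best = min(best, (k - last[pair]) * (vals[j] - vals[i]))
                last[pair] = k
    return 0 if best == float("inf") else best

assert min_area_rect([[1, 1], [1, 3], [3, 1], [3, 3], [2, 2]]) == 4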
python | facebook__pyre-check | client/language_server/tests/connections_test.py | {
"start": 432,
"end": 736
} | class ____(socketserver.StreamRequestHandler):
def handle(self) -> None:
try:
while True:
data = self.rfile.readline()
self.wfile.write(data)
self.wfile.flush()
except BrokenPipeError:
pass
| EchoServerRequestHandler |
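A sketch of how a handler like the one above is typically exercised in tests: bind a ThreadingTCPServer on an ephemeral port, echo one line, and shut down. The handler is restated here with an added empty-read break so the loop ends when the client disconnects.

import socket
import socketserver
import threading

class EchoHandler(socketserver.StreamRequestHandler):
    def handle(self):
        try:
            while True:
                data = self.rfile.readline()
                if not data:  # client closed the connection
                    break
                self.wfile.write(data)
                self.wfile.flush()
        except BrokenPipeError:
            pass

server = socketserver.ThreadingTCPServer(("127.0.0.1", 0), EchoHandler)
threading.Thread(target=server.serve_forever, daemon=True).start()

with socket.create_connection(server.server_address) as sock:
    sock.sendall(b"ping\n")
    assert sock.makefile("rb").readline() == b"ping\n"
server.shutdown()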
python | huggingface__transformers | src/transformers/models/bridgetower/modeling_bridgetower.py | {
"start": 64569,
"end": 65249
} | class ____(nn.Module):
def __init__(self, config):
super().__init__()
self.dense = nn.Linear(config.hidden_size, config.hidden_size)
if isinstance(config.hidden_act, str):
self.transform_act_fn = ACT2FN[config.hidden_act]
else:
self.transform_act_fn = config.hidden_act
self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
def forward(self, hidden_states):
hidden_states = self.dense(hidden_states)
hidden_states = self.transform_act_fn(hidden_states)
hidden_states = self.LayerNorm(hidden_states)
return hidden_states
| BridgeTowerPredictionHeadTransform |
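A quick shape check for the dense -> activation -> LayerNorm pipeline above, restated as a Sequential; this is a sketch, not the actual HF module (which also accepts string activation names via an ACT2FN lookup).

import torch
import torch.nn as nn

hidden_size = 8  # illustrative; the real config value comes from the model
transform = nn.Sequential(
    nn.Linear(hidden_size, hidden_size),
    nn.GELU(),
    nn.LayerNorm(hidden_size, eps=1e-5),
)
out = transform(torch.randn(2, 4, hidden_size))
assert out.shape == (2, 4, hidden_size)  # shape is preserved end to end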
python | tensorflow__tensorflow | tensorflow/compiler/tests/cast_ops_test.py | {
"start": 1226,
"end": 2112
} | class ____(xla_test.XLATestCase):
def testBitcastToLarger(self):
with ops.device('device:{}:0'.format(self.device)):
def f(x):
t = array_ops.bitcast(x, dtypes.float32)
return math_ops.reduce_sum(t, axis=1)
compiled_f = def_function.function(f, jit_compile=True)
x = random_ops.random_normal([10, 10, 2], dtype=dtypes.float16)
with ops.device(self.device):
out = f(x)
compiled_out = compiled_f(x)
self.assertAllClose(out, compiled_out)
# 10,10,2--(bitcast-convert)-->10,10--(reduce)-->10
self.assertEqual(out.shape[0], 10)
hlo = compiled_f.experimental_get_compiler_ir(x)(stage='hlo')
self.assertIn('f32[10,10]{1,0} bitcast-convert(f16[10,10,2]{2,1,0}', hlo)
def testBitcastToSmaller(self):
    pass  # stub; only the widening bitcast direction is exercised above
if __name__ == '__main__':
ops.enable_eager_execution()
test.main()
| CastOpsTest |
python | apache__airflow | providers/google/tests/unit/google/firebase/hooks/test_firestore.py | {
"start": 1683,
"end": 5654
} | class ____:
hook: CloudFirestoreHook | None = None
def setup_method(self):
with mock.patch(
"airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__",
new=mock_base_gcp_hook_default_project_id,
):
self.hook = CloudFirestoreHook(gcp_conn_id="test")
@mock.patch("airflow.providers.google.firebase.hooks.firestore.CloudFirestoreHook._authorize")
@mock.patch("airflow.providers.google.firebase.hooks.firestore.build")
@mock.patch("airflow.providers.google.firebase.hooks.firestore.build_from_document")
def test_client_creation(self, mock_build_from_document, mock_build, mock_authorize):
result = self.hook.get_conn()
mock_build.assert_called_once_with("firestore", "v1", cache_discovery=False)
mock_build_from_document.assert_called_once_with(
mock_build.return_value._rootDesc, http=mock_authorize.return_value
)
assert mock_build_from_document.return_value == result
assert self.hook._conn == result
@mock.patch("airflow.providers.google.firebase.hooks.firestore.CloudFirestoreHook.get_conn")
def test_immediately_complete(self, get_conn_mock):
service_mock = get_conn_mock.return_value
mock_export_documents = service_mock.projects.return_value.databases.return_value.exportDocuments
mock_operation_get = (
service_mock.projects.return_value.databases.return_value.operations.return_value.get
)
(mock_export_documents.return_value.execute.return_value) = TEST_OPERATION
(mock_operation_get.return_value.execute.return_value) = TEST_DONE_OPERATION
self.hook.export_documents(body=EXPORT_DOCUMENT_BODY, project_id=TEST_PROJECT_ID)
mock_export_documents.assert_called_once_with(
body=EXPORT_DOCUMENT_BODY, name="projects/firestore--project-id/databases/(default)"
)
@mock.patch("airflow.providers.google.firebase.hooks.firestore.CloudFirestoreHook.get_conn")
@mock.patch("airflow.providers.google.firebase.hooks.firestore.time.sleep")
def test_waiting_operation(self, _, get_conn_mock):
service_mock = get_conn_mock.return_value
mock_export_documents = service_mock.projects.return_value.databases.return_value.exportDocuments
mock_operation_get = (
service_mock.projects.return_value.databases.return_value.operations.return_value.get
)
(mock_export_documents.return_value.execute.return_value) = TEST_OPERATION
execute_mock = mock.Mock(
**{"side_effect": [TEST_WAITING_OPERATION, TEST_DONE_OPERATION, TEST_DONE_OPERATION]}
)
mock_operation_get.return_value.execute = execute_mock
self.hook.export_documents(body=EXPORT_DOCUMENT_BODY, project_id=TEST_PROJECT_ID)
mock_export_documents.assert_called_once_with(
body=EXPORT_DOCUMENT_BODY, name="projects/firestore--project-id/databases/(default)"
)
@mock.patch("airflow.providers.google.firebase.hooks.firestore.CloudFirestoreHook.get_conn")
@mock.patch("airflow.providers.google.firebase.hooks.firestore.time.sleep")
def test_error_operation(self, _, get_conn_mock):
service_mock = get_conn_mock.return_value
mock_export_documents = service_mock.projects.return_value.databases.return_value.exportDocuments
mock_operation_get = (
service_mock.projects.return_value.databases.return_value.operations.return_value.get
)
(mock_export_documents.return_value.execute.return_value) = TEST_OPERATION
execute_mock = mock.Mock(**{"side_effect": [TEST_WAITING_OPERATION, TEST_ERROR_OPERATION]})
mock_operation_get.return_value.execute = execute_mock
with pytest.raises(AirflowException, match="error"):
self.hook.export_documents(body=EXPORT_DOCUMENT_BODY, project_id=TEST_PROJECT_ID)
| TestCloudFirestoreHookWithPassedProjectId |
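The three tests above pin down a poll-until-done loop: fire exportDocuments, then repeatedly fetch the operation until it reports done, raising if the final payload carries an error. A generic hedged sketch of that shape (not the hook's actual code):

import time

def wait_for_operation(get_operation, poll_interval=5.0):
    while True:
        operation = get_operation()
        if operation.get("done"):
            if "error" in operation:
                raise RuntimeError(operation["error"])
            return operation
        time.sleep(poll_interval)

# With a stub that is pending once and then done:
states = iter([{"done": False}, {"done": True}])
assert wait_for_operation(lambda: next(states), poll_interval=0)["done"]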
python | tensorflow__tensorflow | tensorflow/python/keras/optimizer_v1.py | {
"start": 26784,
"end": 29827
} | class ____(Optimizer, trackable.Trackable):
"""Wrapper class for native TensorFlow optimizers."""
def __init__(self, optimizer, iterations=None): # pylint: disable=super-init-not-called
self.optimizer = optimizer
self._track_trackable(optimizer, name='optimizer')
if iterations is None:
with backend.name_scope(self.__class__.__name__):
self.iterations = backend.variable(0, dtype='int64', name='iterations')
else:
self.iterations = iterations
self._track_trackable(self.iterations, name='global_step')
def _clip_gradients(self, grads):
"""Clip gradients according to the clipnorm and clipvalue attributes."""
# TFOptimizer wrapper has no gradient clipping options.
return grads
def minimize(self, loss, var_list, grad_loss=None, tape=None):
"""Mimics the `OptimizerV2.minimize` API."""
if not callable(loss) and tape is None:
raise ValueError('`tape` is required when a `Tensor` loss is passed.')
tape = tape if tape is not None else backprop.GradientTape()
if callable(loss):
with tape:
if not callable(var_list):
tape.watch(var_list)
loss = loss()
if callable(var_list):
var_list = var_list()
var_list = nest.flatten(var_list)
if var_list:
grads = tape.gradient(loss, var_list, grad_loss)
grads_and_vars = list(zip(grads, var_list))
self.apply_gradients(grads_and_vars)
def apply_gradients(self, grads_and_vars):
self.optimizer.apply_gradients(grads_and_vars, global_step=self.iterations)
def get_grads(self, loss, params):
return self.optimizer.compute_gradients(loss, params)
def get_updates(self, loss, params):
if distribute_lib.has_strategy():
self.updates = []
if not params:
# After the model vars have been created, the second call to get_updates
# is called with params as an empty list. This ensures that we call
# compute_gradients with params=None.
grads = self.optimizer.compute_gradients(loss)
else:
grads = self.optimizer.compute_gradients(loss, params)
global_step = training_util.get_global_step()
opt_update = self.optimizer.apply_gradients(grads, global_step)
else:
if not params:
self.updates = [state_ops.assign_add(self.iterations, 1)]
return self.updates
# Updates list starts out empty because the iterations variable is
# incremented in optimizer.apply_gradients()
self.updates = []
grads = self.optimizer.compute_gradients(loss, params)
opt_update = self.optimizer.apply_gradients(
grads, global_step=self.iterations)
self.updates.append(opt_update)
return self.updates
@property
def weights(self):
raise NotImplementedError
def get_config(self):
raise NotImplementedError
def from_config(self, config):
raise NotImplementedError
# Aliases.
sgd = SGD
rmsprop = RMSprop
adagrad = Adagrad
adadelta = Adadelta
adam = Adam
adamax = Adamax
nadam = Nadam
| TFOptimizer |
python | walkccc__LeetCode | solutions/2291. Maximum Profit From Trading Stocks/2291.py | {
"start": 0,
"end": 574
} | class ____:
def maximumProfit(
self,
present: list[int],
future: list[int],
budget: int,
) -> int:
n = len(present)
# dp[i][j] := the maximum profit of buying present[0..i) with j budget
dp = [[0] * (budget + 1) for _ in range(n + 1)]
for i in range(1, n + 1):
profit = future[i - 1] - present[i - 1]
for j in range(budget + 1):
if j < present[i - 1]:
dp[i][j] = dp[i - 1][j]
else:
dp[i][j] = max(dp[i - 1][j], profit + dp[i - 1][j - present[i - 1]])
return dp[n][budget]
| Solution |
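Because each stock can be bought at most once, the table above is a 0/1 knapsack; iterating the budget downwards collapses it to a single row. A sketch of that space optimization (my naming), checked against LeetCode 2291's first example:

def maximum_profit(present, future, budget):
    dp = [0] * (budget + 1)
    for cost, sell in zip(present, future):
        for j in range(budget, cost - 1, -1):  # downwards: buy each stock at most once
            dp[j] = max(dp[j], dp[j - cost] + (sell - cost))
    return dp[budget]

# Buy stocks 0, 3 and 4 (cost 5 + 2 + 3 = 10) for profit 3 + 1 + 2 = 6.
assert maximum_profit([5, 4, 6, 2, 3], [8, 5, 4, 3, 5], 10) == 6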
python | numba__llvmlite | llvmlite/ir/instructions.py | {
"start": 14274,
"end": 14715
} | class ____(Instruction):
def __init__(self, parent, op, val, typ, name=''):
super(CastInstr, self).__init__(parent, typ, op, [val], name=name)
def descr(self, buf):
buf.append("{0} {1} {2} to {3} {4}\n".format(
self.opname,
self.operands[0].type,
self.operands[0].get_reference(),
self.type,
self._stringify_metadata(leading_comma=True),
))
| CastInstr |
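Cast instructions are normally created through llvmlite's public builder API rather than instantiated directly; a hedged usage sketch (assuming llvmlite is installed) whose printed IR contains a zext line in the format descr() above renders:

from llvmlite import ir

module = ir.Module()
func = ir.Function(module, ir.FunctionType(ir.IntType(64), [ir.IntType(32)]), "widen")
builder = ir.IRBuilder(func.append_basic_block())
builder.ret(builder.zext(func.args[0], ir.IntType(64)))  # emits a cast instruction
print(module)  # contains a line roughly like: %".4" = zext i32 %".1" to i64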
python | django-import-export__django-import-export | import_export/widgets.py | {
"start": 7818,
"end": 10079
} | class ____(Widget):
"""
Widget for converting boolean fields.
The widget assumes that ``True``, ``False``, and ``None`` are all valid
values, as to match Django's `BooleanField
<https://docs.djangoproject.com/en/dev/ref/models/fields/#booleanfield>`_.
That said, whether the database/Django will actually accept NULL values
will depend on if you have set ``null=True`` on that Django field.
Recognizes standard boolean representations. For custom boolean values,
see :ref:`custom_boolean_handling` in the advanced usage documentation.
"""
TRUE_VALUES = ["1", 1, True, "true", "TRUE", "True"]
FALSE_VALUES = ["0", 0, False, "false", "FALSE", "False"]
NULL_VALUES = ["", None, "null", "NULL", "none", "NONE", "None"]
def __init__(self, coerce_to_string=True):
""" """
super().__init__(coerce_to_string)
def clean(self, value, row=None, **kwargs):
"""
Converts the input value to a Python boolean or None.
Recognizes common string representations of boolean values:
- True values: '1', 1, True, 'true', 'TRUE', 'True'
- False values: '0', 0, False, 'false', 'FALSE', 'False'
- Null values: '', None, 'null', 'NULL', 'none', 'NONE', 'None'
:param value: The value to be converted to boolean.
:param row: The current row being processed.
:param **kwargs: Optional keyword arguments.
:returns: True, False, or None depending on the input value.
"""
if value in self.NULL_VALUES:
return None
return True if value in self.TRUE_VALUES else False
def render(self, value, obj=None, **kwargs):
"""
:return: ``True`` is represented as ``1``, ``False`` as ``0``, and
``None``/NULL as an empty string.
If ``coerce_to_string`` is ``False``, the python Boolean type is
returned (may be ``None``).
"""
self._obj_deprecation_warning(obj)
if self.coerce_to_string and not kwargs.get("force_native_type"):
            if value in self.NULL_VALUES or type(value) is not bool:
return ""
return self.TRUE_VALUES[0] if value else self.FALSE_VALUES[0]
return value
| BooleanWidget |
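A quick behavioural check of the widget's documented contract (assuming django-import-export is installed):

from import_export.widgets import BooleanWidget

w = BooleanWidget()
assert w.clean("TRUE") is True
assert w.clean("0") is False
assert w.clean("") is None
assert w.render(True) == "1" and w.render(False) == "0" and w.render(None) == ""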
python | spack__spack | lib/spack/spack/subprocess_context.py | {
"start": 3306,
"end": 3998
} | class ____:
"""Class to serialize and restore global state for child processes.
Spack may modify state that is normally read from disk or command line in memory;
this object is responsible for properly serializing that state to be applied to a subprocess.
"""
def __init__(self):
self.config = spack.config.CONFIG.ensure_unwrapped()
self.platform = spack.platforms.host
self.store = spack.store.STORE
def restore(self):
spack.config.CONFIG = self.config
spack.repo.enable_repo(spack.repo.RepoPath.from_config(self.config))
spack.platforms.host = self.platform
spack.store.STORE = self.store
| GlobalStateMarshaler |
python | coleifer__peewee | tests/signals.py | {
"start": 169,
"end": 226
} | class ____(BaseSignalModel):
a = TextField(default='')
| A |
python | PrefectHQ__prefect | tests/_internal/compatibility/test_async_dispatch.py | {
"start": 8685,
"end": 10406
} | class ____:
"""Test that async_dispatch adds the .aio attribute for compatibility."""
def test_async_dispatch_adds_aio_attribute(self):
"""Test that the decorator adds an .aio attribute pointing to async implementation."""
async def async_impl(x: int) -> str:
return f"async {x}"
@async_dispatch(async_impl)
def sync_impl(x: int) -> str:
return f"sync {x}"
# Check that .aio attribute exists and points to async implementation
assert hasattr(sync_impl, "aio")
assert sync_impl.aio is async_impl
async def test_aio_attribute_can_be_called_directly(self):
"""Test that the .aio attribute can be called directly."""
async def async_impl(x: int) -> str:
return f"async {x}"
@async_dispatch(async_impl)
def sync_impl(x: int) -> str:
return f"sync {x}"
# Call .aio directly
result = await sync_impl.aio(42)
assert result == "async 42"
async def test_aio_attribute_with_instance_methods(self):
"""Test that .aio works correctly with instance methods."""
class Counter:
def __init__(self) -> None:
self.count = 0
async def increment_async(self) -> int:
self.count += 1
return self.count
@async_dispatch(increment_async)
def increment(self) -> int:
self.count += 10
return self.count
counter = Counter()
# Call via .aio directly - should increment by 1
result = await counter.increment.aio(counter)
assert result == 1
assert counter.count == 1
| TestAioAttribute |
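The contract these tests enforce can be reduced to a few lines; a minimal sketch (my reduction, not Prefect's actual implementation, which also dispatches on the calling context):

def async_dispatch(async_impl):
    """Attach the async implementation to the sync one as `.aio`."""
    def decorator(sync_impl):
        sync_impl.aio = async_impl
        return sync_impl
    return decorator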
python | django__django | django/forms/fields.py | {
"start": 18219,
"end": 19459
} | class ____(BaseTemporalField):
widget = DateTimeInput
input_formats = DateTimeFormatsIterator()
default_error_messages = {
"invalid": _("Enter a valid date/time."),
}
def prepare_value(self, value):
if isinstance(value, datetime.datetime):
value = to_current_timezone(value)
return value
def to_python(self, value):
"""
Validate that the input can be converted to a datetime. Return a
Python datetime.datetime object.
"""
if value in self.empty_values:
return None
if isinstance(value, datetime.datetime):
return from_current_timezone(value)
if isinstance(value, datetime.date):
result = datetime.datetime(value.year, value.month, value.day)
return from_current_timezone(result)
try:
result = parse_datetime(value.strip())
except ValueError:
raise ValidationError(self.error_messages["invalid"], code="invalid")
if not result:
result = super().to_python(value)
return from_current_timezone(result)
def strptime(self, value, format):
return datetime.datetime.strptime(value, format)
| DateTimeField |
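A hedged standalone usage sketch (Django needs settings configured before the timezone helpers run; with USE_TZ=False the parsed value passes through from_current_timezone unchanged):

import datetime

from django.conf import settings

settings.configure(USE_TZ=False)

from django.forms import DateTimeField

f = DateTimeField()
assert f.to_python("2024-01-02 03:04:05") == datetime.datetime(2024, 1, 2, 3, 4, 5)
assert f.to_python("") is None  # empty values clean to None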
python | spyder-ide__spyder | external-deps/spyder-remote-services/spyder_remote_services/services/files/handlers.py | {
"start": 1629,
"end": 4979
} | class ____(FilesRESTMixin, JupyterHandler):
auth_resource = "spyder-services"
def get_path_argument(self, name: str) -> str:
"""Get the path argument from the request.
Args
----
name (str): Name of the argument to get.
Returns
-------
str: The path argument.
Raises
------
HTTPError: If the argument is missing or invalid.
"""
path = self.get_argument(name)
if not path:
raise web.HTTPError(
HTTPStatus.BAD_REQUEST,
reason=f"Missing {name} argument",
)
match = re.match(_path_regex, path)
if not match:
raise web.HTTPError(
HTTPStatus.BAD_REQUEST,
reason=f"Missing {name} argument",
)
return match.group("path")
def write_json(self, data, status=200):
self.set_status(status)
self.set_header("Content-Type", "application/json")
self.finish(orjson.dumps(data))
@asynccontextmanager
async def stream_json(self, status=200):
self.set_status(status)
self.set_header("Content-Type", "application/stream+json")
async def write_json(data):
self.write(orjson.dumps(data) + b"\n")
await self.flush()
yield write_json
await self.finish()
def write_error(self, status_code, **kwargs):
"""APIHandler errors are JSON, not human pages."""
self.set_header("Content-Type", "application/json")
reply: dict[str, Any] = {}
exc_info = kwargs.get("exc_info")
if exc_info:
e = exc_info[1]
if isinstance(e, web.HTTPError):
reply["message"] = e.log_message or responses.get(status_code, "Unknown HTTP Error")
reply["reason"] = e.reason
elif isinstance(e, OSError):
self.set_status(HTTPStatus.EXPECTATION_FAILED)
reply["strerror"] = e.strerror
reply["errno"] = e.errno
reply["filename"] = e.filename
else:
self.set_status(HTTPStatus.INTERNAL_SERVER_ERROR)
reply["type"] = str(type(e))
reply["message"] = str(e)
reply["traceback"] = traceback.format_exception(*exc_info)
else:
reply["message"] = responses.get(status_code, "Unknown HTTP Error")
self.finish(orjson.dumps(reply))
def log_exception(self, typ, value, tb):
"""Log uncaught exceptions."""
if isinstance(value, web.HTTPError):
if value.log_message:
format = "%d %s: " + value.log_message
args = [value.status_code, self._request_summary()] + list(value.args)
self.log.warning(format, *args)
elif isinstance(value, OSError):
self.log.debug(
"OSError [Errno %s] %s",
value.errno,
self._request_summary(),
exc_info=(typ, value, tb), # type: ignore
)
else:
self.log.warning(
"Uncaught exception %s\n%r",
self._request_summary(),
self.request,
exc_info=(typ, value, tb), # type: ignore
)
| BaseFSHandler |
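stream_json above emits newline-delimited JSON (one document per line, flushed eagerly). A hedged client-side sketch of consuming such a stream:

import json

def iter_ndjson(lines):
    for raw in lines:
        raw = raw.strip()
        if raw:
            yield json.loads(raw)

chunks = [b'{"file": "a.txt"}\n', b'{"file": "b.txt"}\n']
assert [d["file"] for d in iter_ndjson(chunks)] == ["a.txt", "b.txt"]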
python | bokeh__bokeh | src/bokeh/embed/util.py | {
"start": 7585,
"end": 8353
} | class ____:
""" Encapsulate data needed for embedding a Bokeh document root.
Values for ``name`` or ``tags`` are optional. They may be useful for
querying a collection of roots to find a specific one to embed.
"""
#: A unique ID to use for the DOM element
elementid: ID
#: The Bokeh model ID for this root
id: ID = field(compare=False)
#: An optional user-supplied name for this root
name: str | None = field(default="", compare=False)
#: A list of any user-supplied tag values for this root
tags: list[Any] = field(default_factory=list, compare=False)
def __post_init__(self):
# Model.name is nullable, and field() won't enforce the default when name=None
self.name = self.name or ""
| RenderRoot |
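Since every field except elementid is declared with compare=False, two roots are equal exactly when they target the same DOM element; a quick check, assuming bokeh is installed and RenderRoot is the dataclass this record comes from:

from bokeh.embed.util import RenderRoot

a = RenderRoot(elementid="el-1", id="m-1", name="left")
b = RenderRoot(elementid="el-1", id="m-2", name="right")
assert a == b  # equality keyed on elementid alone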
python | davidhalter__jedi | jedi/plugins/django.py | {
"start": 9972,
"end": 10289
} | class ____(BaseTreeParamName):
def __init__(self, field_name):
super().__init__(field_name.parent_context, field_name.tree_name)
self._field_name = field_name
def get_kind(self):
return Parameter.KEYWORD_ONLY
def infer(self):
return self._field_name.infer()
| DjangoParamName |
python | doocs__leetcode | solution/2500-2599/2516.Take K of Each Character From Left and Right/Solution.py | {
"start": 0,
"end": 383
} | class ____:
def takeCharacters(self, s: str, k: int) -> int:
cnt = Counter(s)
if any(cnt[c] < k for c in "abc"):
return -1
mx = j = 0
for i, c in enumerate(s):
cnt[c] -= 1
while cnt[c] < k:
cnt[s[j]] += 1
j += 1
mx = max(mx, i - j + 1)
return len(s) - mx
| Solution |
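The key inversion above: taking k of each letter from the two ends is equivalent to leaving behind a maximal middle window outside of which every letter still occurs at least k times. Restated standalone (my naming) and checked against LeetCode 2516's second example:

from collections import Counter

def take_characters(s, k):
    cnt = Counter(s)
    if any(cnt[c] < k for c in "abc"):
        return -1
    longest = left = 0
    for right, c in enumerate(s):
        cnt[c] -= 1            # move c into the kept middle window
        while cnt[c] < k:      # shrink until the outside has k of c again
            cnt[s[left]] += 1
            left += 1
        longest = max(longest, right - left + 1)
    return len(s) - longest

assert take_characters("aabaaaacaabc", 2) == 8  # keep "aaaa" in the middle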
python | gevent__gevent | benchmarks/bench_spawn.py | {
"start": 611,
"end": 5929
} | class ____(object):
def __init__(self,
spawn_duration,
sleep_duration=-1,
join_duration=-1):
self.spawn_duration = spawn_duration
self.sleep_duration = sleep_duration
self.join_duration = join_duration
def _test(spawn, sleep, options):
global counter
counter = 0
before_spawn = perf_counter()
for _ in xrange(N):
spawn(incr, **options.kwargs)
spawn_duration = perf_counter() - before_spawn
if options.sleep:
assert counter == 0, counter
before_sleep = perf_counter()
sleep(0)
sleep_duration = perf_counter() - before_sleep
assert counter == N, (counter, N)
else:
sleep_duration = -1
if options.join:
before_join = perf_counter()
options.join()
join_duration = perf_counter() - before_join
else:
join_duration = -1
return Times(spawn_duration,
sleep_duration,
join_duration)
def test(spawn, sleep, options):
all_times = [
_test(spawn, sleep, options)
for _ in xrange(options.loops)
]
spawn_duration = sum(x.spawn_duration for x in all_times)
sleep_duration = sum(x.sleep_duration for x in all_times)
    join_duration = sum(x.join_duration for x in all_times
                        if x.join_duration != -1)
return Times(spawn_duration, sleep_duration, join_duration)
def bench_none(options):
from time import sleep
options.sleep = False
def spawn(f, **kwargs):
return f(**kwargs)
return test(spawn,
sleep,
options)
def bench_gevent(options):
from gevent import spawn, sleep
return test(spawn, sleep, options)
def bench_geventraw(options):
from gevent import sleep, spawn_raw
return test(spawn_raw, sleep, options)
def bench_geventpool(options):
from gevent import sleep
from gevent.pool import Pool
p = Pool()
if options.join:
options.join = p.join
times = test(p.spawn, sleep, options)
return times
try:
__import__('eventlet')
except ImportError:
pass
else:
def bench_eventlet(options):
from eventlet import spawn, sleep
if options.eventlet_hub is not None:
from eventlet.hubs import use_hub
use_hub(options.eventlet_hub)
return test(spawn, sleep, options)
def all():
result = [x for x in globals() if x.startswith('bench_') and x != 'bench_all']
result.sort()
result = [x.replace('bench_', '') for x in result]
return result
def main(argv=None):
import os
import sys
if argv is None:
argv = sys.argv[1:]
env_options = [
'--inherit-environ',
','.join([k for k in os.environ
if k.startswith(('GEVENT',
'PYTHON',
'ZS', # experimental zodbshootout config
'RS', # relstorage config
'COVERAGE'))])]
# This is a default, so put it early
argv[0:0] = env_options
def worker_cmd(cmd, args):
cmd.extend(args.benchmark)
runner = Runner(add_cmdline_args=worker_cmd)
runner.argparser.add_argument('benchmark',
nargs='*',
default='all',
choices=all() + ['all'])
def spawn_time(loops, func, options):
options.loops = loops
times = func(options)
return times.spawn_duration
def sleep_time(loops, func, options):
options.loops = loops
times = func(options)
return times.sleep_duration
def join_time(loops, func, options):
options.loops = loops
times = func(options)
return times.join_duration
args = runner.parse_args(argv)
if 'all' in args.benchmark or args.benchmark == 'all':
args.benchmark = ['all']
names = all()
else:
names = args.benchmark
names = sorted(set(names))
for name in names:
runner.bench_time_func(name + ' spawn',
spawn_time,
globals()['bench_' + name],
Options(sleep=False, join=False),
inner_loops=N)
if name != 'none':
runner.bench_time_func(name + ' sleep',
sleep_time,
globals()['bench_' + name],
Options(sleep=True, join=False),
inner_loops=N)
if 'geventpool' in names:
runner.bench_time_func('geventpool join',
join_time,
bench_geventpool,
Options(sleep=True, join=True),
inner_loops=N)
for name in names:
runner.bench_time_func(name + ' spawn kwarg',
spawn_time,
globals()['bench_' + name],
Options(sleep=False, join=False, foo=1, bar='hello'),
inner_loops=N)
if __name__ == '__main__':
main()
| Times |
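The harness times three phases per backend: spawning N tasks, letting the scheduler run them (counter only reaches N after sleep(0)), and joining. A gevent-only sketch of the same phases outside the pyperf harness, assuming gevent is installed:

from time import perf_counter

import gevent

def bench(n=1000):
    counter = 0
    def incr():
        nonlocal counter
        counter += 1
    t0 = perf_counter()
    greenlets = [gevent.spawn(incr) for _ in range(n)]
    t1 = perf_counter()
    gevent.sleep(0)            # yield to the hub so every spawned greenlet runs
    t2 = perf_counter()
    assert counter == n
    gevent.joinall(greenlets)
    return t1 - t0, t2 - t1    # spawn time, scheduling time

print(bench())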
python | numpy__numpy | tools/c_coverage/c_coverage_report.py | {
"start": 599,
"end": 1393
} | class ____(HtmlFormatter):
"""Custom HTML formatter to insert extra information with the lines."""
def __init__(self, lines, **kwargs):
HtmlFormatter.__init__(self, **kwargs)
self.lines = lines
def wrap(self, source, outfile):
for i, (c, t) in enumerate(HtmlFormatter.wrap(self, source, outfile)):
as_functions = self.lines.get(i - 1, None)
if as_functions is not None:
yield 0, ('<div title=%s style="background: #ccffcc">[%2d]' %
(quoteattr('as ' + ', '.join(as_functions)),
len(as_functions)))
else:
yield 0, ' '
yield c, t
if as_functions is not None:
yield 0, '</div>'
| FunctionHtmlFormatter |
python | spack__spack | lib/spack/spack/test/config.py | {
"start": 11188,
"end": 64372
} | class ____:
def __init__(self, path):
self.path = path
def test_substitute_config_variables(mock_low_high_config, monkeypatch):
prefix = spack.paths.prefix.lstrip("/")
assert cross_plat_join(
os.sep + os.path.join("foo", "bar", "baz"), prefix
) == spack_path.canonicalize_path("/foo/bar/baz/$spack")
assert cross_plat_join(
spack.paths.prefix, os.path.join("foo", "bar", "baz")
) == spack_path.canonicalize_path("$spack/foo/bar/baz/")
assert cross_plat_join(
os.sep + os.path.join("foo", "bar", "baz"), prefix, os.path.join("foo", "bar", "baz")
) == spack_path.canonicalize_path("/foo/bar/baz/$spack/foo/bar/baz/")
assert cross_plat_join(
os.sep + os.path.join("foo", "bar", "baz"), prefix
) == spack_path.canonicalize_path("/foo/bar/baz/${spack}")
assert cross_plat_join(
spack.paths.prefix, os.path.join("foo", "bar", "baz")
) == spack_path.canonicalize_path("${spack}/foo/bar/baz/")
assert cross_plat_join(
os.sep + os.path.join("foo", "bar", "baz"), prefix, os.path.join("foo", "bar", "baz")
) == spack_path.canonicalize_path("/foo/bar/baz/${spack}/foo/bar/baz/")
assert cross_plat_join(
os.sep + os.path.join("foo", "bar", "baz"), prefix, os.path.join("foo", "bar", "baz")
) != spack_path.canonicalize_path("/foo/bar/baz/${spack/foo/bar/baz/")
# $env replacement is a no-op when no environment is active
assert spack_path.canonicalize_path(
os.sep + os.path.join("foo", "bar", "baz", "$env")
) == os.sep + os.path.join("foo", "bar", "baz", "$env")
# Fake an active environment and $env is replaced properly
fake_env_path = os.sep + os.path.join("quux", "quuux")
monkeypatch.setattr(ev, "active_environment", lambda: MockEnv(fake_env_path))
assert spack_path.canonicalize_path("$env/foo/bar/baz") == os.path.join(
fake_env_path, os.path.join("foo", "bar", "baz")
)
# relative paths without source information are relative to cwd
assert spack_path.canonicalize_path(os.path.join("foo", "bar", "baz")) == os.path.abspath(
os.path.join("foo", "bar", "baz")
)
# relative paths with source information are relative to the file
spack.config.set(
"modules:default", {"roots": {"lmod": os.path.join("foo", "bar", "baz")}}, scope="low"
)
spack.config.CONFIG.clear_caches()
path = spack.config.get("modules:default:roots:lmod")
assert spack_path.canonicalize_path(path) == os.path.normpath(
os.path.join(mock_low_high_config.scopes["low"].path, os.path.join("foo", "bar", "baz"))
)
# test architecture information is in replacements
assert spack_path.canonicalize_path(
os.path.join("foo", "$platform", "bar")
) == os.path.abspath(os.path.join("foo", "test", "bar"))
host_target = spack.platforms.host().default_target()
host_target_family = str(host_target.family)
assert spack_path.canonicalize_path(
os.path.join("foo", "$target_family", "bar")
) == os.path.abspath(os.path.join("foo", host_target_family, "bar"))
packages_merge_low = {"packages": {"foo": {"variants": ["+v1"]}, "bar": {"variants": ["+v2"]}}}
packages_merge_high = {
"packages": {
"foo": {"version": ["a"]},
"bar": {"version": ["b"], "variants": ["+v3"]},
"baz": {"version": ["c"]},
}
}
@pytest.mark.regression("7924")
def test_merge_with_defaults(mock_low_high_config, write_config_file):
"""This ensures that specified preferences merge with defaults as
expected. Originally all defaults were initialized with the
exact same object, which led to aliasing problems. Therefore
the test configs used here leave 'version' blank for multiple
packages in 'packages_merge_low'.
"""
write_config_file("packages", packages_merge_low, "low")
write_config_file("packages", packages_merge_high, "high")
cfg = spack.config.get("packages")
assert cfg["foo"]["version"] == ["a"]
assert cfg["bar"]["version"] == ["b"]
assert cfg["baz"]["version"] == ["c"]
def test_substitute_user(mock_low_high_config):
user = getpass.getuser()
assert os.sep + os.path.join(
"foo", "bar"
) + os.sep + user + os.sep + "baz" == spack_path.canonicalize_path(
os.sep + os.path.join("foo", "bar", "$user", "baz")
)
def test_substitute_user_cache(mock_low_high_config):
user_cache_path = spack.paths.user_cache_path
assert user_cache_path + os.sep + "baz" == spack_path.canonicalize_path(
os.path.join("$user_cache_path", "baz")
)
def test_substitute_tempdir(mock_low_high_config):
tempdir = tempfile.gettempdir()
assert tempdir == spack_path.canonicalize_path("$tempdir")
assert tempdir + os.sep + os.path.join("foo", "bar", "baz") == spack_path.canonicalize_path(
os.path.join("$tempdir", "foo", "bar", "baz")
)
def test_substitute_date(mock_low_high_config):
test_path = os.path.join("hello", "world", "on", "$date")
new_path = spack_path.canonicalize_path(test_path)
assert "$date" in test_path
assert date.today().strftime("%Y-%m-%d") in new_path
def test_substitute_spack_version():
version = spack.spack_version_info
assert spack_path.canonicalize_path(
"spack$spack_short_version/test"
) == spack_path.canonicalize_path(f"spack{version[0]}.{version[1]}/test")
PAD_STRING = spack_path.SPACK_PATH_PADDING_CHARS
MAX_PATH_LEN = spack_path.get_system_path_max()
MAX_PADDED_LEN = MAX_PATH_LEN - spack_path.SPACK_MAX_INSTALL_PATH_LENGTH
reps = [PAD_STRING for _ in range((MAX_PADDED_LEN // len(PAD_STRING) + 1) + 2)]
full_padded_string = os.path.join(os.sep + "path", os.sep.join(reps))[:MAX_PADDED_LEN]
@pytest.mark.parametrize(
"config_settings,expected",
[
([], [None, None, None]),
([["config:install_tree:root", os.sep + "path"]], [os.sep + "path", None, None]),
(
[["config:install_tree:projections", {"all": "{name}"}]],
[None, None, {"all": "{name}"}],
),
],
)
def test_parse_install_tree(config_settings, expected, mutable_config):
expected_root = expected[0] or mutable_config.get("config:install_tree:root")
expected_unpadded_root = expected[1] or expected_root
expected_proj = expected[2] or spack.directory_layout.default_projections
# config settings is a list of 2-element lists, [path, value]
# where path is a config path and value is the value to set at that path
# these can be "splatted" in as the arguments to config.set
for config_setting in config_settings:
mutable_config.set(*config_setting)
config_dict = mutable_config.get("config")
root, unpadded_root, projections = spack.store.parse_install_tree(config_dict)
assert root == expected_root
assert unpadded_root == expected_unpadded_root
assert projections == expected_proj
def test_change_or_add(mutable_config, mock_packages):
spack.config.add("packages:a:version:['1.0']", scope="user")
spack.config.add("packages:b:version:['1.1']", scope="system")
class ChangeTest:
def __init__(self, pkg_name, new_version):
self.pkg_name = pkg_name
self.new_version = new_version
def find_fn(self, section):
return self.pkg_name in section
def change_fn(self, section):
pkg_section = section.get(self.pkg_name, {})
pkg_section["version"] = self.new_version
section[self.pkg_name] = pkg_section
change1 = ChangeTest("b", ["1.2"])
spack.config.change_or_add("packages", change1.find_fn, change1.change_fn)
assert "b" not in mutable_config.get("packages", scope="user")
assert mutable_config.get("packages")["b"]["version"] == ["1.2"]
change2 = ChangeTest("c", ["1.0"])
spack.config.change_or_add("packages", change2.find_fn, change2.change_fn)
assert "c" in mutable_config.get("packages", scope="user")
@pytest.mark.not_on_windows("Padding unsupported on Windows")
@pytest.mark.parametrize(
"config_settings,expected",
[
(
[
["config:install_tree:root", os.sep + "path"],
["config:install_tree:padded_length", 11],
],
[os.path.join(os.sep + "path", PAD_STRING[:5]), os.sep + "path", None],
),
(
[["config:install_tree:root", "/path/$padding:11"]],
[os.path.join(os.sep + "path", PAD_STRING[:5]), os.sep + "path", None],
),
([["config:install_tree:padded_length", False]], [None, None, None]),
(
[
["config:install_tree:padded_length", True],
["config:install_tree:root", os.sep + "path"],
],
[full_padded_string, os.sep + "path", None],
),
],
)
def test_parse_install_tree_padded(config_settings, expected, mutable_config):
expected_root = expected[0] or mutable_config.get("config:install_tree:root")
expected_unpadded_root = expected[1] or expected_root
expected_proj = expected[2] or spack.directory_layout.default_projections
# config settings is a list of 2-element lists, [path, value]
# where path is a config path and value is the value to set at that path
# these can be "splatted" in as the arguments to config.set
for config_setting in config_settings:
mutable_config.set(*config_setting)
config_dict = mutable_config.get("config")
root, unpadded_root, projections = spack.store.parse_install_tree(config_dict)
assert root == expected_root
assert unpadded_root == expected_unpadded_root
assert projections == expected_proj
def test_read_config(mock_low_high_config, write_config_file):
write_config_file("config", config_low, "low")
assert spack.config.get("config") == config_low["config"]
def test_read_config_override_all(mock_low_high_config, write_config_file):
write_config_file("config", config_low, "low")
write_config_file("config", config_override_all, "high")
assert spack.config.get("config") == {"install_tree": {"root": "override_all"}}
def test_read_config_override_key(mock_low_high_config, write_config_file):
write_config_file("config", config_low, "low")
write_config_file("config", config_override_key, "high")
assert spack.config.get("config") == {
"install_tree": {"root": "override_key"},
"build_stage": ["path1", "path2", "path3"],
}
def test_read_config_merge_list(mock_low_high_config, write_config_file):
write_config_file("config", config_low, "low")
write_config_file("config", config_merge_list, "high")
assert spack.config.get("config") == {
"install_tree": {"root": "install_tree_path"},
"build_stage": ["patha", "pathb", "path1", "path2", "path3"],
}
def test_read_config_override_list(mock_low_high_config, write_config_file):
write_config_file("config", config_low, "low")
write_config_file("config", config_override_list, "high")
assert spack.config.get("config") == {
"install_tree": {"root": "install_tree_path"},
"build_stage": config_override_list["config"]["build_stage:"],
}
def test_internal_config_update(mock_low_high_config, write_config_file):
write_config_file("config", config_low, "low")
before = mock_low_high_config.get("config")
assert before["install_tree"]["root"] == "install_tree_path"
# add an internal configuration scope
scope = spack.config.InternalConfigScope("command_line")
assert "InternalConfigScope" in repr(scope)
mock_low_high_config.push_scope(scope)
command_config = mock_low_high_config.get("config", scope="command_line")
command_config["install_tree"] = {"root": "foo/bar"}
mock_low_high_config.set("config", command_config, scope="command_line")
after = mock_low_high_config.get("config")
assert after["install_tree"]["root"] == "foo/bar"
def test_internal_config_filename(mock_low_high_config, write_config_file):
write_config_file("config", config_low, "low")
mock_low_high_config.push_scope(spack.config.InternalConfigScope("command_line"))
with pytest.raises(NotImplementedError):
mock_low_high_config.get_config_filename("command_line", "config")
def test_mark_internal():
data = {
"config": {
"bool": False,
"int": 6,
"numbers": [1, 2, 3],
"string": "foo",
"dict": {"more_numbers": [1, 2, 3], "another_string": "foo", "another_int": 7},
}
}
marked = spack.config._mark_internal(data, "x")
# marked version should be equal to the original
assert data == marked
def assert_marked(obj):
if type(obj) is bool:
return # can't subclass bool, so can't mark it
assert hasattr(obj, "_start_mark") and obj._start_mark.name == "x"
assert hasattr(obj, "_end_mark") and obj._end_mark.name == "x"
# everything in the marked version should have marks
checks = (
marked.keys(),
marked.values(),
marked["config"].keys(),
marked["config"].values(),
marked["config"]["numbers"],
marked["config"]["dict"].keys(),
marked["config"]["dict"].values(),
marked["config"]["dict"]["more_numbers"],
)
for seq in checks:
for obj in seq:
assert_marked(obj)
def test_internal_config_from_data():
config = spack.config.create_from(
spack.config.InternalConfigScope(
"_builtin", {"config": {"verify_ssl": False, "build_jobs": 6}}
)
)
assert config.get("config:verify_ssl", scope="_builtin") is False
assert config.get("config:build_jobs", scope="_builtin") == 6
assert config.get("config:verify_ssl") is False
assert config.get("config:build_jobs") == 6
# push one on top and see what happens.
config.push_scope(
spack.config.InternalConfigScope(
"higher", {"config": {"checksum": True, "verify_ssl": True}}
)
)
assert config.get("config:verify_ssl", scope="_builtin") is False
assert config.get("config:build_jobs", scope="_builtin") == 6
assert config.get("config:verify_ssl", scope="higher") is True
assert config.get("config:build_jobs", scope="higher") is None
assert config.get("config:verify_ssl") is True
assert config.get("config:build_jobs") == 6
assert config.get("config:checksum") is True
assert config.get("config:checksum", scope="_builtin") is None
assert config.get("config:checksum", scope="higher") is True
def test_keys_are_ordered(configuration_dir):
"""Test that keys in Spack YAML files retain their order from the file."""
expected_order = (
"./bin",
"./man",
"./share/man",
"./share/aclocal",
"./lib/pkgconfig",
"./lib64/pkgconfig",
"./share/pkgconfig",
"./",
)
config_scope = spack.config.DirectoryConfigScope("modules", configuration_dir / "site")
data = config_scope.get_section("modules")
prefix_inspections = data["modules"]["prefix_inspections"]
for actual, expected in zip(prefix_inspections, expected_order):
assert actual == expected
def test_config_format_error(mutable_config):
"""This is raised when we try to write a bad configuration."""
with pytest.raises(spack.config.ConfigFormatError):
spack.config.set("compilers", {"bad": "data"}, scope="site")
def get_config_error(filename, schema, yaml_string):
"""Parse a YAML string and return the resulting ConfigFormatError.
Fail if there is no ConfigFormatError
"""
with open(filename, "w", encoding="utf-8") as f:
f.write(yaml_string)
# parse and return error, or fail.
try:
spack.config.read_config_file(filename, schema)
except spack.config.ConfigFormatError as e:
return e
else:
pytest.fail("ConfigFormatError was not raised!")
def test_config_parse_dict_in_list(tmp_path: pathlib.Path):
with fs.working_dir(str(tmp_path)):
e = get_config_error(
"repos.yaml",
spack.schema.repos.schema,
"""\
repos:
a: https://foobar.com/foo
b: https://foobar.com/bar
c:
error:
- abcdef
d: https://foobar.com/baz
""",
)
assert "repos.yaml:2" in str(e)
def test_config_parse_str_not_bool(tmp_path: pathlib.Path):
with fs.working_dir(str(tmp_path)):
e = get_config_error(
"config.yaml",
spack.schema.config.schema,
"""\
config:
verify_ssl: False
checksum: foobar
dirty: True
""",
)
assert "config.yaml:3" in str(e)
def test_config_parse_list_in_dict(tmp_path: pathlib.Path):
with fs.working_dir(str(tmp_path)):
e = get_config_error(
"mirrors.yaml",
spack.schema.mirrors.schema,
"""\
mirrors:
foo: http://foobar.com/baz
bar: http://barbaz.com/foo
baz: http://bazfoo.com/bar
travis: [1, 2, 3]
""",
)
assert "mirrors.yaml:5" in str(e)
def test_bad_config_section(mock_low_high_config):
"""Test that getting or setting a bad section gives an error."""
with pytest.raises(spack.config.ConfigSectionError):
spack.config.set("foobar", "foobar")
with pytest.raises(spack.config.ConfigSectionError):
spack.config.get("foobar")
def test_nested_override():
"""Ensure proper scope naming of nested overrides."""
base_name = spack.config._OVERRIDES_BASE_NAME
def _check_scopes(num_expected, debug_values):
scope_names = [
s.name for s in spack.config.CONFIG.scopes.values() if s.name.startswith(base_name)
]
for i in range(num_expected):
name = "{0}{1}".format(base_name, i)
assert name in scope_names
data = spack.config.CONFIG.get_config("config", name)
assert data["debug"] == debug_values[i]
# Check results from single and nested override
with spack.config.override("config:debug", True):
with spack.config.override("config:debug", False):
_check_scopes(2, [True, False])
_check_scopes(1, [True])
def test_alternate_override(monkeypatch):
"""Ensure proper scope naming of override when conflict present."""
base_name = spack.config._OVERRIDES_BASE_NAME
def _matching_scopes(regexpr):
return [spack.config.InternalConfigScope("{0}1".format(base_name))]
# Check that the alternate naming works
monkeypatch.setattr(spack.config.CONFIG, "matching_scopes", _matching_scopes)
with spack.config.override("config:debug", False):
name = "{0}2".format(base_name)
scope_names = [
s.name for s in spack.config.CONFIG.scopes.values() if s.name.startswith(base_name)
]
assert name in scope_names
data = spack.config.CONFIG.get_config("config", name)
assert data["debug"] is False
def test_immutable_scope(tmp_path: pathlib.Path):
config_yaml = str(tmp_path / "config.yaml")
with open(config_yaml, "w", encoding="utf-8") as f:
f.write(
"""\
config:
install_tree:
root: dummy_tree_value
"""
)
scope = spack.config.DirectoryConfigScope("test", str(tmp_path), writable=False)
data = scope.get_section("config")
assert data is not None
assert data["config"]["install_tree"] == {"root": "dummy_tree_value"}
with pytest.raises(spack.error.ConfigError):
scope._write_section("config")
def test_single_file_scope(config, env_yaml):
scope = spack.config.SingleFileScope(
"env", env_yaml, spack.schema.env.schema, yaml_path=["spack"]
)
with spack.config.override(scope):
# from the single-file config
assert spack.config.get("config:verify_ssl") is False
assert spack.config.get("config:dirty") is False
# from the lower config scopes
assert spack.config.get("config:checksum") is True
assert spack.config.get("config:checksum") is True
assert spack.config.get("packages:externalmodule:buildable") is False
assert spack.config.get("repos") == {
"z": "/x/y/z",
"builtin_mock": "$spack/var/spack/test_repos/spack_repo/builtin_mock",
}
def test_single_file_scope_section_override(tmp_path: pathlib.Path, config):
"""Check that individual config sections can be overridden in an
environment config. The config here primarily differs in that the
``packages`` section is intended to override all other scopes (using the
"::" syntax).
"""
env_yaml = str(tmp_path / "env.yaml")
with open(env_yaml, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
config:
verify_ssl: False
packages::
all:
target: [ x86_64 ]
repos:
z: /x/y/z
"""
)
scope = spack.config.SingleFileScope(
"env", env_yaml, spack.schema.env.schema, yaml_path=["spack"]
)
with spack.config.override(scope):
# from the single-file config
assert spack.config.get("config:verify_ssl") is False
assert spack.config.get("packages:all:target") == ["x86_64"]
# from the lower config scopes
assert spack.config.get("config:checksum") is True
assert not spack.config.get("packages:externalmodule")
assert spack.config.get("repos") == {
"z": "/x/y/z",
"builtin_mock": "$spack/var/spack/test_repos/spack_repo/builtin_mock",
}
def test_write_empty_single_file_scope(tmp_path: pathlib.Path):
env_schema = spack.schema.env.schema
config_file = tmp_path / "config.yaml"
config_file.touch()
scope = spack.config.SingleFileScope("test", str(config_file), env_schema, yaml_path=["spack"])
scope._write_section("config")
# confirm we can write empty config
assert not scope.get_section("config")
def check_schema(name, file_contents):
"""Check a Spack YAML schema against some data"""
f = io.StringIO(file_contents)
data = syaml.load_config(f)
spack.config.validate(data, name)
def test_good_env_yaml():
check_schema(
spack.schema.env.schema,
"""\
spack:
config:
verify_ssl: False
dirty: False
repos:
- ~/my/repo/location
mirrors:
remote: /foo/bar/baz
compilers:
- compiler:
spec: cce@2.1
operating_system: cnl
modules: []
paths:
cc: /path/to/cc
cxx: /path/to/cxx
fc: /path/to/fc
f77: /path/to/f77
""",
)
def test_bad_env_yaml():
with pytest.raises(spack.config.ConfigFormatError):
check_schema(
spack.schema.env.schema,
"""\
spack:
foobar:
verify_ssl: False
dirty: False
""",
)
def test_bad_config_yaml():
with pytest.raises(spack.config.ConfigFormatError):
check_schema(
spack.schema.config.schema,
"""\
config:
verify_ssl: False
install_tree:
root:
extra_level: foo
""",
)
def test_bad_include_yaml():
with pytest.raises(spack.config.ConfigFormatError, match="is not of type"):
check_schema(
spack.schema.include.schema,
"""\
include: $HOME/include.yaml
""",
)
def test_bad_mirrors_yaml():
with pytest.raises(spack.config.ConfigFormatError):
check_schema(
spack.schema.mirrors.schema,
"""\
mirrors:
local: True
""",
)
def test_bad_repos_yaml():
with pytest.raises(spack.config.ConfigFormatError):
check_schema(
spack.schema.repos.schema,
"""\
repos:
True
""",
)
def test_bad_compilers_yaml():
with pytest.raises(spack.config.ConfigFormatError):
check_schema(
spack.schema.compilers.schema,
"""\
compilers:
key_instead_of_list: 'value'
""",
)
with pytest.raises(spack.config.ConfigFormatError):
check_schema(
spack.schema.compilers.schema,
"""\
compilers:
- shmompiler:
environment: /bad/value
""",
)
with pytest.raises(spack.config.ConfigFormatError):
check_schema(
spack.schema.compilers.schema,
"""\
compilers:
- compiler:
fenfironfent: /bad/value
""",
)
def test_internal_config_section_override(mock_low_high_config, write_config_file):
write_config_file("config", config_merge_list, "low")
wanted_list = config_override_list["config"]["build_stage:"]
mock_low_high_config.push_scope(
spack.config.InternalConfigScope("high", {"config:": {"build_stage": wanted_list}})
)
assert mock_low_high_config.get("config:build_stage") == wanted_list
def test_internal_config_dict_override(mock_low_high_config, write_config_file):
write_config_file("config", config_merge_dict, "low")
wanted_dict = config_override_dict["config"]["aliases:"]
mock_low_high_config.push_scope(spack.config.InternalConfigScope("high", config_override_dict))
assert mock_low_high_config.get("config:aliases") == wanted_dict
def test_internal_config_list_override(mock_low_high_config, write_config_file):
write_config_file("config", config_merge_list, "low")
wanted_list = config_override_list["config"]["build_stage:"]
mock_low_high_config.push_scope(spack.config.InternalConfigScope("high", config_override_list))
assert mock_low_high_config.get("config:build_stage") == wanted_list
def test_set_section_override(mock_low_high_config, write_config_file):
write_config_file("config", config_merge_list, "low")
wanted_list = config_override_list["config"]["build_stage:"]
with spack.config.override("config::build_stage", wanted_list):
assert mock_low_high_config.get("config:build_stage") == wanted_list
assert config_merge_list["config"]["build_stage"] == mock_low_high_config.get(
"config:build_stage"
)
def test_set_list_override(mock_low_high_config, write_config_file):
write_config_file("config", config_merge_list, "low")
wanted_list = config_override_list["config"]["build_stage:"]
with spack.config.override("config:build_stage:", wanted_list):
assert wanted_list == mock_low_high_config.get("config:build_stage")
assert config_merge_list["config"]["build_stage"] == mock_low_high_config.get(
"config:build_stage"
)
def test_set_dict_override(mock_low_high_config, write_config_file):
write_config_file("config", config_merge_dict, "low")
wanted_dict = config_override_dict["config"]["aliases:"]
with spack.config.override("config:aliases:", wanted_dict):
assert wanted_dict == mock_low_high_config.get("config:aliases")
assert config_merge_dict["config"]["aliases"] == mock_low_high_config.get("config:aliases")
def test_set_bad_path(config):
with pytest.raises(ValueError):
with spack.config.override(":bad:path", ""):
pass
def test_bad_path_double_override(config):
with pytest.raises(syaml.SpackYAMLError, match="Meaningless second override"):
with spack.config.override("bad::double:override::directive", ""):
pass
def test_license_dir_config(mutable_config, mock_packages):
"""Ensure license directory is customizable"""
expected_dir = spack.paths.default_license_dir
assert spack.config.get("config:license_dir") == expected_dir
assert spack.package_base.PackageBase.global_license_dir == expected_dir
assert spack.repo.PATH.get_pkg_class("pkg-a").global_license_dir == expected_dir
rel_path = os.path.join(os.path.sep, "foo", "bar", "baz")
spack.config.set("config:license_dir", rel_path)
assert spack.config.get("config:license_dir") == rel_path
assert spack.package_base.PackageBase.global_license_dir == rel_path
assert spack.repo.PATH.get_pkg_class("pkg-a").global_license_dir == rel_path
@pytest.mark.regression("22547")
def test_single_file_scope_cache_clearing(env_yaml):
scope = spack.config.SingleFileScope(
"env", env_yaml, spack.schema.env.schema, yaml_path=["spack"]
)
# Check that we can retrieve data from the single file scope
before = scope.get_section("config")
assert before
# Clear the cache of the Single file scope
scope.clear()
    # Check that the section can be retrieved again and it's
# the same as before
after = scope.get_section("config")
assert after
assert before == after
@pytest.mark.regression("22611")
def test_internal_config_scope_cache_clearing():
"""
An InternalConfigScope object is constructed from data that is already
    in memory, so it doesn't have any cache to clear. Here we ensure
    that calling the clear method is consistent with that.
"""
data = {"config": {"build_jobs": 10}}
internal_scope = spack.config.InternalConfigScope("internal", data)
# Ensure that the initial object is properly set
assert internal_scope.sections["config"] == data
# Call the clear method
internal_scope.clear()
# Check that this didn't affect the scope object
assert internal_scope.sections["config"] == data
def test_system_config_path_is_overridable(working_env):
p = "/some/path"
os.environ["SPACK_SYSTEM_CONFIG_PATH"] = p
assert spack.paths._get_system_config_path() == p
def test_system_config_path_is_default_when_env_var_is_empty(working_env):
os.environ["SPACK_SYSTEM_CONFIG_PATH"] = ""
assert os.sep + os.path.join("etc", "spack") == spack.paths._get_system_config_path()
def test_user_config_path_is_overridable(working_env):
p = "/some/path"
os.environ["SPACK_USER_CONFIG_PATH"] = p
assert p == spack.paths._get_user_config_path()
def test_user_config_path_is_default_when_env_var_is_empty(working_env):
os.environ["SPACK_USER_CONFIG_PATH"] = ""
assert os.path.expanduser("~%s.spack" % os.sep) == spack.paths._get_user_config_path()
def test_default_install_tree(monkeypatch, default_config):
s = spack.spec.Spec("nonexistent@x.y.z arch=foo-bar-baz")
monkeypatch.setattr(s, "dag_hash", lambda length: "abc123")
_, _, projections = spack.store.parse_install_tree(spack.config.get("config"))
assert s.format(projections["all"]) == "foo-baz/nonexistent-x.y.z-abc123"
@pytest.fixture
def mock_include_scope(tmp_path):
for subdir in ["defaults", "test1", "test2", "test3"]:
path = tmp_path / subdir
path.mkdir()
include = tmp_path / "include.yaml"
with include.open("w", encoding="utf-8") as f:
f.write(
textwrap.dedent(
"""\
include::
- name: "test1"
path: "test1"
when: '"SPACK_DISABLE_LOCAL_CONFIG" not in env'
- name: "test2"
path: "test2"
- name: "test3"
path: "test3"
when: '"SPACK_DISABLE_LOCAL_CONFIG" not in env'
"""
)
)
yield tmp_path
@pytest.fixture
def include_config_factory(mock_include_scope):
def make_config():
cfg = spack.config.create()
cfg.push_scope(
spack.config.DirectoryConfigScope("defaults", str(mock_include_scope / "defaults")),
priority=ConfigScopePriority.DEFAULTS,
)
cfg.push_scope(
spack.config.DirectoryConfigScope("tmp_path", str(mock_include_scope)),
priority=ConfigScopePriority.CONFIG_FILES,
)
return cfg
yield make_config
def test_modify_scope_precedence(working_env, include_config_factory, tmp_path):
"""Test how spack selects the scope to modify when commands write config."""
cfg = include_config_factory()
# ensure highest precedence writable scope is selected by default
assert cfg.highest_precedence_scope().name == "tmp_path"
include_yaml = tmp_path / "include.yaml"
subdir = tmp_path / "subdir"
subdir2 = tmp_path / "subdir2"
subdir.mkdir()
subdir2.mkdir()
with include_yaml.open("w", encoding="utf-8") as f:
f.write(
textwrap.dedent(
"""\
include::
- name: "subdir"
path: "subdir"
"""
)
)
cfg.push_scope(
spack.config.DirectoryConfigScope("override", str(tmp_path)),
priority=ConfigScopePriority.CONFIG_FILES,
)
# ensure override scope is selected when it is on top
assert cfg.highest_precedence_scope().name == "override"
cfg.remove_scope("override")
with include_yaml.open("w", encoding="utf-8") as f:
f.write(
textwrap.dedent(
"""\
include::
- name: "subdir"
path: "subdir"
prefer_modify: true
"""
)
)
cfg.push_scope(
spack.config.DirectoryConfigScope("override", str(tmp_path)),
priority=ConfigScopePriority.CONFIG_FILES,
)
# if the top scope prefers another, ensure it is selected
assert cfg.highest_precedence_scope().name == "subdir"
cfg.remove_scope("override")
with include_yaml.open("w", encoding="utf-8") as f:
f.write(
textwrap.dedent(
"""\
include::
- name: "subdir"
path: "subdir"
- name: "subdir2"
path: "subdir2"
prefer_modify: true
"""
)
)
cfg.push_scope(
spack.config.DirectoryConfigScope("override", str(tmp_path)),
priority=ConfigScopePriority.CONFIG_FILES,
)
# if there are multiple scopes and one is preferred, make sure it's that one
assert cfg.highest_precedence_scope().name == "subdir2"
def test_local_config_can_be_disabled(working_env, include_config_factory):
"""Ensure that SPACK_DISABLE_LOCAL_CONFIG disables configurations with `when:`."""
os.environ["SPACK_DISABLE_LOCAL_CONFIG"] = "true"
cfg = include_config_factory()
assert "defaults" in cfg.scopes
assert "test1" not in cfg.scopes
assert "test2" in cfg.scopes
assert "test3" not in cfg.scopes
os.environ["SPACK_DISABLE_LOCAL_CONFIG"] = ""
cfg = include_config_factory()
assert "defaults" in cfg.scopes
assert "test1" not in cfg.scopes
assert "test2" in cfg.scopes
assert "test3" not in cfg.scopes
del os.environ["SPACK_DISABLE_LOCAL_CONFIG"]
cfg = include_config_factory()
assert "defaults" in cfg.scopes
assert "test1" in cfg.scopes
assert "test2" in cfg.scopes
assert "test3" in cfg.scopes
def test_override_included_config(working_env, tmp_path, include_config_factory):
override_scope = tmp_path / "override"
override_scope.mkdir()
include_yaml = override_scope / "include.yaml"
subdir = override_scope / "subdir"
subdir.mkdir()
with include_yaml.open("w", encoding="utf-8") as f:
f.write(
textwrap.dedent(
"""\
include::
- name: "subdir"
path: "subdir"
"""
)
)
# check the mock config is correct
cfg = include_config_factory()
assert "defaults" in cfg.scopes
assert "test1" in cfg.scopes
assert "test2" in cfg.scopes
assert "test3" in cfg.scopes
active_names = [s.name for s in cfg.active_scopes]
assert "defaults" in active_names
assert "test1" in active_names
assert "test2" in active_names
assert "test3" in active_names
# push a scope that overrides everything under it but includes a subdir.
# its included subdir should be active, but scopes *not* included by the overriding
# scope should not.
cfg.push_scope(
spack.config.DirectoryConfigScope("override", str(override_scope)),
priority=ConfigScopePriority.CONFIG_FILES,
)
assert "defaults" in cfg.scopes
assert "test1" in cfg.scopes
assert "test2" in cfg.scopes
assert "test3" in cfg.scopes
assert "override" in cfg.scopes
assert "subdir" in cfg.scopes
active_names = [s.name for s in cfg.active_scopes]
assert "defaults" in active_names
assert "test1" not in active_names
assert "test2" not in active_names
assert "test3" not in active_names
assert "override" in active_names
assert "subdir" in active_names
# remove the override and ensure everything is back to normal
cfg.remove_scope("override")
assert "defaults" in cfg.scopes
assert "test1" in cfg.scopes
assert "test2" in cfg.scopes
assert "test3" in cfg.scopes
active_names = [s.name for s in cfg.active_scopes]
assert "defaults" in active_names
assert "test1" in active_names
assert "test2" in active_names
assert "test3" in active_names
def test_user_cache_path_is_overridable(working_env):
p = "/some/path"
os.environ["SPACK_USER_CACHE_PATH"] = p
assert spack.paths._get_user_cache_path() == p
def test_user_cache_path_is_default_when_env_var_is_empty(working_env):
os.environ["SPACK_USER_CACHE_PATH"] = ""
assert os.path.expanduser("~%s.spack" % os.sep) == spack.paths._get_user_cache_path()
def test_config_file_dir_failure(tmp_path: pathlib.Path, mutable_empty_config):
with pytest.raises(spack.config.ConfigFileError, match="not a file"):
spack.config.read_config_file(str(tmp_path))
@pytest.mark.not_on_windows("chmod not supported on Windows")
def test_config_file_read_perms_failure(tmp_path: pathlib.Path, mutable_empty_config):
"""Test reading a configuration file without permissions to ensure
ConfigFileError is raised."""
filename = join_path(str(tmp_path), "test.yaml")
touch(filename)
os.chmod(filename, 0o200)
with pytest.raises(spack.config.ConfigFileError, match="not readable"):
spack.config.read_config_file(filename)
def test_config_file_read_invalid_yaml(tmp_path: pathlib.Path, mutable_empty_config):
"""Test reading a configuration file with invalid (unparseable) YAML
raises a ConfigFileError."""
filename = join_path(str(tmp_path), "test.yaml")
with open(filename, "w", encoding="utf-8") as f:
f.write("spack:\nview")
with pytest.raises(spack.config.ConfigFileError, match="parsing YAML"):
spack.config.read_config_file(filename)
@pytest.mark.parametrize(
"path,it_should_work,expected_parsed",
[
("x:y:z", True, ["x:", "y:", "z"]),
("x+::y:z", True, ["x+::", "y:", "z"]),
('x:y:"{z}"', True, ["x:", "y:", '"{z}"']),
('x:"y"+:z', True, ["x:", '"y"+:', "z"]),
('x:"y"trail:z', False, None),
("x:y:[1.0]", True, ["x:", "y:", "[1.0]"]),
("x:y:['1.0']", True, ["x:", "y:", "['1.0']"]),
("x:{y}:z", True, ["x:", "{y}:", "z"]),
("x:'{y}':z", True, ["x:", "'{y}':", "z"]),
("x:{y}", True, ["x:", "{y}"]),
],
)
def test_config_path_dsl(path, it_should_work, expected_parsed):
if it_should_work:
assert spack.config.ConfigPath._validate(path) == expected_parsed
else:
with pytest.raises(ValueError):
spack.config.ConfigPath._validate(path)
@pytest.mark.regression("48254")
def test_env_activation_preserves_command_line_scope(mutable_mock_env_path):
"""Check that the "command_line" scope remains the highest priority scope, when we activate,
or deactivate, environments.
"""
expected_cl_scope = spack.config.CONFIG.highest()
assert expected_cl_scope.name == "command_line"
# Creating an environment pushes a new scope
ev.create("test")
with ev.read("test"):
assert spack.config.CONFIG.highest() == expected_cl_scope
# No active environment pops the scope
with ev.no_active_environment():
assert spack.config.CONFIG.highest() == expected_cl_scope
assert spack.config.CONFIG.highest() == expected_cl_scope
# Switch the environment to another one
ev.create("test-2")
with ev.read("test-2"):
assert spack.config.CONFIG.highest() == expected_cl_scope
assert spack.config.CONFIG.highest() == expected_cl_scope
assert spack.config.CONFIG.highest() == expected_cl_scope
@pytest.mark.regression("48414")
@pytest.mark.regression("49188")
def test_env_activation_preserves_config_scopes(mutable_mock_env_path):
"""Check that the priority of scopes is respected when merging configuration files."""
custom_scope = spack.config.InternalConfigScope("custom_scope")
spack.config.CONFIG.push_scope(custom_scope, priority=ConfigScopePriority.CUSTOM)
expected_scopes_without_env = ["custom_scope", "command_line"]
expected_scopes_with_first_env = ["env:test", "custom_scope", "command_line"]
expected_scopes_with_second_env = ["env:test-2", "custom_scope", "command_line"]
def highest_priority_scopes(config, *, nscopes):
return list(config.scopes)[-nscopes:]
assert highest_priority_scopes(spack.config.CONFIG, nscopes=2) == expected_scopes_without_env
# Creating an environment pushes a new scope
ev.create("test")
with ev.read("test"):
assert (
highest_priority_scopes(spack.config.CONFIG, nscopes=3)
== expected_scopes_with_first_env
)
# No active environment pops the scope
with ev.no_active_environment():
assert (
highest_priority_scopes(spack.config.CONFIG, nscopes=2)
== expected_scopes_without_env
)
assert (
highest_priority_scopes(spack.config.CONFIG, nscopes=3)
== expected_scopes_with_first_env
)
# Switch the environment to another one
ev.create("test-2")
with ev.read("test-2"):
assert (
highest_priority_scopes(spack.config.CONFIG, nscopes=3)
== expected_scopes_with_second_env
)
assert (
highest_priority_scopes(spack.config.CONFIG, nscopes=3)
== expected_scopes_with_first_env
)
assert highest_priority_scopes(spack.config.CONFIG, nscopes=2) == expected_scopes_without_env
@pytest.mark.regression("51059")
def test_config_include_similar_name(tmp_path: pathlib.Path):
config_a = tmp_path / "a" / "config"
config_b = tmp_path / "b" / "config"
os.makedirs(config_a)
with open(config_a / "config.yaml", "w", encoding="utf-8") as fd:
syaml.dump_config({"config": {"install_tree": {"root": str(tmp_path)}}}, fd)
os.makedirs(config_b)
with open(config_b / "config.yaml", "w", encoding="utf-8") as fd:
syaml.dump_config({"config": {"install_tree": {"padded_length": 64}}}, fd)
with open(tmp_path / "include.yaml", "w", encoding="utf-8") as fd:
syaml.dump_config({"include": [str(config_a), str(config_b)]}, fd)
config = spack.config.create_from(spack.config.DirectoryConfigScope("test", str(tmp_path)))
# Ensure all of the scopes are found
assert len(config.matching_scopes("^test$")) == 1
assert len(config.matching_scopes("^test:a/config$")) == 1
assert len(config.matching_scopes("^test:b/config$")) == 1
def test_deepcopy_as_builtin(env_yaml):
cfg = spack.config.create_from(
spack.config.SingleFileScope("env", env_yaml, spack.schema.env.schema, yaml_path=["spack"])
)
config_copy = cfg.deepcopy_as_builtin("config")
assert config_copy == cfg.get_config("config")
assert type(config_copy) is DictWithLineInfo
assert type(config_copy["verify_ssl"]) is bool
packages_copy = cfg.deepcopy_as_builtin("packages")
assert type(packages_copy) is DictWithLineInfo
assert type(packages_copy["all"]) is DictWithLineInfo
assert type(packages_copy["all"]["compiler"]) is list
assert type(packages_copy["all"]["compiler"][0]) is str
def test_included_optional_include_scopes():
with pytest.raises(NotImplementedError):
spack.config.OptionalInclude({}).scopes(spack.config.ConfigScope("fail"))
def test_included_path_string(
tmp_path: pathlib.Path, mock_low_high_config, ensure_debug, monkeypatch, capsys
):
path = tmp_path / "local" / "config.yaml"
path.parent.mkdir()
include = spack.config.included_path(path)
assert isinstance(include, spack.config.IncludePath)
assert include.path == str(path)
assert not include.optional
assert include.evaluate_condition()
parent_scope = mock_low_high_config.scopes["low"]
# Trigger failure when required path does not exist
with pytest.raises(ValueError, match="does not exist"):
include.scopes(parent_scope)
# First successful pass builds the scope
path.touch()
scopes = include.scopes(parent_scope)
assert scopes and len(scopes) == 1
assert isinstance(scopes[0], spack.config.SingleFileScope)
# Second pass uses the scopes previously built
assert include._scopes is not None
scopes = include.scopes(parent_scope)
captured = capsys.readouterr()[1]
assert "Using existing scopes" in captured
def test_included_path_string_no_parent_path(
tmp_path: pathlib.Path, config, ensure_debug, monkeypatch
):
"""Use a relative include path and no parent scope path so destination
will be rooted in the current working directory (usually SPACK_ROOT)."""
entry = {"path": "config.yaml", "optional": True}
include = spack.config.included_path(entry)
FakeScope = collections.namedtuple("FakeScope", ["path", "name"])
parent_scope = FakeScope("", "")
assert not include.scopes(parent_scope) # type: ignore[arg-type]
destination = include.destination
curr_dir = os.getcwd()
assert curr_dir == os.path.commonprefix([curr_dir, destination]) # type: ignore[list-item]
def test_included_path_conditional_bad_when(
tmp_path: pathlib.Path, mock_low_high_config, ensure_debug, capsys
):
path = tmp_path / "local"
path.mkdir()
entry = {"path": str(path), "when": 'platform == "nosuchplatform"', "optional": True}
include = spack.config.included_path(entry)
assert isinstance(include, spack.config.IncludePath)
assert include.path == entry["path"]
assert include.when == entry["when"]
assert include.optional
assert not include.evaluate_condition()
scopes = include.scopes(mock_low_high_config.scopes["low"])
captured = capsys.readouterr()[1]
assert "condition is not satisfied" in captured
assert not scopes
def test_included_path_conditional_success(tmp_path: pathlib.Path, mock_low_high_config):
path = tmp_path / "local"
path.mkdir()
entry = {"path": str(path), "when": 'platform == "test"', "optional": True}
include = spack.config.included_path(entry)
assert isinstance(include, spack.config.IncludePath)
assert include.path == entry["path"]
assert include.when == entry["when"]
assert include.optional
assert include.evaluate_condition()
scopes = include.scopes(mock_low_high_config.scopes["low"])
assert scopes and len(scopes) == 1
assert isinstance(scopes[0], spack.config.DirectoryConfigScope)
def test_included_path_git_missing_args():
    # must have at least one of: branch, tag, or commit, so fail if all are missing
entry = {"git": "https://example.com/windows/configs.git", "paths": ["config.yaml"]}
with pytest.raises(spack.error.ConfigError, match="specify one or more"):
spack.config.included_path(entry)
# must have one or more paths
entry["tag"] = "v1.0"
entry["paths"] = []
with pytest.raises(spack.error.ConfigError, match="must include one or more"):
spack.config.included_path(entry)
def test_included_path_git_unsat(
tmp_path: pathlib.Path, mock_low_high_config, ensure_debug, monkeypatch, capsys
):
paths = ["config.yaml", "packages.yaml"]
entry = {
"git": "https://example.com/windows/configs.git",
"tag": "v1.0",
"paths": paths,
"when": 'platform == "nosuchplatform"',
}
include = spack.config.included_path(entry)
assert isinstance(include, spack.config.GitIncludePaths)
assert include.repo == entry["git"]
assert include.tag == entry["tag"]
assert include.paths == entry["paths"]
assert include.when == entry["when"]
assert not include.optional and not include.evaluate_condition()
scopes = include.scopes(mock_low_high_config.scopes["low"])
captured = capsys.readouterr()[1]
assert "condition is not satisfied" in captured
assert not scopes
@pytest.mark.parametrize(
"key,value", [("branch", "main"), ("commit", "abcdef123456"), ("tag", "v1.0")]
)
def test_included_path_git(
tmp_path: pathlib.Path, mock_low_high_config, ensure_debug, monkeypatch, key, value, capsys
):
monkeypatch.setattr(spack.paths, "user_cache_path", str(tmp_path))
class MockIncludeGit(spack.util.executable.Executable):
def __init__(self, required: bool):
pass
def __call__(self, *args, **kwargs) -> str: # type: ignore
action = args[0]
if action == "config":
return "origin"
return ""
paths = ["config.yaml", "packages.yaml"]
entry = {
"git": "https://example.com/windows/configs.git",
key: value,
"paths": paths,
"when": 'platform == "test"',
}
include = spack.config.included_path(entry)
assert isinstance(include, spack.config.GitIncludePaths)
assert not include.optional and include.evaluate_condition()
destination = include._destination()
assert not os.path.exists(destination)
# set up minimal git and repository operations
monkeypatch.setattr(spack.util.git, "git", MockIncludeGit)
def _init_repo(*args, **kwargs):
fs.mkdirp(fs.join_path(destination, ".git"))
def _checkout(*args, **kwargs):
# Make sure the files exist at the clone destination
with fs.working_dir(destination):
for p in paths:
fs.touch(p)
monkeypatch.setattr(spack.util.git, "init_git_repo", _init_repo)
monkeypatch.setattr(spack.util.git, f"pull_checkout_{key}", _checkout)
# First successful pass builds the scope
parent_scope = mock_low_high_config.scopes["low"]
scopes = include.scopes(parent_scope)
assert scopes and len(scopes) == len(paths)
for scope in scopes:
assert isinstance(scope, spack.config.SingleFileScope)
assert os.path.basename(scope.path) in paths # type: ignore[union-attr]
# Second pass uses the scopes previously built.
# Only need to do this for one of the parameters.
if key == "branch":
assert include._scopes is not None
scopes = include.scopes(parent_scope)
captured = capsys.readouterr()[1]
assert "Using existing scopes" in captured
    # A direct clone now returns the already-cloned destination and logs a debug message.
    # Again, this only needs to run for one parameter.
if key == "tag":
assert include._clone() == include.destination
captured = capsys.readouterr()[1]
assert "already cloned" in captured
def test_included_path_git_errs(tmp_path: pathlib.Path, mock_low_high_config, monkeypatch):
monkeypatch.setattr(spack.paths, "user_cache_path", str(tmp_path))
paths = ["concretizer.yaml"]
entry = {
"git": "https://example.com/linux/configs.git",
"branch": "develop",
"paths": paths,
"when": 'platform == "test"',
}
include = spack.config.included_path(entry)
parent_scope = mock_low_high_config.scopes["low"]
# fail to initialize the repository
def _failing_init(*args, **kwargs):
raise spack.util.executable.ProcessError("mock init repo failure")
monkeypatch.setattr(spack.util.git, "init_git_repo", _failing_init)
with pytest.raises(spack.error.ConfigError, match="Unable to initialize"):
include.scopes(parent_scope)
# fail in git config (so use default remote) *and* git checkout
def _init_repo(*args, **kwargs):
fs.mkdirp(fs.join_path(include.destination, ".git"))
class MockIncludeGit(spack.util.executable.Executable):
def __init__(self, required: bool):
pass
def __call__(self, *args, **kwargs) -> str: # type: ignore
raise spack.util.executable.ProcessError("mock git failure")
monkeypatch.setattr(spack.util.git, "init_git_repo", _init_repo)
monkeypatch.setattr(spack.util.git, "git", MockIncludeGit)
with pytest.raises(spack.error.ConfigError, match="Unable to check out"):
include.scopes(parent_scope)
# set up invalid option failure
include.branch = "" # type: ignore[union-attr]
with pytest.raises(spack.error.ConfigError, match="Missing or unsupported options"):
include.scopes(parent_scope)
| MockEnv |
python | TheAlgorithms__Python | data_structures/queues/linked_queue.py | {
"start": 148,
"end": 334
} | class ____:
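    """A singly linked list node storing `data` and a pointer to the next node."""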
def __init__(self, data: Any) -> None:
self.data: Any = data
self.next: Node | None = None
def __str__(self) -> str:
return f"{self.data}"
| Node |
python | PrefectHQ__prefect | src/integrations/prefect-ray/prefect_ray/task_runners.py | {
"start": 2449,
"end": 4323
} | class ____(PrefectWrappedFuture[R, "ray.ObjectRef"]):
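    """A Prefect future backed by a Ray ObjectRef; waiting on or resolving it unwraps Prefect `State` results."""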
def wait(self, timeout: float | None = None) -> None:
try:
result = ray.get(self.wrapped_future, timeout=timeout)
except ray.exceptions.GetTimeoutError:
return
except Exception as exc:
result = run_coro_as_sync(exception_to_crashed_state(exc))
if isinstance(result, State):
self._final_state = result
def result(
self,
timeout: float | None = None,
raise_on_failure: bool = True,
) -> R:
if not self._final_state:
try:
object_ref_result = ray.get(self.wrapped_future, timeout=timeout)
except ray.exceptions.GetTimeoutError as exc:
raise TimeoutError(
f"Task run {self.task_run_id} did not complete within {timeout} seconds"
) from exc
if isinstance(object_ref_result, State):
self._final_state = object_ref_result
else:
return object_ref_result
_result = self._final_state.result(raise_on_failure=raise_on_failure)
# state.result is a `sync_compatible` function that may or may not return an awaitable
# depending on whether the parent frame is sync or not
if asyncio.iscoroutine(_result):
_result = run_coro_as_sync(_result)
return _result
def add_done_callback(self, fn: Callable[["PrefectRayFuture[R]"], Any]):
if not self._final_state:
def call_with_self(future: "PrefectRayFuture[R]"):
"""Call the callback with self as the argument, this is necessary to ensure we remove the future from the pending set"""
fn(self)
self._wrapped_future._on_completed(call_with_self)
return
fn(self)
| PrefectRayFuture |
python | sphinx-doc__sphinx | tests/roots/test-ext-autodoc/target/overload.py | {
"start": 352,
"end": 667
} | class ____:
"""docstring"""
@overload
def sum(self, x: int, y: int = 0) -> int: ...
@overload
def sum(self, x: float, y: float = 0.0) -> float: ...
@overload
def sum(self, x: str, y: str = ...) -> str: ...
def sum(self, x, y=None):
"""docstring"""
return x + y
| Math |
python | mlflow__mlflow | mlflow/genai/git_versioning/__init__.py | {
"start": 400,
"end": 5360
} | class ____:
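    """Captures the repo's current git state and finds or creates a matching LoggedModel, setting it as the active model."""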
def __init__(self, remote_name: str = "origin") -> None:
try:
self.info = GitInfo.from_env(remote_name=remote_name)
except GitOperationError as e:
_logger.warning(
f"Encountered an error while retrieving git information: {e}. "
f"Git model versioning is disabled."
)
self.info = None
self.active_model = None
return
git_tags = self.info.to_mlflow_tags()
filter_string = self.info.to_search_filter_string()
models = mlflow.search_logged_models(
filter_string=filter_string,
max_results=1,
output_format="list",
)
match models:
case [m]:
_logger.info(
f"Using existing model with branch '{self.info.branch}', "
f"commit '{self.info.commit}', dirty state '{self.info.dirty}'."
)
model = m
# Update tags to ensure they're current (especially git diff)
mlflow.set_logged_model_tags(model_id=model.model_id, tags=git_tags)
case _:
_logger.info(
"No existing model found with the current git information. "
"Creating a new model."
)
model = mlflow.initialize_logged_model(tags=git_tags)
self.active_model = _set_active_model(model_id=model.model_id)
def __enter__(self) -> Self:
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
disable_git_model_versioning()
# Global variable to track the active git context
_active_context: GitContext | None = None
@record_usage_event(GitModelVersioningEvent)
def _enable_git_model_versioning(remote_name: str) -> None:
global _active_context
context = GitContext(remote_name=remote_name)
_active_context = context
return context
@experimental(version="3.4.0")
def enable_git_model_versioning(remote_name: str = "origin") -> GitContext:
"""
Enable automatic Git-based model versioning for MLflow traces.
This function enables automatic version tracking based on your Git repository state.
When enabled, MLflow will:
- Detect the current Git branch, commit hash, and dirty state
- Create or reuse a LoggedModel matching this exact Git state
- Link all subsequent traces to this LoggedModel version
- Capture uncommitted changes as diffs when the repository is dirty
Args:
remote_name: The name of the git remote to use for repository URL detection.
Defaults to "origin".
Returns:
A GitContext instance containing:
- info: GitInfo object with branch, commit, dirty state, and diff information
- active_model: The active LoggedModel linked to current Git state
Example:
.. code-block:: python
import mlflow.genai
# Enable Git-based versioning
context = mlflow.genai.enable_git_model_versioning()
print(f"Branch: {context.info.branch}, Commit: {context.info.commit[:8]}")
# Output: Branch: main, Commit: abc12345
# All traces are now automatically linked to this Git version
@mlflow.trace
def my_app():
return "result"
# Can also use as a context manager
with mlflow.genai.enable_git_model_versioning() as context:
# Traces within this block are linked to the Git version
result = my_app()
Note:
If Git is not available or the current directory is not a Git repository,
a warning is issued and versioning is disabled (context.info will be None).
"""
return _enable_git_model_versioning(remote_name)
@experimental(version="3.4.0")
def disable_git_model_versioning() -> None:
"""
Disable Git-based model versioning and clear the active model context.
This function stops automatic Git-based version tracking and clears any active
LoggedModel context. After calling this, traces will no longer be automatically
linked to Git-based versions.
This is automatically called when exiting a context manager created with
enable_git_model_versioning().
Example:
.. code-block:: python
import mlflow.genai
# Enable versioning
context = mlflow.genai.enable_git_model_versioning()
# ... do work with versioning enabled ...
# Disable versioning
mlflow.genai.disable_git_model_versioning()
# Traces are no longer linked to Git versions
"""
global _active_context
_active_context = None
mlflow.clear_active_model()
def _get_active_git_context() -> GitContext | None:
"""
Get the currently active git context, if any.
Returns:
The active GitContext instance or None if no context is active.
"""
return _active_context
| GitContext |
python | microsoft__pyright | packages/pyright-internal/src/tests/samples/autoVariance5.py | {
"start": 522,
"end": 611
} | class ____(Generic[T]):
@deco
def select_all(self, *args: object) -> list[Any]: ...
| A |
python | mlflow__mlflow | tests/projects/test_projects.py | {
"start": 16325,
"end": 19290
} | class ____:
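    """Equality helper that compares equal to any string starting with the given prefix (used in mock call assertions)."""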
def __init__(self, prefix):
self.prefix = prefix
def __eq__(self, other):
return isinstance(other, str) and other.startswith(self.prefix)
def test_parse_kubernetes_config_without_context(mock_kubernetes_job_template):
with mock.patch("mlflow.projects._logger.debug") as mock_debug:
kubernetes_config = {
"repository-uri": "dockerhub_account/mlflow-kubernetes-example",
"kube-job-template-path": mock_kubernetes_job_template,
}
_parse_kubernetes_config(kubernetes_config)
mock_debug.assert_called_once_with(
StartsWithMatcher("Could not find kube-context in backend_config")
)
def test_parse_kubernetes_config_without_image_uri(mock_kubernetes_job_template):
kubernetes_config = {
"kube-context": "docker-for-desktop",
"kube-job-template-path": mock_kubernetes_job_template,
}
with pytest.raises(ExecutionException, match="Could not find 'repository-uri'"):
_parse_kubernetes_config(kubernetes_config)
def test_parse_kubernetes_config_invalid_template_job_file():
kubernetes_config = {
"kube-context": "docker-for-desktop",
"repository-uri": "username/mlflow-kubernetes-example",
"kube-job-template-path": "file_not_found.yaml",
}
with pytest.raises(ExecutionException, match="Could not find 'kube-job-template-path'"):
_parse_kubernetes_config(kubernetes_config)
@pytest.mark.parametrize("synchronous", [True, False])
def test_credential_propagation(synchronous, monkeypatch):
class DummyProcess:
def wait(self):
return 0
def poll(self):
return 0
def communicate(self, _):
return "", ""
monkeypatch.setenv("DATABRICKS_HOST", "host")
monkeypatch.setenv("DATABRICKS_TOKEN", "mytoken")
with (
mock.patch("subprocess.Popen", return_value=DummyProcess()) as popen_mock,
mock.patch("mlflow.utils.uri.is_databricks_uri", return_value=True),
):
mlflow.projects.run(
TEST_PROJECT_DIR,
entry_point="sleep",
experiment_id=FileStore.DEFAULT_EXPERIMENT_ID,
parameters={"duration": 2},
env_manager="local",
synchronous=synchronous,
)
_, kwargs = popen_mock.call_args
env = kwargs["env"]
assert env["DATABRICKS_HOST"] == "host"
assert env["DATABRICKS_TOKEN"] == "mytoken"
def test_get_or_create_conda_env_capture_output_mode(tmp_path):
conda_yaml_file = tmp_path / "conda.yaml"
conda_yaml_file.write_text(
"""
channels:
- conda-forge
dependencies:
- pip:
- scikit-learn==99.99.99
"""
)
with pytest.raises(
ShellCommandException,
match="Could not find a version that satisfies the requirement scikit-learn==99.99.99",
):
get_or_create_conda_env(str(conda_yaml_file), capture_output=True)
| StartsWithMatcher |
python | ray-project__ray | rllib/examples/_old_api_stack/models/parametric_actions_model.py | {
"start": 532,
"end": 2516
} | class ____(DistributionalQTFModel):
"""Parametric action model that handles the dot product and masking.
This assumes the outputs are logits for a single Categorical action dist.
Getting this to work with a more complex output (e.g., if the action space
is a tuple of several distributions) is also possible but left as an
exercise to the reader.
"""
def __init__(
self,
obs_space,
action_space,
num_outputs,
model_config,
name,
true_obs_shape=(4,),
action_embed_size=2,
**kw
):
super(ParametricActionsModel, self).__init__(
obs_space, action_space, num_outputs, model_config, name, **kw
)
self.action_embed_model = FullyConnectedNetwork(
Box(-1, 1, shape=true_obs_shape),
action_space,
action_embed_size,
model_config,
name + "_action_embed",
)
def forward(self, input_dict, state, seq_lens):
# Extract the available actions tensor from the observation.
avail_actions = input_dict["obs"]["avail_actions"]
action_mask = input_dict["obs"]["action_mask"]
# Compute the predicted action embedding
action_embed, _ = self.action_embed_model({"obs": input_dict["obs"]["cart"]})
# Expand the model output to [BATCH, 1, EMBED_SIZE]. Note that the
# avail actions tensor is of shape [BATCH, MAX_ACTIONS, EMBED_SIZE].
intent_vector = tf.expand_dims(action_embed, 1)
# Batch dot product => shape of logits is [BATCH, MAX_ACTIONS].
action_logits = tf.reduce_sum(avail_actions * intent_vector, axis=2)
# Mask out invalid actions (use tf.float32.min for stability)
inf_mask = tf.maximum(tf.math.log(action_mask), tf.float32.min)
return action_logits + inf_mask, state
def value_function(self):
return self.action_embed_model.value_function()
| ParametricActionsModel |
python | modin-project__modin | asv_bench/benchmarks/benchmarks.py | {
"start": 8958,
"end": 9593
} | class ____:
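    """ASV benchmark timing `IMPL.concat` of two generated integer frames across join types, axes, and ignore_index."""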
param_names = ["shapes", "how", "axis", "ignore_index"]
params = [
get_benchmark_shapes("TimeConcat"),
["inner", "outer"],
[0, 1],
[True, False],
]
def setup(self, shapes, how, axis, ignore_index):
self.df1 = generate_dataframe("int", *shapes[0], RAND_LOW, RAND_HIGH)
self.df2 = generate_dataframe("int", *shapes[1], RAND_LOW, RAND_HIGH)
def time_concat(self, shapes, how, axis, ignore_index):
execute(
IMPL.concat(
[self.df1, self.df2], axis=axis, join=how, ignore_index=ignore_index
)
)
| TimeConcat |
python | dagster-io__dagster | python_modules/dagster/dagster_tests/core_tests/test_user_code_boundary.py | {
"start": 23,
"end": 1451
} | class ____(Exception):
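    """Exception raised from user code to exercise Dagster's user-code error boundaries."""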
def __init__(self):
super().__init__("The user has errored")
def test_user_error_boundary_op_compute():
@dg.op
def throws_user_error(_):
raise UserError()
@dg.job
def job_def():
throws_user_error()
result = job_def.execute_in_process(raise_on_error=False)
assert not result.success
def test_user_error_boundary_input_hydration():
@dg.dagster_type_loader(dg.String)
def InputHydration(context, hello):
raise UserError()
@dg.usable_as_dagster_type(loader=InputHydration)
class CustomType(str):
pass
@dg.op(ins={"custom_type": dg.In(CustomType)})
def input_hydration_op(context, custom_type):
context.log.info(custom_type)
@dg.job
def input_hydration_job():
input_hydration_op()
result = input_hydration_job.execute_in_process(
{"ops": {"input_hydration_op": {"inputs": {"custom_type": "hello"}}}},
raise_on_error=False,
)
assert not result.success
def test_user_error_boundary_resource_init():
@dg.resource
def resource_a(_):
raise UserError()
@dg.op(required_resource_keys={"a"})
def resource_op(_context):
return "hello"
@dg.job(resource_defs={"a": resource_a})
def resource_job():
resource_op()
result = resource_job.execute_in_process(raise_on_error=False)
assert not result.success
| UserError |
python | tensorflow__tensorflow | tensorflow/python/ops/tensor_array_ops.py | {
"start": 15973,
"end": 25904
} | class ____:
"""Graph-mode implementation of TensorArray backed by TensorLists.
The backing tensor of this TensorArray is a TensorList variant tensor which is
stored in the `flow`. The `handle` is always none here. The reason we use the
`flow` field and not the `handle` field is to ensure backwards compatibility
with legacy control flow.
"""
def __init__(self,
dtype,
size=None,
dynamic_size=None,
clear_after_read=None,
tensor_array_name=None,
handle=None,
flow=None,
infer_shape=True,
element_shape=None,
colocate_with_first_write_call=True,
name=None):
"""Constructs a graph mode TensorArray.
Args:
dtype: (required) data type of the TensorArray.
size: (optional) int32 scalar `Tensor`: the size of the TensorArray.
Required if flow is not provided.
dynamic_size: (optional) Python bool: If true, writes to the TensorArray
can grow the TensorArray past its initial size. Default: False.
clear_after_read: (optional) unused. Not supported in TensorLists.
tensor_array_name: (optional) unused.
handle: (optional) Must always be None.
flow: (optional) A variant `Tensor` scalar for a TensorList.
infer_shape: (optional, default: True) If True, shape inference is
enabled. In this case, all elements must have the same shape.
element_shape: (optional, default: None) A `TensorShape` object specifying
the shape constraints of each of the elements of the TensorArray. Need
not be fully defined.
colocate_with_first_write_call: (optional). unused.
name: (optional) A name for the operation.
Raises:
ValueError: if both handle and tensor_array_name are provided.
TypeError: if handle is provided but is not a Tensor.
"""
assert handle is None
del handle
del clear_after_read
del tensor_array_name
del colocate_with_first_write_call
self._dynamic_size = dynamic_size
self._size = size
if flow is not None and (
not isinstance(flow, tensor_lib.Tensor) or flow.dtype != dtypes.variant
):
raise TypeError(
f"Expected `flow` to be a variant tensor, but received `{flow.dtype}`"
" instead."
)
if flow is None and size is None:
raise ValueError(
"Argument `size` must be provided if argument `flow` is not provided."
)
if flow is not None and size is not None:
raise ValueError(
"Cannot provide both `flow` and `size` arguments at the same time."
)
if flow is not None and element_shape is not None:
raise ValueError(
"Cannot provide both `flow` and `element_shape` arguments"
"at the same time."
)
self._dtype = dtypes.as_dtype(dtype).base_dtype
# Record the current static shape for the array elements. The element
# shape is defined either by `element_shape` or the shape of the tensor
    # of the first write. If `infer_shape` is true, every write is checked
    # for shape equality.
self._element_shape = [tensor_shape.as_shape(element_shape)]
self._infer_shape = infer_shape
with ops.name_scope(name, "TensorArrayV2", [size, flow]) as scope:
if flow is None:
self._flow = list_ops.tensor_list_reserve(
element_shape=element_shape,
num_elements=size,
element_dtype=dtype,
name=scope)
else:
self._flow = flow
# For backwards compatibility.
self._colocate_with_first_write_call = None
self._colocate_with = None
@property
def flow(self):
return self._flow
@property
def dtype(self):
return self._dtype
@property
def element_shape(self):
return self._element_shape[0]
@property
def handle(self):
# We intentionally do not raise an error so that legacy while_loop does not
# complain.
return None
def _check_element_shape(self, shape):
"""Changes the element shape of the array given a shape to merge with.
Args:
shape: A `TensorShape` object to merge with.
Raises:
ValueError: if the provided shape is incompatible with the current
element shape of the `TensorArray`.
"""
if not shape.is_compatible_with(self.element_shape):
raise ValueError("Inconsistent shapes: saw %s but expected %s " %
(shape, self.element_shape))
if self._infer_shape:
self._element_shape[0] = self.element_shape.merge_with(shape)
def identity(self):
"""See TensorArray."""
flow = array_ops.identity(self._flow)
return build_ta_with_new_flow(self, flow)
def grad(self, source, flow=None, name=None):
"""Not supported."""
raise NotImplementedError()
def read(self, index, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayV2Read", [self._flow, index]):
value = list_ops.tensor_list_get_item(
input_handle=self._flow,
index=index,
element_dtype=self._dtype,
element_shape=self.element_shape,
name=name)
return value
def write(self, index, value, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayV2Write", [self._flow, index, value]):
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
_check_dtypes(value, self._dtype)
self._check_element_shape(value.shape)
flow_out = list_ops.tensor_list_set_item(
input_handle=self._flow,
index=index,
item=value,
resize_if_index_out_of_bounds=self._dynamic_size,
name=name)
return build_ta_with_new_flow(self, flow_out)
def stack(self, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayV2Stack", [self._flow]):
# TODO(b/139941163): remove constant_value after changing num_elements to regular input
if not self._dynamic_size and self._size is not None:
ta_size = tensor_util.constant_value(self._size)
else:
ta_size = -1
value = list_ops.tensor_list_stack(
input_handle=self._flow,
element_dtype=self._dtype,
num_elements=ta_size,
element_shape=self.element_shape)
return value
def gather(self, indices, name=None):
"""See TensorArray."""
value = list_ops.tensor_list_gather(
input_handle=self._flow,
indices=indices,
element_dtype=self._dtype,
element_shape=self.element_shape,
name=name)
return value
def concat(self, name=None):
"""See TensorArray."""
if self.element_shape:
element_shape = [None] + self.element_shape.dims[1:]
else:
element_shape = None
value = list_ops.tensor_list_concat(
input_handle=self._flow,
element_dtype=self._dtype,
element_shape=element_shape,
name=name)
return value
@tf_should_use.should_use_result
def unstack(self, value, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayUnstack", [self._flow, value]):
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
_check_dtypes(value, self._dtype)
self._check_element_shape(value.shape[1:])
flow_out = list_ops.tensor_list_from_tensor(
tensor=value, element_shape=value.shape[1:])
return build_ta_with_new_flow(self, flow_out)
@tf_should_use.should_use_result
def scatter(self, indices, value, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayScatter",
[self._flow, value, indices]):
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
_check_dtypes(value, self._dtype)
self._check_element_shape(value.shape[1:])
flow_out = list_ops.tensor_list_scatter(
tensor=value,
indices=indices,
element_shape=self.element_shape,
input_handle=self._flow)
return build_ta_with_new_flow(self, flow_out)
@tf_should_use.should_use_result
def split(self, value, lengths, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArraySplit", [self._flow, value, lengths]):
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
_check_dtypes(value, self._dtype)
lengths_64 = math_ops.cast(lengths, dtypes.int64)
if not context.executing_eagerly():
clengths = tensor_util.constant_value(lengths_64)
if value.shape.dims is not None and clengths is not None:
if clengths.shape and clengths.max() == clengths.min():
self._check_element_shape(
tensor_shape.TensorShape([clengths[0]
]).concatenate(value.shape[1:]))
flow_out = list_ops.tensor_list_split(
tensor=value,
lengths=lengths_64,
element_shape=self.element_shape,
name=name)
return build_ta_with_new_flow(self, flow_out)
def size(self, name=None):
"""See TensorArray."""
if not self._dynamic_size and self._size is not None:
return ops.convert_to_tensor(self._size, dtype=dtypes.int32)
else:
return list_ops.tensor_list_length(input_handle=self._flow, name=name)
def close(self, name=None):
"""See TensorArray."""
return gen_control_flow_ops.no_op(name=name)
# pylint: enable=protected-access
| _GraphTensorArrayV2 |
python | pypa__hatch | src/hatch/python/resolve.py | {
"start": 1153,
"end": 2960
} | class ____(ABC):
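    """Base class for a downloadable Python distribution: resolves its download source, archive name, and how to unpack archives."""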
def __init__(self, name: str, source: str) -> None:
self.__name = name
self.__source = source
@property
def name(self) -> str:
return self.__name
@cached_property
def source(self) -> str:
return self.__source if (custom_source := get_custom_source(self.name)) is None else custom_source
@cached_property
def archive_name(self) -> str:
return self.source.rsplit("/", 1)[-1]
def unpack(self, archive: Path, directory: Path) -> None:
if self.source.endswith(".zip"):
import zipfile
with zipfile.ZipFile(archive, "r") as zf:
zf.extractall(directory)
elif self.source.endswith((".tar.gz", ".tgz")):
self.__unpack_tarfile(archive, directory, "r:gz")
elif self.source.endswith((".tar.bz2", ".bz2")):
self.__unpack_tarfile(archive, directory, "r:bz2")
elif self.source.endswith((".tar.zst", ".tar.zstd")):
self.__unpack_tarfile(archive, directory, "r:zst")
else:
message = f"Unknown archive type: {archive}"
raise ValueError(message)
@staticmethod
def __unpack_tarfile(archive: Path, directory: Path, mode: Literal["r:gz", "r:bz2", "r:zst"]) -> None:
if sys.version_info >= (3, 14):
import tarfile
else:
# for zstd support (introduced in Python 3.14)
# and filter kwarg (introduced in Python 3.12)
from backports.zstd import tarfile
with tarfile.open(archive, mode) as tf:
tf.extractall(directory, filter="data")
@property
@abstractmethod
def version(self) -> Version:
pass
@property
@abstractmethod
def python_path(self) -> str:
pass
| Distribution |
python | numpy__numpy | tools/swig/test/testSuperTensor.py | {
"start": 13622,
"end": 13937
} | class ____(SuperTensorTestCase):
def __init__(self, methodName="runTest"):
SuperTensorTestCase.__init__(self, methodName)
self.typeStr = "uint"
self.typeCode = "I"
#self.result = int(self.result)
######################################################################
| uintTestCase |
python | cython__cython | tests/run/ext_auto_richcmp.py | {
"start": 2636,
"end": 4301
} | class ____(ClassEqNe):
"""
>>> a = ClassEqNeGe(1)
>>> b = ClassEqNeGe(2)
>>> c = ClassEqNeGe(1)
>>> a == a
True
>>> a != a
False
>>> a >= a
True
>>> a <= a
True
>>> a == b
False
>>> a != b
True
>>> a >= b
False
>>> b <= a
False
>>> a == c
True
>>> a != c
False
>>> a >= c
True
>>> c <= a
True
>>> b == c
False
>>> b != c
True
>>> b >= c
True
>>> c <= b
True
>>> c == a
True
>>> c != a
False
>>> c >= a
True
>>> a <= c
True
>>> b == a
False
>>> b != a
True
>>> b >= a
True
>>> a <= b
True
>>> a < b # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError...
>>> a > b # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError...
>>> 2 <= a
False
>>> a >= 2
False
>>> 1 <= a
True
>>> a >= 1
True
>>> a >= 2
False
>>> 'x' <= a # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError...
>>> a >= 'x' # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError...
#>>> print(a.__eq__.__doc__)
#EQ
#>>> print(a.__ne__.__doc__)
#NE
>>> print(a.__ge__.__doc__)
GE
"""
def __ge__(self, other):
"""GE"""
assert 1 <= self.x <= 2
assert isinstance(self, ClassEqNeGe), type(self)
if isinstance(other, X):
return self.x >= x_of(other)
elif isinstance(other, int):
return self.x >= other
return NotImplemented
@cython.cclass
| ClassEqNeGe |
python | huggingface__transformers | src/transformers/models/fastspeech2_conformer/modeling_fastspeech2_conformer.py | {
"start": 13300,
"end": 14049
} | class ____(nn.Module):
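    """Embeds a variance sequence with a 1D convolution over the time axis, followed by dropout."""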
def __init__(
self,
in_channels=1,
out_channels=384,
kernel_size=1,
padding=0,
dropout_rate=0.0,
):
super().__init__()
self.conv = nn.Conv1d(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
padding=padding,
)
self.dropout = nn.Dropout(dropout_rate)
def forward(self, hidden_states):
hidden_states = hidden_states.transpose(1, 2)
hidden_states = self.conv(hidden_states)
hidden_states = self.dropout(hidden_states)
hidden_states = hidden_states.transpose(1, 2)
return hidden_states
| FastSpeech2ConformerVarianceEmbedding |
python | django__django | tests/test_utils/test_testcase.py | {
"start": 3327,
"end": 5796
} | class ____(TestCase):
# setUpTestData re-assignment are also wrapped in TestData.
jim_douglas = None
@classmethod
def setUpTestData(cls):
cls.jim_douglas = Person.objects.create(name="Jim Douglas")
cls.car = Car.objects.create(name="1963 Volkswagen Beetle")
cls.herbie = cls.jim_douglas.possessed_cars.create(
car=cls.car,
belongs_to=cls.jim_douglas,
)
cls.person_binary = Person.objects.create(name="Person", data=b"binary data")
cls.person_binary_get = Person.objects.get(pk=cls.person_binary.pk)
@assert_no_queries
def test_class_attribute_equality(self):
"""Class level test data is equal to instance level test data."""
self.assertEqual(self.jim_douglas, self.__class__.jim_douglas)
self.assertEqual(self.person_binary, self.__class__.person_binary)
self.assertEqual(self.person_binary_get, self.__class__.person_binary_get)
@assert_no_queries
def test_class_attribute_identity(self):
"""
Class level test data is not identical to instance level test data.
"""
self.assertIsNot(self.jim_douglas, self.__class__.jim_douglas)
self.assertIsNot(self.person_binary, self.__class__.person_binary)
self.assertIsNot(self.person_binary_get, self.__class__.person_binary_get)
@assert_no_queries
def test_binaryfield_data_type(self):
self.assertEqual(bytes(self.person_binary.data), b"binary data")
self.assertEqual(bytes(self.person_binary_get.data), b"binary data")
self.assertEqual(
type(self.person_binary_get.data),
type(self.__class__.person_binary_get.data),
)
self.assertEqual(
type(self.person_binary.data),
type(self.__class__.person_binary.data),
)
@assert_no_queries
def test_identity_preservation(self):
"""Identity of test data is preserved between accesses."""
self.assertIs(self.jim_douglas, self.jim_douglas)
@assert_no_queries
def test_known_related_objects_identity_preservation(self):
"""Known related objects identity is preserved."""
self.assertIs(self.herbie.car, self.car)
self.assertIs(self.herbie.belongs_to, self.jim_douglas)
def test_repr(self):
self.assertEqual(
repr(TestData("attr", "value")),
"<TestData: name='attr', data='value'>",
)
| TestDataTests |
python | virgili0__Virgilio | Tools/regex-bin/regexPrinter.py | {
"start": 3161,
"end": 4450
} | class ____(TreeNode):
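    """Regex-tree node for quantifiers (?, *, +, {m,n}); `get_printer` enumerates the strings the quantified subtree can match."""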
def __init__(self, token, value):
TreeNode.__init__(self, token, value, None)
def get_printer(self, values):
if self.token == Token.QUESTION:
temp = [""] + [v for v in values]
ss = sorted(temp)
elif self.token in (Token.TIMES, Token.PLUS):
sub_result = [v for v in values]
temp = sub_result[::]
sub_result = [v+sub for v in values for sub in sub_result]
temp += sub_result
temp += (self.token == Token.TIMES)*[""]
ss = sorted(temp)
if isinstance(values, list) or len(values) > 1:
ss += ["[...]"]
else:
ss += [values+"..."+values]
else:
try:
l, r = self.value
        except (TypeError, ValueError):
raise ValueError("Was expecting a curly quantifier")
sub_result = [""]
temp = []
for i in range(1, r+1):
sub_result = [v+sub for v in values for sub in sub_result]
if i >= l:
temp += sub_result
ss = sorted(temp)
def printer():
for s in ss:
yield s
return printer
| QuantifierNode |
python | walkccc__LeetCode | solutions/2608. Shortest Cycle in a Graph/2608.py | {
"start": 0,
"end": 806
} | class ____:
def findShortestCycle(self, n: int, edges: list[list[int]]) -> int:
INF = 1001
ans = INF
graph = [[] for _ in range(n)]
for u, v in edges:
graph[u].append(v)
graph[v].append(u)
def bfs(i: int) -> int:
"""Returns the length of the minimum cycle by starting BFS from node `i`.
Returns `INF` if there's no cycle.
"""
dist = [INF] * n
q = collections.deque([i])
dist[i] = 0
while q:
u = q.popleft()
for v in graph[u]:
if dist[v] == INF:
dist[v] = dist[u] + 1
q.append(v)
          elif dist[v] + 1 != dist[u]:  # v is not the parent of u.
return dist[v] + dist[u] + 1
return INF
ans = min(map(bfs, range(n)))
return -1 if ans == INF else ans
| Solution |
python | openai__openai-python | src/openai/types/conversations/conversation.py | {
"start": 189,
"end": 886
} | class ____(BaseModel):
id: str
"""The unique ID of the conversation."""
created_at: int
"""
The time at which the conversation was created, measured in seconds since the
Unix epoch.
"""
metadata: object
"""Set of 16 key-value pairs that can be attached to an object.
This can be useful for storing additional information about the object in a
structured format, and querying for objects via API or the dashboard. Keys are
strings with a maximum length of 64 characters. Values are strings with a
maximum length of 512 characters.
"""
object: Literal["conversation"]
"""The object type, which is always `conversation`."""
| Conversation |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.