sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
saleor/saleor:saleor/graphql/app/tests/mutations/test_app_problem_create_aggregation.py | import datetime
from django.utils import timezone
from .....app.models import AppProblem
from ....tests.utils import get_graphql_content
APP_PROBLEM_CREATE_MUTATION = """
mutation AppProblemCreate($input: AppProblemCreateInput!) {
appProblemCreate(input: $input) {
appProblem {
id
message
key
count
isCritical
updatedAt
}
errors {
field
code
message
}
}
}
"""
def test_app_problem_create_aggregates_within_period(
app_api_client, app, app_problem_generator
):
# given
now = timezone.now()
app_problem_generator(
app,
message="First occurrence",
key="agg-key",
count=1,
updated_at=now - datetime.timedelta(minutes=30),
)
variables = {
"input": {
"message": "Second occurrence",
"key": "agg-key",
"aggregationPeriod": 60,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
assert AppProblem.objects.filter(app=app).count() == 1
problem = AppProblem.objects.get(app=app)
assert problem.count == 2
assert problem.message == variables["input"]["message"]
def test_app_problem_create_new_when_period_expired(
app_api_client, app, app_problem_generator
):
# given
now = timezone.now()
app_problem_generator(
app,
message="Old problem",
key="exp-key",
count=3,
updated_at=now - datetime.timedelta(minutes=120),
)
variables = {
"input": {
"message": "New problem",
"key": "exp-key",
"aggregationPeriod": 60,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
problems = AppProblem.objects.filter(app=app).order_by("created_at")
assert problems.count() == 2
assert problems[0].count == 3
assert problems[0].message == "Old problem"
assert problems[1].count == 1
assert problems[1].message == variables["input"]["message"]
def test_app_problem_create_zero_aggregation_period_always_creates_new(
app_api_client, app, app_problem_generator
):
# given
now = timezone.now()
app_problem_generator(
app,
message="Existing",
key="no-agg",
count=1,
updated_at=now - datetime.timedelta(minutes=1),
)
variables = {
"input": {"message": "New one", "key": "no-agg", "aggregationPeriod": 0}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
assert AppProblem.objects.filter(app=app).count() == 2
def test_app_problem_create_default_aggregation_period_aggregates(
app_api_client, app, app_problem_generator
):
# given
now = timezone.now()
app_problem_generator(
app,
message="Recent",
key="def-agg",
count=1,
updated_at=now - datetime.timedelta(minutes=30),
)
# No aggregationPeriod specified — defaults to 60 minutes
variables = {"input": {"message": "Should aggregate", "key": "def-agg"}}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
assert AppProblem.objects.filter(app=app).count() == 1
problem = AppProblem.objects.get(app=app)
assert problem.count == 2
assert problem.message == variables["input"]["message"]
def test_app_problem_create_dismissed_problem_not_aggregated(
app_api_client, app, app_problem_generator
):
# given
now = timezone.now()
app_problem_generator(
app,
message="Dismissed one",
key="dis-key",
count=5,
updated_at=now - datetime.timedelta(minutes=5),
dismissed=True,
)
variables = {
"input": {
"message": "Fresh problem",
"key": "dis-key",
"aggregationPeriod": 60,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
assert AppProblem.objects.filter(app=app).count() == 2
new_problem = AppProblem.objects.filter(app=app, dismissed=False).get()
assert new_problem.count == 1
assert new_problem.message == variables["input"]["message"]
def test_app_problem_create_message_updates_on_aggregation(
app_api_client, app, app_problem_generator
):
# given
now = timezone.now()
app_problem_generator(
app,
message="Original message",
key="msg-key",
count=1,
updated_at=now - datetime.timedelta(minutes=5),
)
variables = {
"input": {
"message": "Updated message",
"key": "msg-key",
"aggregationPeriod": 60,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
problem = AppProblem.objects.get(app=app)
assert problem.message == variables["input"]["message"]
assert problem.count == 2
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/app/tests/mutations/test_app_problem_create_aggregation.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 187,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/app/tests/mutations/test_app_problem_create_critical.py | import datetime
from django.utils import timezone
from .....app.models import AppProblem
from ....tests.utils import get_graphql_content
APP_PROBLEM_CREATE_MUTATION = """
mutation AppProblemCreate($input: AppProblemCreateInput!) {
appProblemCreate(input: $input) {
appProblem {
id
message
key
count
isCritical
updatedAt
}
errors {
field
code
message
}
}
}
"""
def test_app_problem_create_critical_threshold_reached(
app_api_client, app, app_problem_generator
):
# given
now = timezone.now()
app_problem_generator(
app,
message="Almost critical",
key="crit-key",
count=4,
updated_at=now - datetime.timedelta(minutes=5),
)
variables = {
"input": {
"message": "Critical now",
"key": "crit-key",
"aggregationPeriod": 60,
"criticalThreshold": 5,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
problem = AppProblem.objects.get(app=app)
assert problem.count == 5
assert problem.is_critical is True
def test_app_problem_create_critical_threshold_not_reached(
app_api_client, app, app_problem_generator
):
# given
now = timezone.now()
app_problem_generator(
app,
message="Not critical yet",
key="nc-key",
count=2,
updated_at=now - datetime.timedelta(minutes=5),
)
variables = {
"input": {
"message": "Still not critical",
"key": "nc-key",
"aggregationPeriod": 60,
"criticalThreshold": 10,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
problem = AppProblem.objects.get(app=app)
assert problem.count == 3
assert problem.is_critical is False
def test_app_problem_create_critical_threshold_reached_via_aggregation(
app_api_client, app, app_problem_generator
):
# given - existing problem with count=4, one more push with threshold=5
now = timezone.now()
app_problem_generator(
app,
message="Problem",
key="rolling-key",
count=4,
updated_at=now - datetime.timedelta(minutes=5),
)
variables = {
"input": {
"message": "Problem 5",
"key": "rolling-key",
"aggregationPeriod": 60,
"criticalThreshold": 5,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then - count=5 meets threshold=5, should be critical
data = content["data"]["appProblemCreate"]
assert not data["errors"]
problem = AppProblem.objects.get(app=app)
assert problem.count == 5
assert problem.is_critical is True
def test_app_problem_create_critical_threshold_de_escalates(
app_api_client, app, app_problem_generator
):
# given - existing critical problem with count=5
now = timezone.now()
app_problem_generator(
app,
message="Problem",
key="rolling-key",
count=5,
is_critical=True,
updated_at=now - datetime.timedelta(minutes=5),
)
# when - send problem with higher threshold=10
variables = {
"input": {
"message": "Problem 6",
"key": "rolling-key",
"aggregationPeriod": 60,
"criticalThreshold": 10,
}
}
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then - count=6 is below new threshold=10, should de-escalate
data = content["data"]["appProblemCreate"]
assert not data["errors"]
problem = AppProblem.objects.get(app=app)
assert problem.count == 6
assert problem.is_critical is False
def test_app_problem_create_critical_threshold_not_reached_due_to_expired_aggregation(
app_api_client, app, app_problem_generator
):
# given - existing problem with count=4, threshold=5
# but aggregation period expired, so a new problem is created instead
now = timezone.now()
existing = app_problem_generator(
app,
message="Almost critical",
key="crit-key",
count=4,
updated_at=now - datetime.timedelta(minutes=120),
)
variables = {
"input": {
"message": "New occurrence",
"key": "crit-key",
"aggregationPeriod": 60,
"criticalThreshold": 5,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then - a new problem is created instead of aggregating
data = content["data"]["appProblemCreate"]
assert not data["errors"]
assert AppProblem.objects.filter(app=app, key="crit-key").count() == 2
# old problem stays at count=4, not critical
existing.refresh_from_db()
assert existing.count == 4
assert existing.is_critical is False
# new problem starts at count=1, not critical
new_problem = data["appProblem"]
assert new_problem["count"] == 1
assert new_problem["isCritical"] is False
def test_app_problem_create_critical_on_first_problem(app_api_client, app):
# given
variables = {
"input": {
"message": "Immediately critical",
"key": "imm-crit",
"criticalThreshold": 1,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
problem = AppProblem.objects.get(app=app)
assert problem.is_critical is True
assert problem.count == 1
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/app/tests/mutations/test_app_problem_create_critical.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 194,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/app/tests/mutations/test_app_problem_create_eviction.py | import datetime
from unittest.mock import patch
from django.utils import timezone
from .....app.models import AppProblem
from ....tests.utils import get_graphql_content
APP_PROBLEM_CREATE_MUTATION = """
mutation AppProblemCreate($input: AppProblemCreateInput!) {
appProblemCreate(input: $input) {
appProblem {
id
message
key
count
isCritical
updatedAt
}
errors {
field
code
message
}
}
}
"""
MOCKED_MAX = 2
@patch.object(AppProblem, "MAX_PROBLEMS_PER_APP", MOCKED_MAX)
def test_app_problem_create_limit_eviction(app_api_client, app):
# given
now = timezone.now()
AppProblem.objects.bulk_create(
[
AppProblem(
app=app,
message=f"Problem {i}",
key=f"key-{i}",
updated_at=now - datetime.timedelta(minutes=MOCKED_MAX - i),
)
for i in range(MOCKED_MAX)
]
)
oldest_id = AppProblem.objects.filter(app=app).order_by("updated_at").first().id
variables = {"input": {"message": "One more", "key": "new-key"}}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
assert AppProblem.objects.filter(app=app).count() == MOCKED_MAX
assert not AppProblem.objects.filter(id=oldest_id).exists()
assert AppProblem.objects.filter(app=app, key="new-key").exists()
@patch.object(AppProblem, "MAX_PROBLEMS_PER_APP", MOCKED_MAX)
def test_app_problem_create_bulk_eviction_when_over_limit(app_api_client, app):
"""Test that multiple oldest problems are evicted when count exceeds limit."""
# given - create 4 problems (2 over the mocked limit of 2)
over_limit = MOCKED_MAX + 2
now = timezone.now()
AppProblem.objects.bulk_create(
[
AppProblem(
app=app,
message=f"Problem {i}",
key=f"key-{i}",
updated_at=now,
)
for i in range(over_limit)
]
)
# Track the 3 oldest problem IDs (should be evicted to make room for the new one)
evict_count = over_limit - MOCKED_MAX + 1
oldest_ids = list(
AppProblem.objects.filter(app=app)
.order_by("created_at")
.values_list("id", flat=True)[:evict_count]
)
variables = {"input": {"message": "One more", "key": "new-key"}}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
assert AppProblem.objects.filter(app=app).count() == MOCKED_MAX
for old_id in oldest_ids:
assert not AppProblem.objects.filter(id=old_id).exists()
assert AppProblem.objects.filter(app=app, key="new-key").exists()
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/app/tests/mutations/test_app_problem_create_eviction.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 86,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/app/tests/mutations/test_app_problem_create_validation.py | import datetime
from django.utils import timezone
from .....app.error_codes import AppProblemCreateErrorCode
from .....app.models import AppProblem
from ....tests.utils import get_graphql_content
APP_PROBLEM_CREATE_MUTATION = """
mutation AppProblemCreate($input: AppProblemCreateInput!) {
appProblemCreate(input: $input) {
appProblem {
id
message
key
count
isCritical
updatedAt
}
errors {
field
code
message
}
}
}
"""
def test_app_problem_create_negative_aggregation_period_fails(app_api_client, app):
# given
variables = {
"input": {
"message": "Something went wrong",
"key": "error-1",
"aggregationPeriod": -1,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
# then - Minute scalar rejects negative values at GraphQL level
assert response.status_code == 400
content = response.json()
assert "errors" in content
error_message = content["errors"][0]["message"]
assert 'Expected type "Minute", found -1' in error_message
assert AppProblem.objects.filter(app=app).count() == 0
def test_app_problem_create_zero_critical_threshold_fails(app_api_client, app):
# given
variables = {
"input": {
"message": "Something went wrong",
"key": "error-1",
"criticalThreshold": 0,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
# then - PositiveInt scalar rejects 0 at GraphQL level
assert response.status_code == 400
content = response.json()
assert "errors" in content
assert "PositiveInt" in content["errors"][0]["message"]
assert AppProblem.objects.filter(app=app).count() == 0
def test_app_problem_create_negative_critical_threshold_fails(app_api_client, app):
# given
variables = {
"input": {
"message": "Something went wrong",
"key": "error-1",
"criticalThreshold": -5,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
# then - PositiveInt scalar rejects negative values at GraphQL level
assert response.status_code == 400
content = response.json()
assert "errors" in content
assert "PositiveInt" in content["errors"][0]["message"]
assert AppProblem.objects.filter(app=app).count() == 0
def test_app_problem_create_null_aggregation_period_defaults_to_60(
app_api_client, app, app_problem_generator
):
# given
now = timezone.now()
app_problem_generator(
app,
message="Recent problem",
key="null-agg-key",
count=1,
updated_at=now - datetime.timedelta(minutes=30),
)
# Explicitly pass null for aggregationPeriod
variables = {
"input": {
"message": "Should aggregate with default period",
"key": "null-agg-key",
"aggregationPeriod": None,
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then - null should default to 60 minutes, so it aggregates with the existing problem
data = content["data"]["appProblemCreate"]
assert not data["errors"]
assert AppProblem.objects.filter(app=app).count() == 1
problem = AppProblem.objects.get(app=app)
assert problem.count == 2
assert problem.message == "Should aggregate with default period"
def test_app_problem_create_message_too_short_fails(app_api_client, app):
# given
variables = {
"input": {
"message": "ab", # min_length=3
"key": "error-1",
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] == "message"
assert data["errors"][0]["code"] == AppProblemCreateErrorCode.INVALID.name
assert data["errors"][0]["message"] == "String should have at least 3 characters"
assert AppProblem.objects.filter(app=app).count() == 0
def test_app_problem_create_message_too_long_is_truncated(app_api_client, app):
# given
long_message = "a" * 3000
variables = {
"input": {
"message": long_message,
"key": "error-1",
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
problem = AppProblem.objects.get(app=app)
assert len(problem.message) == 2048
assert problem.message == "a" * 2045 + "..."
def test_app_problem_create_key_too_short_fails(app_api_client, app):
# given
variables = {
"input": {
"message": "Something went wrong",
"key": "ab", # min_length=3
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] == "key"
assert data["errors"][0]["code"] == AppProblemCreateErrorCode.INVALID.name
assert data["errors"][0]["message"] == "String should have at least 3 characters"
assert AppProblem.objects.filter(app=app).count() == 0
def test_app_problem_create_key_too_long_fails(app_api_client, app):
# given
variables = {
"input": {
"message": "Something went wrong",
"key": "a" * 129, # max_length=128
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] == "key"
assert data["errors"][0]["code"] == AppProblemCreateErrorCode.INVALID.name
assert data["errors"][0]["message"] == "String should have at most 128 characters"
assert AppProblem.objects.filter(app=app).count() == 0
def test_app_problem_create_message_at_2048_chars_is_not_truncated(app_api_client, app):
# given
message = "a" * 2048
variables = {
"input": {
"message": message,
"key": "error-1",
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
problem = AppProblem.objects.get(app=app)
assert problem.message == message
assert len(problem.message) == 2048
def test_app_problem_create_key_at_max_length_succeeds(app_api_client, app):
# given
variables = {
"input": {
"message": "Something went wrong",
"key": "a" * 128, # exactly at max_length
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_CREATE_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemCreate"]
assert not data["errors"]
problem = AppProblem.objects.get(app=app)
assert len(problem.key) == 128
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/app/tests/mutations/test_app_problem_create_validation.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 212,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/app/tests/mutations/test_app_problem_dismiss.py | import base64
import graphene
from .....app.error_codes import AppProblemDismissErrorCode
from ....tests.utils import assert_no_permission, get_graphql_content
APP_PROBLEM_DISMISS_MUTATION = """
mutation AppProblemDismiss($input: AppProblemDismissInput!) {
appProblemDismiss(input: $input) {
errors {
field
code
message
}
}
}
"""
def test_app_problem_dismiss_multiple_inputs_fails(
app_api_client, app, app_problem_generator
):
# given
p1 = app_problem_generator(app)
variables = {
"input": {
"byApp": {"ids": [graphene.Node.to_global_id("AppProblem", p1.id)]},
"byStaffWithIds": {
"ids": [graphene.Node.to_global_id("AppProblem", p1.id)]
},
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] is None
assert data["errors"][0]["code"] == "GRAPHQL_ERROR"
assert (
data["errors"][0]["message"]
== "Argument 'byApp' cannot be combined with 'byStaffWithIds'"
)
def test_app_problem_dismiss_no_input_fails(app_api_client, app):
# given
variables = {"input": {}}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] is None
assert data["errors"][0]["code"] == "GRAPHQL_ERROR"
assert (
data["errors"][0]["message"]
== "At least one of arguments is required: 'byApp', 'byStaffWithIds', 'byStaffWithKeys'."
)
def test_app_problem_dismiss_empty_by_app_fails(app_api_client, app):
# given - byApp provided but without ids or keys
variables = {"input": {"byApp": {}}}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then - empty byApp ({}) is falsy, so treated as not provided
data = content["data"]["appProblemDismiss"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] is None
assert data["errors"][0]["code"] == "GRAPHQL_ERROR"
assert (
data["errors"][0]["message"]
== "At least one of arguments is required: 'byApp', 'byStaffWithIds', 'byStaffWithKeys'."
)
def test_app_problem_dismiss_without_permission(
staff_api_client, app, app_problem_generator
):
# given
p1 = app_problem_generator(app)
variables = {
"input": {
"byStaffWithIds": {"ids": [graphene.Node.to_global_id("AppProblem", p1.id)]}
}
}
# when
response = staff_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
# then
assert_no_permission(response)
def test_app_problem_dismiss_with_non_integer_id_fails(app_api_client, app):
# given - ID with UUID instead of integer pk
invalid_id = base64.b64encode(
b"AppProblem:a7f47ac1-058c-4372-a567-0e02b2c3d479"
).decode("utf-8")
variables = {"input": {"byApp": {"ids": [invalid_id]}}}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] == "ids"
assert data["errors"][0]["code"] == AppProblemDismissErrorCode.INVALID.name
assert "Invalid ID" in data["errors"][0]["message"]
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/app/tests/mutations/test_app_problem_dismiss.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 99,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/app/tests/mutations/test_app_problem_dismiss_by_app.py | import graphene
from .....app.error_codes import AppProblemDismissErrorCode
from ....tests.utils import get_graphql_content
from ...mutations.app_problem_dismiss import MAX_ITEMS_LIMIT
APP_PROBLEM_DISMISS_MUTATION = """
mutation AppProblemDismiss($input: AppProblemDismissInput!) {
appProblemDismiss(input: $input) {
errors {
field
code
message
}
}
}
"""
def test_app_problem_dismiss_by_ids_as_app(app_api_client, app, app_problem_generator):
# given
p1 = app_problem_generator(app, key="k1", message="Problem 1")
p2 = app_problem_generator(app, key="k2", message="Problem 2")
variables = {
"input": {
"byApp": {
"ids": [graphene.Node.to_global_id("AppProblem", p1.id)],
}
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert not data["errors"]
p1.refresh_from_db()
p2.refresh_from_db()
assert p1.dismissed is True
assert p1.dismissed_by_user_email is None # Dismissed by app, no email
assert p1.dismissed_by_user is None
assert p2.dismissed is False
def test_app_problem_dismiss_by_keys_as_app(app_api_client, app, app_problem_generator):
# given
p1 = app_problem_generator(app, key="same-key", message="Problem 1")
p2 = app_problem_generator(app, key="same-key", message="Problem 2")
p3 = app_problem_generator(app, key="other-key", message="Problem 3")
variables = {"input": {"byApp": {"keys": ["same-key"]}}}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert not data["errors"]
p1.refresh_from_db()
p2.refresh_from_db()
p3.refresh_from_db()
assert p1.dismissed is True
assert p2.dismissed is True
assert p3.dismissed is False
def test_app_problem_dismiss_by_ids_and_keys_as_app_fails(
app_api_client, app, app_problem_generator
):
# given - cannot specify both ids and keys
p1 = app_problem_generator(app, key="k1", message="Problem 1")
variables = {
"input": {
"byApp": {
"ids": [graphene.Node.to_global_id("AppProblem", p1.id)],
"keys": ["k2"],
}
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] == "byApp"
assert data["errors"][0]["code"] == AppProblemDismissErrorCode.INVALID.name
assert data["errors"][0]["message"] == "Cannot specify both 'ids' and 'keys'."
def test_app_problem_dismiss_idempotent(app_api_client, app, app_problem_generator):
# given - problem already dismissed
p1 = app_problem_generator(app, key="k1", dismissed=True)
variables = {
"input": {"byApp": {"ids": [graphene.Node.to_global_id("AppProblem", p1.id)]}}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert not data["errors"]
p1.refresh_from_db()
assert p1.dismissed is True
def test_app_cannot_dismiss_other_apps_problems(
app_api_client, app, app_with_token, app_problem_generator
):
# given - p1 belongs to a different app
other_app = app_with_token
p1 = app_problem_generator(other_app, key="k1")
variables = {
"input": {"byApp": {"ids": [graphene.Node.to_global_id("AppProblem", p1.id)]}}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then - error returned
data = content["data"]["appProblemDismiss"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] == "ids"
assert data["errors"][0]["code"] == AppProblemDismissErrorCode.INVALID.name
assert (
data["errors"][0]["message"]
== "Cannot dismiss problems belonging to other apps."
)
p1.refresh_from_db()
assert p1.dismissed is False
def test_app_caller_cannot_use_by_staff_with_ids(
app_api_client, app, app_problem_generator
):
# given - app caller tries to use byStaffWithIds (staff-only)
p1 = app_problem_generator(app)
variables = {
"input": {
"byStaffWithIds": {"ids": [graphene.Node.to_global_id("AppProblem", p1.id)]}
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] == "byStaffWithIds"
assert data["errors"][0]["code"] == AppProblemDismissErrorCode.INVALID.name
assert (
data["errors"][0]["message"]
== "App callers cannot use this input. Use 'byApp' instead."
)
def test_app_caller_cannot_use_by_staff_with_keys(app_api_client, app):
# given - app caller tries to use byStaffWithKeys (staff-only)
variables = {
"input": {
"byStaffWithKeys": {
"keys": ["k1"],
"app": graphene.Node.to_global_id("App", app.id),
}
}
}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] == "byStaffWithKeys"
assert data["errors"][0]["code"] == AppProblemDismissErrorCode.INVALID.name
assert (
data["errors"][0]["message"]
== "App callers cannot use this input. Use 'byApp' instead."
)
def test_app_problem_dismiss_by_app_with_too_many_ids_fails(app_api_client, app):
# given
ids = [
graphene.Node.to_global_id("AppProblem", i) for i in range(MAX_ITEMS_LIMIT + 1)
]
variables = {"input": {"byApp": {"ids": ids}}}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] == "ids"
assert data["errors"][0]["code"] == AppProblemDismissErrorCode.INVALID.name
assert data["errors"][0]["message"] == "Cannot specify more than 100 items."
def test_app_problem_dismiss_by_app_with_too_many_keys_fails(app_api_client, app):
# given
keys = [f"key-{i}" for i in range(MAX_ITEMS_LIMIT + 1)]
variables = {"input": {"byApp": {"keys": keys}}}
# when
response = app_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert len(data["errors"]) == 1
assert data["errors"][0]["field"] == "keys"
assert data["errors"][0]["code"] == AppProblemDismissErrorCode.INVALID.name
assert data["errors"][0]["message"] == "Cannot specify more than 100 items."
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/app/tests/mutations/test_app_problem_dismiss_by_app.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 187,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/app/tests/mutations/test_app_problem_dismiss_by_staff_with_ids.py | import graphene
from .....app.error_codes import AppProblemDismissErrorCode
from ....tests.utils import get_graphql_content
from ...mutations.app_problem_dismiss import MAX_ITEMS_LIMIT
APP_PROBLEM_DISMISS_MUTATION = """
mutation AppProblemDismiss($input: AppProblemDismissInput!) {
appProblemDismiss(input: $input) {
errors {
field
code
message
}
}
}
"""
def test_app_problem_dismiss_by_ids_as_staff(
staff_api_client, app, permission_manage_apps, app_problem_generator
):
# given
staff_api_client.user.user_permissions.add(permission_manage_apps)
p1 = app_problem_generator(app, key="k1")
variables = {
"input": {
"byStaffWithIds": {
"ids": [graphene.Node.to_global_id("AppProblem", p1.id)],
}
}
}
# when
response = staff_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, variables)
content = get_graphql_content(response)
# then
data = content["data"]["appProblemDismiss"]
assert not data["errors"]
p1.refresh_from_db()
assert p1.dismissed is True
assert p1.dismissed_by_user_email == staff_api_client.user.email
assert p1.dismissed_by_user == staff_api_client.user
def test_staff_can_dismiss_problems_from_multiple_apps(
    staff_api_client, app, app_with_token, permission_manage_apps, app_problem_generator
):
    """A single mutation call may dismiss problems belonging to different apps."""
    # given: one problem per app and a staff caller with MANAGE_APPS
    staff_user = staff_api_client.user
    staff_user.user_permissions.add(permission_manage_apps)
    first = app_problem_generator(app, key="k1", message="Problem from app 1")
    second = app_problem_generator(
        app_with_token, key="k2", message="Problem from app 2"
    )
    ids = [
        graphene.Node.to_global_id("AppProblem", problem.id)
        for problem in (first, second)
    ]
    payload = {"input": {"byStaffWithIds": {"ids": ids}}}

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, payload)
    )

    # then: both problems are dismissed and attributed to the caller
    assert not result["data"]["appProblemDismiss"]["errors"]
    for problem in (first, second):
        problem.refresh_from_db()
        assert problem.dismissed is True
        assert problem.dismissed_by_user_email == staff_user.email
        assert problem.dismissed_by_user == staff_user
def test_user_caller_cannot_use_by_app(
    staff_api_client, app, permission_manage_apps, app_problem_generator
):
    """The 'byApp' selector is rejected when the caller is a user, not an app."""
    # given
    staff_api_client.user.user_permissions.add(permission_manage_apps)
    problem = app_problem_generator(app)
    payload = {
        "input": {
            "byApp": {"ids": [graphene.Node.to_global_id("AppProblem", problem.id)]}
        }
    }

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, payload)
    )

    # then: a single INVALID error pointing at the misused selector
    errors = result["data"]["appProblemDismiss"]["errors"]
    assert len(errors) == 1
    assert errors[0]["field"] == "byApp"
    assert errors[0]["code"] == AppProblemDismissErrorCode.INVALID.name
    assert errors[0]["message"] == "Only app callers can use 'byApp'."
def test_app_problem_dismiss_by_staff_with_too_many_ids_fails(
    staff_api_client, app, permission_manage_apps
):
    """Passing more than MAX_ITEMS_LIMIT ids is rejected with INVALID."""
    # given: one id more than the allowed maximum
    staff_api_client.user.user_permissions.add(permission_manage_apps)
    too_many_ids = [
        graphene.Node.to_global_id("AppProblem", i)
        for i in range(MAX_ITEMS_LIMIT + 1)
    ]
    payload = {"input": {"byStaffWithIds": {"ids": too_many_ids}}}

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, payload)
    )

    # then
    errors = result["data"]["appProblemDismiss"]["errors"]
    assert len(errors) == 1
    assert errors[0]["field"] == "ids"
    assert errors[0]["code"] == AppProblemDismissErrorCode.INVALID.name
    assert errors[0]["message"] == "Cannot specify more than 100 items."
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/app/tests/mutations/test_app_problem_dismiss_by_staff_with_ids.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 105,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/app/tests/mutations/test_app_problem_dismiss_by_staff_with_keys.py | import graphene
from .....app.error_codes import AppProblemDismissErrorCode
from ....tests.utils import get_graphql_content
from ...mutations.app_problem_dismiss import MAX_ITEMS_LIMIT
APP_PROBLEM_DISMISS_MUTATION = """
mutation AppProblemDismiss($input: AppProblemDismissInput!) {
appProblemDismiss(input: $input) {
errors {
field
code
message
}
}
}
"""
def test_app_problem_dismiss_by_keys_as_staff(
    staff_api_client, app, permission_manage_apps, app_problem_generator
):
    """A staff user can dismiss problems by key, scoped to a specific app."""
    # given
    staff_user = staff_api_client.user
    staff_user.user_permissions.add(permission_manage_apps)
    problem = app_problem_generator(app, key="k1")
    payload = {
        "input": {
            "byStaffWithKeys": {
                "keys": ["k1"],
                "app": graphene.Node.to_global_id("App", app.id),
            }
        }
    }

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, payload)
    )

    # then: the matched problem is dismissed and attributed to the staff user
    assert not result["data"]["appProblemDismiss"]["errors"]
    problem.refresh_from_db()
    assert problem.dismissed is True
    assert problem.dismissed_by_user_email == staff_user.email
    assert problem.dismissed_by_user == staff_user
def test_app_problem_dismiss_by_staff_with_too_many_keys_fails(
    staff_api_client, app, permission_manage_apps
):
    """Passing more than MAX_ITEMS_LIMIT keys is rejected with INVALID."""
    # given: one key more than the allowed maximum
    staff_api_client.user.user_permissions.add(permission_manage_apps)
    too_many_keys = [f"key-{i}" for i in range(MAX_ITEMS_LIMIT + 1)]
    payload = {
        "input": {
            "byStaffWithKeys": {
                "keys": too_many_keys,
                "app": graphene.Node.to_global_id("App", app.id),
            }
        }
    }

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(APP_PROBLEM_DISMISS_MUTATION, payload)
    )

    # then
    errors = result["data"]["appProblemDismiss"]["errors"]
    assert len(errors) == 1
    assert errors[0]["field"] == "keys"
    assert errors[0]["code"] == AppProblemDismissErrorCode.INVALID.name
    assert errors[0]["message"] == "Cannot specify more than 100 items."
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/app/tests/mutations/test_app_problem_dismiss_by_staff_with_keys.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 62,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/app/tests/queries/test_app_problems.py | import graphene
from .....app.models import AppProblem
from ....tests.utils import get_graphql_content, get_graphql_content_from_response
QUERY_APP_PROBLEMS = """
query ($id: ID) {
app(id: $id) {
id
problems {
id
message
key
createdAt
updatedAt
count
isCritical
}
}
}
"""
QUERY_APP_PROBLEMS_WITH_LIMIT = """
query ($id: ID, $limit: PositiveInt) {
app(id: $id) {
id
problems(limit: $limit) {
id
message
}
}
}
"""
QUERY_APP_PROBLEMS_WITH_DISMISSED_BY = """
query ($id: ID) {
app(id: $id) {
id
problems {
id
dismissed {
by
}
}
}
}
"""
QUERY_APP_PROBLEMS_WITH_DISMISSED_BY_AND_EMAIL = """
query ($id: ID) {
app(id: $id) {
id
problems {
id
dismissed {
by
userEmail
}
}
}
}
"""
QUERY_APP_PROBLEMS_WITH_DISMISSED_USER = """
query ($id: ID) {
app(id: $id) {
id
problems {
id
dismissed {
user {
id
email
}
}
}
}
}
"""
QUERY_APP_PROBLEMS_WITH_DISMISSED_EMAIL_ONLY = """
query ($id: ID) {
app(id: $id) {
id
problems {
id
dismissed {
userEmail
}
}
}
}
"""
def test_app_problems_empty(app_api_client, app):
    """An app with no problems resolves `problems` to an empty list."""
    # given
    app_gid = graphene.Node.to_global_id("App", app.id)

    # when
    result = get_graphql_content(
        app_api_client.post_graphql(QUERY_APP_PROBLEMS, {"id": app_gid})
    )

    # then
    assert result["data"]["app"]["problems"] == []
def test_app_problems_returns_problems(app_api_client, app):
    """All stored problems are returned, newest first, with default fields."""
    # given
    first = AppProblem.objects.create(app=app, message="Issue 1", key="k1")
    second = AppProblem.objects.create(app=app, message="Issue 2", key="k2")

    # when
    result = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_APP_PROBLEMS, {"id": graphene.Node.to_global_id("App", app.id)}
        )
    )

    # then: created_at descending, so the most recent problem comes first
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 2
    for node, problem in zip(problems, (second, first)):
        assert node["id"] == graphene.Node.to_global_id("AppProblem", problem.id)
        assert node["message"] == problem.message
        assert node["key"] == problem.key
        assert node["count"] == 1
        assert node["isCritical"] is False
def test_app_problems_ordered_by_created_at_desc(app_api_client, app):
    """Problems are sorted by creation time, most recent first."""
    # given
    older = AppProblem.objects.create(app=app, message="First", key="k1")
    newer = AppProblem.objects.create(app=app, message="Second", key="k2")

    # when
    result = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_APP_PROBLEMS, {"id": graphene.Node.to_global_id("App", app.id)}
        )
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 2
    for node, problem in zip(problems, (newer, older)):
        assert node["id"] == graphene.Node.to_global_id("AppProblem", problem.id)
        assert node["message"] == problem.message
        assert node["key"] == problem.key
def test_app_problems_count_and_critical(app_api_client, app):
    """`count` and `isCritical` mirror the stored model fields."""
    # given
    AppProblem.objects.create(
        app=app, message="Normal", key="k1", count=3, is_critical=False
    )
    AppProblem.objects.create(
        app=app, message="Critical", key="k2", count=10, is_critical=True
    )

    # when
    result = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_APP_PROBLEMS, {"id": graphene.Node.to_global_id("App", app.id)}
        )
    )

    # then
    indexed = {node["message"]: node for node in result["data"]["app"]["problems"]}
    assert indexed["Normal"]["count"] == 3
    assert indexed["Normal"]["isCritical"] is False
    assert indexed["Critical"]["count"] == 10
    assert indexed["Critical"]["isCritical"] is True
def test_app_problems_dismissed_null_when_not_dismissed(app_api_client, app):
    """`dismissed` resolves to null for an active (not dismissed) problem."""
    # given
    AppProblem.objects.create(app=app, message="Active", key="k1", dismissed=False)

    # when
    result = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_APP_PROBLEMS_WITH_DISMISSED_BY,
            {"id": graphene.Node.to_global_id("App", app.id)},
        )
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["dismissed"] is None
def test_app_problems_dismissed_by_app(app_api_client, app):
    """Without a dismissing user email the dismissal is attributed to the app."""
    # given
    AppProblem.objects.create(
        app=app,
        message="Dismissed by app",
        key="k1",
        dismissed=True,
        # no dismissed_by_user_email -> dismissed by the app itself
    )

    # when
    result = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_APP_PROBLEMS_WITH_DISMISSED_BY,
            {"id": graphene.Node.to_global_id("App", app.id)},
        )
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["dismissed"]["by"] == "APP"
def test_app_problems_dismissed_by_user(
    staff_api_client, app, permission_manage_apps, permission_manage_staff
):
    """A user-made dismissal reports by=USER together with the user's email."""
    # given
    staff_user = staff_api_client.user
    staff_user.user_permissions.add(permission_manage_apps, permission_manage_staff)
    AppProblem.objects.create(
        app=app,
        message="Dismissed by user",
        key="k1",
        dismissed=True,
        dismissed_by_user_email=staff_user.email,
        dismissed_by_user=staff_user,
    )

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_APP_PROBLEMS_WITH_DISMISSED_BY_AND_EMAIL,
            {"id": graphene.Node.to_global_id("App", app.id)},
        )
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["dismissed"]["by"] == "USER"
    assert problems[0]["dismissed"]["userEmail"] == staff_user.email
def test_app_problems_dismissed_by_user_returns_null_when_user_deleted(
    staff_api_client, app, permission_manage_apps, permission_manage_staff
):
    """When the dismissing user is gone, by=USER persists via the stored email."""
    # given: the FK is already null (user deleted) but the email snapshot remains
    staff_user = staff_api_client.user
    staff_user.user_permissions.add(permission_manage_apps, permission_manage_staff)
    AppProblem.objects.create(
        app=app,
        message="Dismissed by deleted user",
        key="k1",
        dismissed=True,
        dismissed_by_user_email=staff_user.email,
        dismissed_by_user=None,
    )

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_APP_PROBLEMS_WITH_DISMISSED_BY_AND_EMAIL,
            {"id": graphene.Node.to_global_id("App", app.id)},
        )
    )

    # then: attribution still reads USER because the email is preserved
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["dismissed"]["by"] == "USER"
    assert problems[0]["dismissed"]["userEmail"] == staff_user.email
# Permission tests for dismissed.userEmail field
def test_app_cannot_see_dismissed_user_email(
    app_api_client, app, staff_user, permission_manage_apps
):
    """App callers are denied access to the dismissing user's email."""
    # given
    app_api_client.app.permissions.add(permission_manage_apps)
    AppProblem.objects.create(
        app=app,
        message="Problem",
        key="k1",
        dismissed=True,
        dismissed_by_user_email=staff_user.email,
        dismissed_by_user=staff_user,
    )

    # when
    response = app_api_client.post_graphql(
        QUERY_APP_PROBLEMS_WITH_DISMISSED_EMAIL_ONLY,
        {"id": graphene.Node.to_global_id("App", app.id)},
    )

    # then
    result = get_graphql_content_from_response(response)
    assert "errors" in result
    assert result["errors"][0]["extensions"]["exception"]["code"] == "PermissionDenied"
def test_authenticated_user_can_see_dismissed_user_email(
    staff_api_client, app, permission_manage_apps
):
    """A staff user may read the email of whoever dismissed the problem."""
    # given
    staff_user = staff_api_client.user
    staff_user.user_permissions.add(permission_manage_apps)
    AppProblem.objects.create(
        app=app,
        message="Problem",
        key="k1",
        dismissed=True,
        dismissed_by_user_email=staff_user.email,
        dismissed_by_user=staff_user,
    )

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_APP_PROBLEMS_WITH_DISMISSED_EMAIL_ONLY,
            {"id": graphene.Node.to_global_id("App", app.id)},
        )
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["dismissed"]["userEmail"] == staff_user.email
def test_app_without_manage_staff_cannot_see_dismissed_user_email(
    app_api_client, app, staff_user, permission_manage_apps
):
    """MANAGE_APPS alone (without MANAGE_STAFF) does not unlock the user email."""
    # given
    app_api_client.app.permissions.add(permission_manage_apps)
    AppProblem.objects.create(
        app=app,
        message="Problem",
        key="k1",
        dismissed=True,
        dismissed_by_user_email=staff_user.email,
        dismissed_by_user=staff_user,
    )

    # when
    response = app_api_client.post_graphql(
        QUERY_APP_PROBLEMS_WITH_DISMISSED_EMAIL_ONLY,
        {"id": graphene.Node.to_global_id("App", app.id)},
    )

    # then
    result = get_graphql_content_from_response(response)
    assert "errors" in result
    assert result["errors"][0]["extensions"]["exception"]["code"] == "PermissionDenied"
# Permission tests for dismissed.by field - when dismissed by App
def test_app_can_see_dismissed_by_when_dismissed_by_app(app_api_client, app):
    """An app caller may read `dismissed.by` when the app itself dismissed."""
    # given
    AppProblem.objects.create(
        app=app,
        message="Dismissed by app",
        key="k1",
        dismissed=True,
        # no dismissed_by_user_email -> dismissed by the app itself
    )

    # when
    result = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_APP_PROBLEMS_WITH_DISMISSED_BY,
            {"id": graphene.Node.to_global_id("App", app.id)},
        )
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["dismissed"]["by"] == "APP"
def test_authenticated_user_can_see_dismissed_by_when_dismissed_by_app(
    staff_api_client, app, permission_manage_apps
):
    """A staff user may read `dismissed.by` for an app-made dismissal."""
    # given
    staff_api_client.user.user_permissions.add(permission_manage_apps)
    AppProblem.objects.create(
        app=app,
        message="Dismissed by app",
        key="k1",
        dismissed=True,
        # no dismissed_by_user_email -> dismissed by the app itself
    )

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_APP_PROBLEMS_WITH_DISMISSED_BY,
            {"id": graphene.Node.to_global_id("App", app.id)},
        )
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["dismissed"]["by"] == "APP"
# Permission tests for dismissed.user field
def test_by_field_accessible_to_app_when_dismissed_by_user(
    app_api_client, app, staff_user
):
    """The `by` enum alone is visible to apps even for user-made dismissals."""
    # given
    AppProblem.objects.create(
        app=app,
        message="Dismissed by user",
        key="k1",
        dismissed=True,
        dismissed_by_user_email=staff_user.email,
        dismissed_by_user=staff_user,
    )

    # when
    result = get_graphql_content(
        app_api_client.post_graphql(
            QUERY_APP_PROBLEMS_WITH_DISMISSED_BY,
            {"id": graphene.Node.to_global_id("App", app.id)},
        )
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["dismissed"]["by"] == "USER"
def test_user_with_manage_staff_can_see_dismissed_user(
    staff_api_client, app, permission_manage_apps, permission_manage_staff
):
    """MANAGE_STAFF grants access to the full dismissing user object."""
    # given
    staff_user = staff_api_client.user
    staff_user.user_permissions.add(permission_manage_apps, permission_manage_staff)
    AppProblem.objects.create(
        app=app,
        message="Dismissed by user",
        key="k1",
        dismissed=True,
        dismissed_by_user_email=staff_user.email,
        dismissed_by_user=staff_user,
    )

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_APP_PROBLEMS_WITH_DISMISSED_USER,
            {"id": graphene.Node.to_global_id("App", app.id)},
        )
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["dismissed"]["user"]["email"] == staff_user.email
def test_user_without_manage_staff_cannot_see_dismissed_user(
    staff_api_client, app, staff_user, permission_manage_apps
):
    """Without MANAGE_STAFF the dismissing user object is permission-denied."""
    # given: the caller has MANAGE_APPS but not MANAGE_STAFF
    staff_api_client.user.user_permissions.add(permission_manage_apps)
    AppProblem.objects.create(
        app=app,
        message="Dismissed by user",
        key="k1",
        dismissed=True,
        dismissed_by_user_email=staff_user.email,
        dismissed_by_user=staff_user,
    )

    # when
    response = staff_api_client.post_graphql(
        QUERY_APP_PROBLEMS_WITH_DISMISSED_USER,
        {"id": graphene.Node.to_global_id("App", app.id)},
    )

    # then
    result = get_graphql_content_from_response(response)
    assert "errors" in result
    assert result["errors"][0]["extensions"]["exception"]["code"] == "PermissionDenied"
def test_app_cannot_see_dismissed_user(
    app_api_client, app, staff_user, permission_manage_apps
):
    """App callers are denied access to the dismissing user object."""
    # given
    app_api_client.app.permissions.add(permission_manage_apps)
    AppProblem.objects.create(
        app=app,
        message="Dismissed by user",
        key="k1",
        dismissed=True,
        dismissed_by_user_email=staff_user.email,
        dismissed_by_user=staff_user,
    )

    # when
    response = app_api_client.post_graphql(
        QUERY_APP_PROBLEMS_WITH_DISMISSED_USER,
        {"id": graphene.Node.to_global_id("App", app.id)},
    )

    # then
    result = get_graphql_content_from_response(response)
    assert "errors" in result
    assert result["errors"][0]["extensions"]["exception"]["code"] == "PermissionDenied"
def test_dismissed_user_returns_null_when_dismissed_by_app(
    staff_api_client, app, permission_manage_apps, permission_manage_staff
):
    """`dismissed.user` is null when the dismissal was made by the app."""
    # given
    staff_api_client.user.user_permissions.add(
        permission_manage_apps, permission_manage_staff
    )
    AppProblem.objects.create(
        app=app,
        message="Dismissed by app",
        key="k1",
        dismissed=True,
    )

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_APP_PROBLEMS_WITH_DISMISSED_USER,
            {"id": graphene.Node.to_global_id("App", app.id)},
        )
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["dismissed"]["user"] is None
def test_dismissed_user_returns_null_when_user_deleted(
    staff_api_client, app, permission_manage_apps, permission_manage_staff
):
    """After SET_NULL on user deletion, `dismissed.user` resolves to null."""
    # given: FK already nulled, simulating the dismissing user being deleted
    staff_user = staff_api_client.user
    staff_user.user_permissions.add(permission_manage_apps, permission_manage_staff)
    AppProblem.objects.create(
        app=app,
        message="Dismissed by deleted user",
        key="k1",
        dismissed=True,
        dismissed_by_user_email=staff_user.email,
        dismissed_by_user=None,
    )

    # when
    result = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_APP_PROBLEMS_WITH_DISMISSED_USER,
            {"id": graphene.Node.to_global_id("App", app.id)},
        )
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["dismissed"]["user"] is None
def test_app_problems_limit_negative(app_api_client, app):
    """A negative `limit` is rejected by PositiveInt scalar validation."""
    # given
    AppProblem.objects.create(app=app, message="Issue 1", key="k1")
    payload = {"id": graphene.Node.to_global_id("App", app.id), "limit": -1}

    # when
    response = app_api_client.post_graphql(QUERY_APP_PROBLEMS_WITH_LIMIT, payload)

    # then
    result = get_graphql_content_from_response(response)
    assert len(result["errors"]) == 1
    assert 'Expected type "PositiveInt", found -1.' in result["errors"][0]["message"]
def test_app_problems_limit_zero(app_api_client, app):
    """A zero `limit` is rejected by PositiveInt scalar validation."""
    # given
    AppProblem.objects.create(app=app, message="Issue 1", key="k1")
    payload = {"id": graphene.Node.to_global_id("App", app.id), "limit": 0}

    # when
    response = app_api_client.post_graphql(QUERY_APP_PROBLEMS_WITH_LIMIT, payload)

    # then
    result = get_graphql_content_from_response(response)
    assert len(result["errors"]) == 1
    assert 'Expected type "PositiveInt", found 0.' in result["errors"][0]["message"]
def test_app_problems_limit_one(app_api_client, app):
    """`limit: 1` returns only the single newest problem."""
    # given
    AppProblem.objects.create(app=app, message="Issue 1", key="k1")
    newest = AppProblem.objects.create(app=app, message="Issue 2", key="k2")
    payload = {"id": graphene.Node.to_global_id("App", app.id), "limit": 1}

    # when
    result = get_graphql_content(
        app_api_client.post_graphql(QUERY_APP_PROBLEMS_WITH_LIMIT, payload)
    )

    # then
    problems = result["data"]["app"]["problems"]
    assert len(problems) == 1
    assert problems[0]["id"] == graphene.Node.to_global_id("AppProblem", newest.id)
    assert problems[0]["message"] == newest.message
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/app/tests/queries/test_app_problems.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 538,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/tests/test_error.py | from typing import Annotated
import pytest
from pydantic import BaseModel, Field, StringConstraints, field_validator
from pydantic import ValidationError as PydanticValidationError
from pydantic_core import PydanticCustomError
from ..error import pydantic_to_validation_error
class SampleModel(BaseModel):
    """Minimal model exercising built-in constraints (min_length, ge)."""

    name: Annotated[str, StringConstraints(min_length=3)]
    age: Annotated[int, Field(ge=0)]
class ModelWithCustomErrorCode(BaseModel):
    """Model whose validator raises a PydanticCustomError that carries an
    explicit ``error_code`` in its context dict."""

    url: str

    @field_validator("url")
    @classmethod
    def validate_url(cls, v: str) -> str:
        # Reject non-HTTPS values; the embedded "error_code" lets
        # pydantic_to_validation_error propagate it instead of the default.
        if not v.startswith("https://"):
            raise PydanticCustomError(
                "invalid_url_format",
                "Enter a valid URL.",
                {"error_code": "invalid_url_format"},
            )
        return v
def test_pydantic_to_validation_error_single_field():
    """A single failed constraint maps to one message under its field name."""
    # given: too-short name triggers exactly one pydantic error
    with pytest.raises(PydanticValidationError) as exc_info:
        SampleModel(name="ab", age=5)

    # when
    error = pydantic_to_validation_error(exc_info.value)

    # then
    assert error.message_dict == {
        "name": ["String should have at least 3 characters"],
    }
    assert error.error_dict["name"][0].code == "invalid"
def test_pydantic_to_validation_error_multiple_fields():
    """Errors on several fields are grouped per field name."""
    # given: both constraints violated at once
    with pytest.raises(PydanticValidationError) as exc_info:
        SampleModel(name="ab", age=-1)

    # when
    error = pydantic_to_validation_error(exc_info.value)

    # then
    expected = {
        "name": ["String should have at least 3 characters"],
        "age": ["Input should be greater than or equal to 0"],
    }
    assert error.message_dict == expected
    for field in ("name", "age"):
        assert error.error_dict[field][0].code == "invalid"
def test_pydantic_to_validation_error_uses_default_error_code():
    """With no code argument the converter falls back to "invalid".

    NOTE(review): this duplicates test_pydantic_to_validation_error_single_field;
    consider consolidating.
    """
    # given
    with pytest.raises(PydanticValidationError) as exc_info:
        SampleModel(name="ab", age=5)

    # when
    error = pydantic_to_validation_error(exc_info.value)

    # then
    assert error.message_dict == {
        "name": ["String should have at least 3 characters"],
    }
    assert error.error_dict["name"][0].code == "invalid"
def test_pydantic_to_validation_error_uses_custom_error_code():
    """A caller-supplied default_error_code is applied to the errors."""
    # given
    with pytest.raises(PydanticValidationError) as exc_info:
        SampleModel(name="ab", age=5)

    # when
    error = pydantic_to_validation_error(
        exc_info.value, default_error_code="custom_code"
    )

    # then
    assert error.message_dict == {
        "name": ["String should have at least 3 characters"],
    }
    assert error.error_dict["name"][0].code == "custom_code"
def test_pydantic_to_validation_error_per_error_code_overrides_global():
    """An error_code embedded in the validator ctx beats the global default."""
    # given: the model's validator embeds "invalid_url_format" in its context
    with pytest.raises(PydanticValidationError) as exc_info:
        ModelWithCustomErrorCode(url="http://not-https.com")

    # when: global fallback is "invalid", per-error code should win
    error = pydantic_to_validation_error(exc_info.value, default_error_code="invalid")

    # then
    assert error.message_dict == {"url": ["Enter a valid URL."]}
    assert error.error_dict["url"][0].code == "invalid_url_format"
def test_pydantic_to_validation_error_falls_back_to_global_when_no_ctx_code():
    """Built-in constraints carry no ctx code, so the global default applies."""
    # given: min_length is a built-in constraint with no error_code in ctx
    with pytest.raises(PydanticValidationError) as exc_info:
        SampleModel(name="ab", age=5)

    # when
    error = pydantic_to_validation_error(
        exc_info.value, default_error_code="fallback_code"
    )

    # then
    assert error.error_dict["name"][0].code == "fallback_code"
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/tests/test_error.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 90,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/order/tests/webhooks/subscriptions/test_exclude_shipping.py | import json
import graphene
import pytest
from .....shipping.models import ShippingMethodChannelListing
from .....webhook.event_types import WebhookEventSyncType
from .....webhook.transport.synchronous.transport import (
create_delivery_for_subscription_sync_event,
)
from ....delivery_context import get_all_shipping_methods_for_order
ORDER_FILTER_SHIPPING_METHODS = """
subscription{
event{
...on OrderFilterShippingMethods{
order{
id
}
shippingMethods{
id
name
}
}
}
}
"""
ORDER_FILTER_SHIPPING_METHODS_AVAILABLE_SHIPPING_METHODS = """
subscription{
event{
...on OrderFilterShippingMethods{
order{
id
availableShippingMethods{
id
}
}
}
}
}
"""
ORDER_FILTER_SHIPPING_METHODS_CIRCULAR_SHIPPING_METHODS = """
subscription{
event{
...on OrderFilterShippingMethods{
order{
id
shippingMethods{
id
}
}
}
}
}
"""
@pytest.fixture
def subscription_with_filter_shipping_methods_webhook(subscription_webhook):
    # Webhook subscribed to ORDER_FILTER_SHIPPING_METHODS with the
    # well-formed (order id + shippingMethods) subscription document.
    return subscription_webhook(
        ORDER_FILTER_SHIPPING_METHODS,
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
    )
@pytest.fixture
def subscription_with_shipping_methods(
    subscription_webhook,
):
    # Webhook whose subscription illegally queries order.shippingMethods
    # inside the sync event (circular-call scenario).
    return subscription_webhook(
        ORDER_FILTER_SHIPPING_METHODS_CIRCULAR_SHIPPING_METHODS,
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
    )
@pytest.fixture
def subscription_with_available_ship_methods(
    subscription_webhook,
):
    # Webhook whose subscription illegally queries
    # order.availableShippingMethods inside the sync event.
    return subscription_webhook(
        ORDER_FILTER_SHIPPING_METHODS_AVAILABLE_SHIPPING_METHODS,
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
    )
def test_order_filter_shipping_methods(
    order_line_with_one_allocation,
    subscription_with_filter_shipping_methods_webhook,
    address,
):
    """The sync-event payload carries the order id and every shipping method."""
    # given: a shippable order with an address so methods can be resolved
    webhook = subscription_with_filter_shipping_methods_webhook
    line = order_line_with_one_allocation
    order = line.order
    line.is_shipping_required = True
    line.save(update_fields=["is_shipping_required"])
    order.currency = "USD"
    order.shipping_address = address
    # NOTE(review): "currency" is assigned but not listed in update_fields,
    # so it stays in-memory only - confirm that is intended.
    order.save(update_fields=["shipping_address"])
    order_id = graphene.Node.to_global_id("Order", order.pk)
    all_shipping_methods = get_all_shipping_methods_for_order(
        order, ShippingMethodChannelListing.objects.all()
    )

    # when
    delivery = create_delivery_for_subscription_sync_event(
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
        (order, all_shipping_methods),
        webhook,
    )

    # then: every known method appears in the serialized payload
    payload = json.loads(delivery.payload.get_payload())
    assert payload["order"] == {"id": order_id}
    for method in all_shipping_methods:
        expected = {
            "id": graphene.Node.to_global_id("ShippingMethod", method.id),
            "name": method.name,
        }
        assert expected in payload["shippingMethods"]
    assert delivery.webhook == webhook
def test_order_filter_shipping_methods_no_methods_in_channel(
    order_line_with_one_allocation,
    subscription_with_filter_shipping_methods_webhook,
    shipping_method_channel_PLN,
):
    """With no methods available the payload lists an empty shippingMethods."""
    # given
    webhook = subscription_with_filter_shipping_methods_webhook
    order = order_line_with_one_allocation.order
    order.save(update_fields=["shipping_address"])
    order_id = graphene.Node.to_global_id("Order", order.pk)

    # when
    delivery = create_delivery_for_subscription_sync_event(
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS, (order, []), webhook
    )

    # then
    assert json.loads(delivery.payload.get_payload()) == {
        "order": {"id": order_id},
        "shippingMethods": [],
    }
    assert delivery.webhook == webhook
def test_order_filter_shipping_methods_with_circular_call_for_available_methods(
    order_line_with_one_allocation,
    subscription_with_available_ship_methods,
):
    """Querying availableShippingMethods inside the sync event is forbidden."""
    # given
    order = order_line_with_one_allocation.order

    # when
    delivery = create_delivery_for_subscription_sync_event(
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
        (order, []),
        subscription_with_available_ship_methods,
    )

    # then: the payload carries a single circular-resolution error
    payload = json.loads(delivery.payload.get_payload())
    assert len(payload["errors"]) == 1
    expected_message = "Resolving this field is not allowed in synchronous events."
    assert payload["errors"][0]["message"] == expected_message
def test_order_filter_shipping_methods_with_circular_call_for_shipping_methods(
    order_line_with_one_allocation,
    subscription_with_shipping_methods,
):
    """Querying order.shippingMethods inside the sync event is forbidden."""
    # given
    order = order_line_with_one_allocation.order

    # when
    delivery = create_delivery_for_subscription_sync_event(
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
        (order, []),
        subscription_with_shipping_methods,
    )

    # then: a single circular-resolution error and a nulled-out order
    payload = json.loads(delivery.payload.get_payload())
    assert len(payload["errors"]) == 1
    expected_message = "Resolving this field is not allowed in synchronous events."
    assert payload["errors"][0]["message"] == expected_message
    assert payload["order"] is None
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/order/tests/webhooks/subscriptions/test_exclude_shipping.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 169,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/order/tests/webhooks/static_payloads/test_order_calculate_taxes.py | import datetime
import json
from decimal import Decimal
from unittest.mock import patch
import graphene
import pytest
from freezegun import freeze_time
from .....core.prices import quantize_price
from .....discount import DiscountType, DiscountValueType, VoucherType
from ....webhooks.order_calculate_taxes import (
generate_order_payload_for_tax_calculation,
)
@pytest.fixture
def order_for_payload(fulfilled_order, voucher_percentage):
    """Fulfilled order with a manual and a voucher discount and a SKU-less line."""
    order = fulfilled_order
    # manual staff discount
    order.discounts.create(
        type=DiscountType.MANUAL,
        value_type=DiscountValueType.PERCENTAGE,
        value=Decimal(20),
        amount_value=Decimal("33.0"),
        reason="Discount from staff",
    )
    # voucher discount, time-shifted one day ahead of the manual one so the
    # two have a deterministic created_at ordering
    voucher_discount = order.discounts.create(
        type=DiscountType.VOUCHER,
        value_type=DiscountValueType.PERCENTAGE,
        value=Decimal(10),
        amount_value=Decimal("16.5"),
        name="Voucher",
        voucher=voucher_percentage,
    )
    voucher_discount.created_at = datetime.datetime.now(
        tz=datetime.UTC
    ) + datetime.timedelta(days=1)
    voucher_discount.save(update_fields=["created_at"])
    # one line without a product SKU to cover the null-sku branch
    sku_less_line = order.lines.last()
    sku_less_line.product_sku = None
    sku_less_line.save()
    return order
@freeze_time()
@pytest.mark.parametrize("prices_entered_with_tax", [True, False])
def test_generate_order_payload_for_tax_calculation(
    order_for_payload,
    prices_entered_with_tax,
):
    """The static tax-calculation payload mirrors the order field-for-field.

    Covers channel, shipping address, user, both discounts from the fixture,
    and the per-line data, for tax-inclusive and tax-exclusive entry modes.
    """
    # given
    order = order_for_payload
    tax_configuration = order.channel.tax_configuration
    tax_configuration.prices_entered_with_tax = prices_entered_with_tax
    tax_configuration.save(update_fields=["prices_entered_with_tax"])
    # Drop per-country overrides so the channel-level setting applies.
    tax_configuration.country_exceptions.all().delete()
    discount_1, discount_2 = list(order.discounts.all())
    user = order.user
    # when
    # The generator returns a JSON array; the order payload is its first element.
    payload = json.loads(generate_order_payload_for_tax_calculation(order))[0]
    # then
    currency = order.currency
    assert payload == {
        "type": "Order",
        "id": graphene.Node.to_global_id("Order", order.id),
        "channel": {
            "id": graphene.Node.to_global_id("Channel", order.channel_id),
            "type": "Channel",
            "slug": order.channel.slug,
            "currency_code": order.channel.currency_code,
        },
        "address": {
            "id": graphene.Node.to_global_id("Address", order.shipping_address_id),
            "type": "Address",
            "first_name": order.shipping_address.first_name,
            "last_name": order.shipping_address.last_name,
            "company_name": order.shipping_address.company_name,
            "street_address_1": order.shipping_address.street_address_1,
            "street_address_2": order.shipping_address.street_address_2,
            "city": order.shipping_address.city,
            "city_area": order.shipping_address.city_area,
            "postal_code": order.shipping_address.postal_code,
            "country": order.shipping_address.country.code,
            "country_area": order.shipping_address.country_area,
            "phone": str(order.shipping_address.phone),
        },
        "user_id": graphene.Node.to_global_id("User", user.pk),
        "user_public_metadata": user.metadata,
        "included_taxes_in_prices": prices_entered_with_tax,
        "currency": order.currency,
        "shipping_name": order.shipping_method.name,
        "shipping_amount": str(
            quantize_price(order.base_shipping_price_amount, currency)
        ),
        "metadata": order.metadata,
        "discounts": [
            {
                "name": discount_1.name,
                "amount": str(quantize_price(discount_1.amount_value, currency)),
            },
            {
                "name": discount_2.name,
                "amount": str(quantize_price(discount_2.amount_value, currency)),
            },
        ],
        "lines": [
            {
                "type": "OrderLine",
                "id": graphene.Node.to_global_id("OrderLine", line.id),
                "product_name": line.product_name,
                "variant_name": line.variant_name,
                "quantity": line.quantity,
                "variant_id": line.product_variant_id,
                # Variant-derived fields fall back to None/{} when the
                # variant has been removed.
                "full_name": line.variant.display_product() if line.variant else None,
                "product_metadata": (
                    line.variant.product.metadata if line.variant else {}
                ),
                "product_type_metadata": (
                    line.variant.product.product_type.metadata if line.variant else {}
                ),
                "charge_taxes": True,
                "sku": line.product_sku,
                "unit_amount": str(
                    quantize_price(line.base_unit_price_amount, line.currency)
                ),
                "total_amount": str(
                    quantize_price(
                        line.base_unit_price_amount * line.quantity, line.currency
                    )
                ),
            }
            for line in order.lines.all()
        ],
    }
@freeze_time()
@pytest.mark.parametrize("prices_entered_with_tax", [True, False])
@patch(
    "saleor.order.webhooks.order_calculate_taxes._generate_order_lines_payload_for_tax_calculation"
)
def test_generate_order_payload_for_tax_calculation_entire_order_voucher(
    mocked_order_lines, order_for_payload, prices_entered_with_tax, voucher
):
    """An entire-order voucher still appears in the payload's discounts.

    The lines-payload helper is mocked out, so this test only checks the
    order-level part of the payload.
    """
    # given
    order = order_for_payload
    tax_configuration = order.channel.tax_configuration
    tax_configuration.prices_entered_with_tax = prices_entered_with_tax
    tax_configuration.save(update_fields=["prices_entered_with_tax"])
    # Drop per-country overrides so the channel-level setting applies.
    tax_configuration.country_exceptions.all().delete()
    # Value must be a JSON-encoded fragment; it is spliced into the payload.
    order_lines = '"order_lines"'
    mocked_order_lines.return_value = order_lines
    discount_1, discount_2 = list(order.discounts.all())
    # Turn the second discount into an entire-order voucher discount.
    voucher.type = VoucherType.ENTIRE_ORDER
    voucher.apply_once_per_order = False
    voucher.save()
    discount_2.voucher = voucher
    discount_2.save()
    user = order.user
    # when
    payload = json.loads(generate_order_payload_for_tax_calculation(order))[0]
    # then
    currency = order.currency
    assert payload == {
        "type": "Order",
        "id": graphene.Node.to_global_id("Order", order.id),
        "channel": {
            "id": graphene.Node.to_global_id("Channel", order.channel_id),
            "type": "Channel",
            "slug": order.channel.slug,
            "currency_code": order.channel.currency_code,
        },
        "address": {
            "id": graphene.Node.to_global_id("Address", order.shipping_address_id),
            "type": "Address",
            "first_name": order.shipping_address.first_name,
            "last_name": order.shipping_address.last_name,
            "company_name": order.shipping_address.company_name,
            "street_address_1": order.shipping_address.street_address_1,
            "street_address_2": order.shipping_address.street_address_2,
            "city": order.shipping_address.city,
            "city_area": order.shipping_address.city_area,
            "postal_code": order.shipping_address.postal_code,
            "country": order.shipping_address.country.code,
            "country_area": order.shipping_address.country_area,
            "phone": str(order.shipping_address.phone),
        },
        "user_id": graphene.Node.to_global_id("User", user.pk),
        "user_public_metadata": user.metadata,
        "included_taxes_in_prices": prices_entered_with_tax,
        "currency": order.currency,
        "shipping_name": order.shipping_method.name,
        "shipping_amount": str(
            quantize_price(order.base_shipping_price_amount, currency)
        ),
        "metadata": order.metadata,
        "discounts": [
            {
                "name": discount_1.name,
                "amount": str(quantize_price(discount_1.amount_value, currency)),
            },
            {
                "name": discount_2.name,
                "amount": str(quantize_price(discount_2.amount_value, currency)),
            },
        ],
        "lines": json.loads(order_lines),
    }
    mocked_order_lines.assert_called_once()
@freeze_time()
@pytest.mark.parametrize("prices_entered_with_tax", [True, False])
def test_generate_order_payload_for_tax_calculation_line_level_voucher_excluded(
    order_for_payload, prices_entered_with_tax, voucher
):
    """Apply-once-per-order voucher discounts are left out of the payload."""
    # given
    order = order_for_payload
    tax_configuration = order.channel.tax_configuration
    tax_configuration.prices_entered_with_tax = prices_entered_with_tax
    tax_configuration.save(update_fields=["prices_entered_with_tax"])
    manual_discount, voucher_discount = order.discounts.all()
    voucher.type = VoucherType.ENTIRE_ORDER
    # line level vouchers should be excluded from discounts
    voucher.apply_once_per_order = True
    voucher.save()
    voucher_discount.voucher = voucher
    voucher_discount.save()
    # when
    payload = json.loads(generate_order_payload_for_tax_calculation(order))[0]
    # then
    expected_amount = str(
        quantize_price(manual_discount.amount_value, order.currency)
    )
    assert payload["discounts"] == [
        {"name": manual_discount.name, "amount": expected_amount},
    ]
@pytest.mark.parametrize("charge_taxes", [True, False])
def test_order_lines_for_tax_calculation_with_removed_variant(
    order, order_line_with_one_allocation, charge_taxes
):
    """A line whose variant was deleted still serializes, with empty fallbacks.

    ``full_name`` and the product(-type) metadata degrade to None/{} while the
    denormalized fields stay populated.
    """
    tax_configuration = order.channel.tax_configuration
    tax_configuration.charge_taxes = charge_taxes
    tax_configuration.save(update_fields=["charge_taxes"])
    # Drop per-country overrides so the channel-level setting applies.
    tax_configuration.country_exceptions.all().delete()
    order.lines.add(order_line_with_one_allocation)
    currency = order.currency
    line = order_line_with_one_allocation
    line.voucher_code = "Voucher001"
    line.unit_discount_amount = Decimal("10.0")
    line.unit_discount_type = DiscountValueType.FIXED
    line.undiscounted_unit_price = line.unit_price + line.unit_discount
    line.undiscounted_total_price = line.undiscounted_unit_price * line.quantity
    line.sale_id = graphene.Node.to_global_id("Sale", 1)
    # Keep a handle on the variant, then detach it to simulate removal.
    variant = line.variant
    line.variant = None
    line.save()
    payload = json.loads(generate_order_payload_for_tax_calculation(order))[0]
    lines_payload = payload.get("lines")
    assert len(lines_payload) == 1
    line_id = graphene.Node.to_global_id("OrderLine", line.id)
    line_payload = lines_payload[0]
    assert line_payload == {
        "type": "OrderLine",
        "id": line_id,
        # Denormalized on the line, so it survives the variant removal.
        "variant_id": graphene.Node.to_global_id("ProductVariant", variant.id),
        "full_name": None,
        "product_name": line.product_name,
        "variant_name": line.variant_name,
        "product_metadata": {},
        "product_type_metadata": {},
        "quantity": line.quantity,
        "sku": line.product_sku,
        "charge_taxes": charge_taxes,
        "unit_amount": str(quantize_price(line.base_unit_price_amount, currency)),
        "total_amount": str(
            quantize_price(line.base_unit_price_amount * line.quantity, currency)
        ),
    }
@pytest.mark.parametrize(
    ("charge_taxes", "prices_entered_with_tax"),
    [(False, False), (False, True), (True, False), (True, True)],
)
def test_order_lines_for_tax_calculation_have_all_required_fields(
    order,
    order_line_with_one_allocation,
    charge_taxes,
    prices_entered_with_tax,
):
    """Line payload carries every required field for all tax-config combinations."""
    tax_configuration = order.channel.tax_configuration
    tax_configuration.charge_taxes = charge_taxes
    tax_configuration.prices_entered_with_tax = prices_entered_with_tax
    tax_configuration.save(update_fields=["charge_taxes", "prices_entered_with_tax"])
    # Drop per-country overrides so the channel-level settings apply.
    tax_configuration.country_exceptions.all().delete()
    order.lines.add(order_line_with_one_allocation)
    currency = order.currency
    line = order_line_with_one_allocation
    line.voucher_code = "Voucher001"
    line.unit_discount_amount = Decimal("10.0")
    line.unit_discount_type = DiscountValueType.FIXED
    line.undiscounted_unit_price = line.unit_price + line.unit_discount
    line.undiscounted_total_price = line.undiscounted_unit_price * line.quantity
    line.sale_id = graphene.Node.to_global_id("Sale", 1)
    line.save()
    # Distinct metadata on product vs. product type proves each is sourced
    # from the right object in the payload.
    variant = line.variant
    product = variant.product
    product_type = product.product_type
    product.metadata = {"product_meta": "value"}
    product.save()
    product_type.metadata = {"product_type_meta": "value"}
    product_type.save()
    payload = json.loads(generate_order_payload_for_tax_calculation(order))[0]
    lines_payload = payload.get("lines")
    assert len(lines_payload) == 1
    line_id = graphene.Node.to_global_id("OrderLine", line.id)
    line_payload = lines_payload[0]
    assert line_payload == {
        "type": "OrderLine",
        "id": line_id,
        "variant_id": graphene.Node.to_global_id("ProductVariant", variant.id),
        "full_name": variant.display_product(),
        "product_name": line.product_name,
        "variant_name": line.variant_name,
        "product_metadata": {"product_meta": "value"},
        "product_type_metadata": {"product_type_meta": "value"},
        "quantity": line.quantity,
        "sku": line.product_sku,
        "charge_taxes": charge_taxes,
        "unit_amount": str(quantize_price(line.base_unit_price_amount, currency)),
        "total_amount": str(
            quantize_price(line.base_unit_price_amount * line.quantity, currency)
        ),
    }
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/order/tests/webhooks/static_payloads/test_order_calculate_taxes.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 323,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/order/tests/webhooks/subscriptions/test_order_calculate_taxes.py | import json
from decimal import Decimal
from unittest.mock import ANY, Mock, patch
import pytest
from freezegun import freeze_time
from prices import Money, TaxedMoney
from promise import Promise
from .....core.prices import quantize_price
from .....discount import (
DiscountType,
DiscountValueType,
RewardType,
RewardValueType,
VoucherType,
)
from .....discount.utils.voucher import (
create_or_update_voucher_discount_objects_for_order,
)
from .....graphql.core.utils import to_global_id_or_none
from .....plugins.manager import get_plugins_manager
from .....tax import TaxableObjectDiscountType
from .....webhook.event_types import WebhookEventSyncType
from .....webhook.models import Webhook
from .....webhook.transport.synchronous.transport import (
create_delivery_for_subscription_sync_event,
)
from .... import OrderStatus
from ....calculations import fetch_order_prices_if_expired
from ....models import Order
from ....utils import (
create_manual_order_discount,
update_discount_for_order_line,
)
TAXES_SUBSCRIPTION_QUERY = """
subscription {
event {
__typename
... on CalculateTaxes {
taxBase {
pricesEnteredWithTax
currency
shippingPrice {
amount
}
address {
id
}
channel {
id
}
discounts {
amount {
amount
}
type
}
lines {
quantity
chargeTaxes
productName
variantName
productSku
unitPrice {
amount
}
totalPrice {
amount
}
sourceLine {
__typename
... on OrderLine {
id
}
}
}
sourceObject {
__typename
... on Order {
id
metadata {
key
value
}
privateMetadata {
key
value
}
user {
id
}
}
}
}
}
}
}
"""
@pytest.fixture
def subscription_order_calculate_taxes(subscription_webhook):
    """Webhook subscribed to ORDER_CALCULATE_TAXES via the taxes query."""
    event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
    return subscription_webhook(TAXES_SUBSCRIPTION_QUERY, event_type)
@freeze_time("2020-03-18 12:00:00")
@pytest.mark.parametrize("charge_taxes", [True, False])
def test_order_calculate_taxes(order_line, tax_app, shipping_zone, charge_taxes):
# given
order = order_line.order
expected_shipping_price = Money("2.00", order.currency)
order.base_shipping_price = expected_shipping_price
order.shipping_price = TaxedMoney(
net=expected_shipping_price, gross=expected_shipping_price
)
order.save(
update_fields=[
"base_shipping_price_amount",
"shipping_price_net_amount",
"shipping_price_gross_amount",
]
)
shipping_method = shipping_zone.shipping_methods.first()
order.shipping_method = shipping_method
webhook = Webhook.objects.create(
name="Webhook",
app=tax_app,
target_url="http://127.0.0.1/any",
subscription_query=TAXES_SUBSCRIPTION_QUERY,
)
event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
webhook.events.create(event_type=event_type)
tax_configuration = order.channel.tax_configuration
tax_configuration.charge_taxes = charge_taxes
tax_configuration.save(update_fields=["charge_taxes"])
tax_configuration.country_exceptions.all().delete()
# when
deliveries = create_delivery_for_subscription_sync_event(event_type, order, webhook)
# then
shipping_price_amount = shipping_method.channel_listings.get(
channel=order.channel
).price.amount
shipping_price_amount = quantize_price(shipping_price_amount, order.currency)
assert expected_shipping_price != shipping_price_amount
assert json.loads(deliveries.payload.get_payload()) == {
"__typename": "CalculateTaxes",
"taxBase": {
"address": {"id": to_global_id_or_none(order.shipping_address)},
"currency": "USD",
"discounts": [],
"channel": {"id": to_global_id_or_none(order.channel)},
"lines": [
{
"chargeTaxes": charge_taxes,
"productName": "Test product",
"productSku": "SKU_A",
"quantity": 3,
"sourceLine": {
"__typename": "OrderLine",
"id": to_global_id_or_none(order_line),
},
"totalPrice": {"amount": 36.9},
"unitPrice": {"amount": 12.3},
"variantName": "SKU_A",
}
],
"pricesEnteredWithTax": True,
"shippingPrice": {"amount": expected_shipping_price.amount},
"sourceObject": {
"__typename": "Order",
"id": to_global_id_or_none(order),
"metadata": [
{"key": key, "value": value}
for key, value in order.metadata.items()
],
"privateMetadata": [
{"key": key, "value": value}
for key, value in order.private_metadata.items()
],
"user": {"id": to_global_id_or_none(order.user)},
},
},
}
@freeze_time("2020-03-18 12:00:00")
def test_draft_order_calculate_taxes_line_discount(
order_line,
webhook_app,
permission_handle_taxes,
shipping_zone,
voucher_specific_product_type,
):
# given
order = order_line.order
expected_shipping_price = Money("2.00", order.currency)
order.undiscounted_base_shipping_price = expected_shipping_price
order.base_shipping_price = expected_shipping_price
order.shipping_price = TaxedMoney(
net=expected_shipping_price, gross=expected_shipping_price
)
order.status = OrderStatus.DRAFT
order.save(
update_fields=[
"base_shipping_price_amount",
"shipping_price_net_amount",
"shipping_price_gross_amount",
"undiscounted_base_shipping_price_amount",
"status",
]
)
discount_value = Decimal(5)
update_discount_for_order_line(
order_line, order, "test discount", DiscountValueType.FIXED, discount_value
)
manager = get_plugins_manager(allow_replica=False)
fetch_order_prices_if_expired(order, manager, None, order.lines.all(), True).get()
shipping_method = shipping_zone.shipping_methods.first()
order.shipping_method = shipping_method
webhook_app.permissions.add(permission_handle_taxes)
webhook = Webhook.objects.create(
name="Webhook",
app=webhook_app,
target_url="http://127.0.0.1/any",
subscription_query=TAXES_SUBSCRIPTION_QUERY,
)
event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
webhook.events.create(event_type=event_type)
# when
deliveries = create_delivery_for_subscription_sync_event(event_type, order, webhook)
# then
expected_total_price_amount = (
order_line.undiscounted_base_unit_price_amount - discount_value
) * order_line.quantity
assert json.loads(deliveries.payload.get_payload()) == {
"__typename": "CalculateTaxes",
"taxBase": {
"address": {"id": to_global_id_or_none(order.shipping_address)},
"currency": "USD",
"discounts": [],
"channel": {"id": to_global_id_or_none(order.channel)},
"lines": [
{
"chargeTaxes": True,
"productName": order_line.product_name,
"productSku": "SKU_A",
"quantity": order_line.quantity,
"sourceLine": {
"__typename": "OrderLine",
"id": to_global_id_or_none(order_line),
},
"totalPrice": {"amount": float(expected_total_price_amount)},
"unitPrice": {
"amount": float(
expected_total_price_amount / order_line.quantity
)
},
"variantName": order_line.variant_name,
}
],
"pricesEnteredWithTax": True,
"shippingPrice": {"amount": float(expected_shipping_price.amount)},
"sourceObject": {
"__typename": "Order",
"id": to_global_id_or_none(order),
"metadata": [
{"key": key, "value": value}
for key, value in order.metadata.items()
],
"privateMetadata": [
{"key": key, "value": value}
for key, value in order.private_metadata.items()
],
"user": {"id": to_global_id_or_none(order.user)},
},
},
}
@freeze_time("2020-03-18 12:00:00")
def test_draft_order_calculate_taxes_entire_order_voucher(
draft_order_with_voucher, subscription_order_calculate_taxes, shipping_zone
):
# given
order = draft_order_with_voucher
webhook = subscription_order_calculate_taxes
expected_shipping_price = Money("2.00", order.currency)
voucher = draft_order_with_voucher.voucher
voucher.type = VoucherType.ENTIRE_ORDER
voucher.save(update_fields=["type"])
discount_amount = Decimal(10)
channel_listing = voucher.channel_listings.get()
channel_listing.discount_value = discount_amount
channel_listing.save(update_fields=["discount_value"])
order_discount = order.discounts.first()
order_discount.value = discount_amount
order_discount.save(update_fields=["value"])
order.undiscounted_base_shipping_price = expected_shipping_price
order.base_shipping_price = expected_shipping_price
order.shipping_price = TaxedMoney(
net=expected_shipping_price, gross=expected_shipping_price
)
order.save(
update_fields=[
"base_shipping_price_amount",
"shipping_price_net_amount",
"shipping_price_gross_amount",
"undiscounted_base_shipping_price_amount",
]
)
manager = get_plugins_manager(allow_replica=False)
fetch_order_prices_if_expired(order, manager, None, order.lines.all(), True).get()
shipping_method = shipping_zone.shipping_methods.first()
order.shipping_method = shipping_method
# when
deliveries = create_delivery_for_subscription_sync_event(
WebhookEventSyncType.ORDER_CALCULATE_TAXES, order, webhook
)
# then
assert json.loads(deliveries.payload.get_payload()) == {
"__typename": "CalculateTaxes",
"taxBase": {
"address": {"id": to_global_id_or_none(order.shipping_address)},
"currency": "USD",
"discounts": [
{
"amount": {"amount": float(discount_amount)},
"type": TaxableObjectDiscountType.SUBTOTAL,
}
],
"channel": {"id": to_global_id_or_none(order.channel)},
"lines": [
{
"chargeTaxes": True,
"productName": line.product_name,
"productSku": line.product_sku,
"quantity": line.quantity,
"sourceLine": {
"__typename": "OrderLine",
"id": to_global_id_or_none(line),
},
"totalPrice": {
"amount": float(line.base_unit_price_amount * line.quantity)
},
"unitPrice": {"amount": float(line.base_unit_price_amount)},
"variantName": line.variant_name,
}
for line in order.lines.all()
],
"pricesEnteredWithTax": True,
"shippingPrice": {"amount": float(expected_shipping_price.amount)},
"sourceObject": {
"__typename": "Order",
"id": to_global_id_or_none(order),
"metadata": [
{"key": key, "value": value}
for key, value in order.metadata.items()
],
"privateMetadata": [
{"key": key, "value": value}
for key, value in order.private_metadata.items()
],
"user": {"id": to_global_id_or_none(order.user)},
},
},
}
@freeze_time("2020-03-18 12:00:00")
def test_draft_order_calculate_taxes_apply_once_per_order_voucher(
draft_order_with_voucher, subscription_order_calculate_taxes, shipping_zone
):
# given
order = draft_order_with_voucher
webhook = subscription_order_calculate_taxes
expected_shipping_price = Money("2.00", order.currency)
voucher = draft_order_with_voucher.voucher
voucher.type = VoucherType.ENTIRE_ORDER
voucher.apply_once_per_order = True
voucher.save(update_fields=["type", "apply_once_per_order"])
discount_amount = Decimal(10)
order_discount = order.discounts.first()
order_discount.value = discount_amount
order_discount.save(update_fields=["value"])
order.undiscounted_base_shipping_price = expected_shipping_price
order.base_shipping_price = expected_shipping_price
order.shipping_price = TaxedMoney(
net=expected_shipping_price, gross=expected_shipping_price
)
order.save(
update_fields=[
"base_shipping_price_amount",
"shipping_price_net_amount",
"shipping_price_gross_amount",
"undiscounted_base_shipping_price_amount",
]
)
manager = get_plugins_manager(allow_replica=False)
fetch_order_prices_if_expired(order, manager, None, order.lines.all(), True).get()
shipping_method = shipping_zone.shipping_methods.first()
order.shipping_method = shipping_method
# when
deliveries = create_delivery_for_subscription_sync_event(
WebhookEventSyncType.ORDER_CALCULATE_TAXES, order, webhook
)
# then
assert json.loads(deliveries.payload.get_payload()) == {
"__typename": "CalculateTaxes",
"taxBase": {
"address": {"id": to_global_id_or_none(order.shipping_address)},
"currency": "USD",
"discounts": [],
"channel": {"id": to_global_id_or_none(order.channel)},
"lines": [
{
"chargeTaxes": True,
"productName": line.product_name,
"productSku": line.product_sku,
"quantity": line.quantity,
"sourceLine": {
"__typename": "OrderLine",
"id": to_global_id_or_none(line),
},
"totalPrice": {
"amount": float(
round(line.base_unit_price_amount * line.quantity, 2)
)
},
"unitPrice": {
"amount": float(round(line.base_unit_price_amount, 2))
},
"variantName": line.variant_name,
}
for line in order.lines.all()
],
"pricesEnteredWithTax": True,
"shippingPrice": {"amount": float(expected_shipping_price.amount)},
"sourceObject": {
"__typename": "Order",
"id": to_global_id_or_none(order),
"metadata": [
{"key": key, "value": value}
for key, value in order.metadata.items()
],
"privateMetadata": [
{"key": key, "value": value}
for key, value in order.private_metadata.items()
],
"user": {"id": to_global_id_or_none(order.user)},
},
},
}
@freeze_time("2020-03-18 12:00:00")
def test_order_calculate_taxes_specific_product_voucher(
order_line,
subscription_order_calculate_taxes,
shipping_zone,
voucher_specific_product_type,
):
# given
order = order_line.order
webhook = subscription_order_calculate_taxes
expected_shipping_price = Money("2.00", order.currency)
order.undiscounted_base_shipping_price = expected_shipping_price
order.base_shipping_price = expected_shipping_price
order.shipping_price = TaxedMoney(
net=expected_shipping_price, gross=expected_shipping_price
)
order.status = OrderStatus.DRAFT
order.voucher = voucher_specific_product_type
order.voucher_code = voucher_specific_product_type.codes.first().code
order.save(
update_fields=[
"base_shipping_price_amount",
"shipping_price_net_amount",
"shipping_price_gross_amount",
"undiscounted_base_shipping_price_amount",
"voucher_code",
"voucher",
"status",
]
)
voucher_specific_product_type.discount_value_type = DiscountValueType.FIXED
voucher_specific_product_type.save(update_fields=["discount_value_type"])
voucher_listing = voucher_specific_product_type.channel_listings.get(
channel=order.channel
)
unit_discount_amount = Decimal(2)
voucher_listing.discount_value = unit_discount_amount
voucher_listing.save(update_fields=["discount_value"])
voucher_specific_product_type.variants.add(order_line.variant)
create_or_update_voucher_discount_objects_for_order(order)
manager = get_plugins_manager(allow_replica=False)
fetch_order_prices_if_expired(order, manager, None, order.lines.all(), True).get()
shipping_method = shipping_zone.shipping_methods.first()
order.shipping_method = shipping_method
# when
deliveries = create_delivery_for_subscription_sync_event(
WebhookEventSyncType.ORDER_CALCULATE_TAXES, order, webhook
)
# then
expected_total_price_amount = (
order_line.undiscounted_base_unit_price_amount - unit_discount_amount
) * order_line.quantity
assert json.loads(deliveries.payload.get_payload()) == {
"__typename": "CalculateTaxes",
"taxBase": {
"address": {"id": to_global_id_or_none(order.shipping_address)},
"currency": "USD",
"discounts": [],
"channel": {"id": to_global_id_or_none(order.channel)},
"lines": [
{
"chargeTaxes": True,
"productName": order_line.product_name,
"productSku": "SKU_A",
"quantity": order_line.quantity,
"sourceLine": {
"__typename": "OrderLine",
"id": to_global_id_or_none(order_line),
},
"totalPrice": {"amount": float(expected_total_price_amount)},
"unitPrice": {
"amount": float(
expected_total_price_amount / order_line.quantity
)
},
"variantName": order_line.variant_name,
}
],
"pricesEnteredWithTax": True,
"shippingPrice": {"amount": float(expected_shipping_price.amount)},
"sourceObject": {
"__typename": "Order",
"id": to_global_id_or_none(order),
"metadata": [
{"key": key, "value": value}
for key, value in order.metadata.items()
],
"privateMetadata": [
{"key": key, "value": value}
for key, value in order.private_metadata.items()
],
"user": {"id": to_global_id_or_none(order.user)},
},
},
}
@freeze_time("2020-03-18 12:00:00")
@pytest.mark.parametrize("charge_taxes", [True, False])
def test_draft_order_calculate_taxes_free_shipping_voucher(
draft_order_with_free_shipping_voucher,
subscription_order_calculate_taxes,
shipping_zone,
charge_taxes,
):
# given
order = draft_order_with_free_shipping_voucher
webhook = subscription_order_calculate_taxes
shipping_method = shipping_zone.shipping_methods.first()
order.shipping_method = shipping_method
# when
deliveries = create_delivery_for_subscription_sync_event(
WebhookEventSyncType.ORDER_CALCULATE_TAXES, order, webhook
)
# then
assert json.loads(deliveries.payload.get_payload()) == {
"__typename": "CalculateTaxes",
"taxBase": {
"address": {"id": to_global_id_or_none(order.shipping_address)},
"currency": "USD",
"discounts": [],
"channel": {"id": to_global_id_or_none(order.channel)},
"lines": ANY,
"pricesEnteredWithTax": True,
"shippingPrice": {"amount": 0.0},
"sourceObject": {
"__typename": "Order",
"id": to_global_id_or_none(order),
"metadata": [
{"key": key, "value": value}
for key, value in order.metadata.items()
],
"privateMetadata": [
{"key": key, "value": value}
for key, value in order.private_metadata.items()
],
"user": {"id": to_global_id_or_none(order.user)},
},
},
}
@freeze_time("2020-03-18 12:00:00")
def test_order_calculate_taxes_with_manual_discount(
order_with_lines,
subscription_order_calculate_taxes,
tax_configuration_tax_app,
):
# given
order = order_with_lines
shipping_price_amount = order.base_shipping_price_amount
discount_value = Decimal(20)
order.discounts.create(
value_type=DiscountValueType.FIXED,
value=discount_value,
reason="Discount reason",
)
line_1, line_2 = order.lines.all()
line_1_unit_price = line_1.base_unit_price_amount
line_2_unit_price = line_2.base_unit_price_amount
subtotal_amount = (
line_1_unit_price * line_1.quantity + line_2_unit_price * line_2.quantity
)
total_amount = subtotal_amount + shipping_price_amount
manager = Mock(get_taxes_for_order=Mock(return_value={}))
fetch_order_prices_if_expired(order, manager, None, [line_1, line_2], True).get()
webhook = subscription_order_calculate_taxes
# Manual discount applies both to subtotal and shipping. For tax calculation it
# requires to be split into subtotal and shipping portion.
manual_discount_subtotal_portion = subtotal_amount / total_amount * discount_value
manual_discount_shipping_portion = discount_value - manual_discount_subtotal_portion
# when
deliveries = create_delivery_for_subscription_sync_event(
WebhookEventSyncType.ORDER_CALCULATE_TAXES, order, webhook
)
# then
assert json.loads(deliveries.payload.get_payload()) == {
"__typename": "CalculateTaxes",
"taxBase": {
"address": {"id": to_global_id_or_none(order.shipping_address)},
"currency": "USD",
"discounts": [
{
"amount": {"amount": float(manual_discount_subtotal_portion)},
"type": TaxableObjectDiscountType.SUBTOTAL,
},
{
"amount": {"amount": float(manual_discount_shipping_portion)},
"type": TaxableObjectDiscountType.SHIPPING,
},
],
"channel": {"id": to_global_id_or_none(order.channel)},
"lines": [
{
"chargeTaxes": True,
"productName": line_1.product_name,
"productSku": line_1.product_sku,
"quantity": line_1.quantity,
"sourceLine": {
"__typename": "OrderLine",
"id": to_global_id_or_none(line_1),
},
"totalPrice": {
"amount": float(line_1_unit_price * line_1.quantity)
},
"unitPrice": {"amount": float(line_1_unit_price)},
"variantName": line_1.variant_name,
},
{
"chargeTaxes": True,
"productName": line_2.product_name,
"productSku": line_2.product_sku,
"quantity": line_2.quantity,
"sourceLine": {
"__typename": "OrderLine",
"id": to_global_id_or_none(line_2),
},
"totalPrice": {
"amount": float(line_2_unit_price * line_2.quantity)
},
"unitPrice": {"amount": float(line_2_unit_price)},
"variantName": line_2.variant_name,
},
],
"pricesEnteredWithTax": False,
"shippingPrice": {"amount": float(shipping_price_amount)},
"sourceObject": {
"__typename": "Order",
"id": to_global_id_or_none(order),
"metadata": [
{"key": key, "value": value}
for key, value in order.metadata.items()
],
"privateMetadata": [
{"key": key, "value": value}
for key, value in order.private_metadata.items()
],
"user": {"id": to_global_id_or_none(order.user)},
},
},
}
@freeze_time("2020-03-18 12:00:00")
def test_order_calculate_taxes_empty_order(
order, webhook_app, permission_handle_taxes, channel_USD
):
# given
order = Order.objects.create(channel=channel_USD, currency="USD", lines_count=0)
webhook_app.permissions.add(permission_handle_taxes)
webhook = Webhook.objects.create(
name="Webhook",
app=webhook_app,
target_url="http://127.0.0.1/any",
subscription_query=TAXES_SUBSCRIPTION_QUERY,
)
event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
webhook.events.create(event_type=event_type)
# when
deliveries = create_delivery_for_subscription_sync_event(event_type, order, webhook)
# then
assert json.loads(deliveries.payload.get_payload()) == {
"__typename": "CalculateTaxes",
"taxBase": {
"address": None,
"currency": "USD",
"discounts": [],
"lines": [],
"pricesEnteredWithTax": True,
"shippingPrice": {"amount": 0.0},
"channel": {"id": to_global_id_or_none(order.channel)},
"sourceObject": {
"__typename": "Order",
"id": to_global_id_or_none(order),
"metadata": [
{"key": key, "value": value}
for key, value in order.metadata.items()
],
"privateMetadata": [
{"key": key, "value": value}
for key, value in order.private_metadata.items()
],
"user": None,
},
},
}
@patch("saleor.tax.webhooks.shared.trigger_webhook_sync_promise")
def test_order_calculate_taxes_order_promotion(
    mocked_trigger_webhook_sync_promise,
    order_with_lines,
    order_promotion_with_rule,
    subscription_order_calculate_taxes,
    tax_configuration_tax_app,
    plugins_manager,
):
    """An order-promotion subtotal reward appears as a SUBTOTAL discount in the payload."""
    # given: an unconfirmed order whose promotion rule grants a fixed
    # subtotal discount once the base subtotal reaches the predicate threshold
    order = order_with_lines
    order.status = OrderStatus.UNCONFIRMED
    webhook = subscription_order_calculate_taxes
    channel = order.channel
    shipping_price_amount = order.base_shipping_price_amount
    line_1, line_2 = order.lines.all()
    line_1_unit_price = line_1.base_unit_price_amount
    line_2_unit_price = line_2.base_unit_price_amount
    promotion = order_promotion_with_rule
    rule = promotion.rules.get()
    rule.order_predicate = {
        "discountedObjectPredicate": {"baseSubtotalPrice": {"range": {"gte": 10}}}
    }
    rule.save(update_fields=["order_predicate"])
    reward_value = Decimal(5)
    assert rule.reward_value == reward_value
    assert rule.reward_value_type == RewardValueType.FIXED
    assert rule.reward_type == RewardType.SUBTOTAL_DISCOUNT
    mocked_trigger_webhook_sync_promise.return_value = Promise.resolve({})
    # when: prices are refreshed so the promotion discount gets applied
    fetch_order_prices_if_expired(order, plugins_manager, None, None, True).get()
    deliveries = create_delivery_for_subscription_sync_event(
        WebhookEventSyncType.ORDER_CALCULATE_TAXES, order, webhook
    )
    # then: the payload lists the reward as a single SUBTOTAL-level discount
    assert json.loads(deliveries.payload.get_payload()) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": {"id": to_global_id_or_none(order.shipping_address)},
            "currency": "USD",
            "discounts": [
                {
                    "amount": {"amount": reward_value},
                    "type": TaxableObjectDiscountType.SUBTOTAL,
                }
            ],
            "channel": {"id": to_global_id_or_none(channel)},
            "lines": [
                {
                    "chargeTaxes": True,
                    "productName": line_1.product_name,
                    "productSku": line_1.product_sku,
                    "quantity": line_1.quantity,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(line_1),
                    },
                    "totalPrice": {"amount": line_1_unit_price * line_1.quantity},
                    "unitPrice": {"amount": line_1_unit_price},
                    "variantName": line_1.variant_name,
                },
                {
                    "chargeTaxes": True,
                    "productName": line_2.product_name,
                    "productSku": line_2.product_sku,
                    "quantity": line_2.quantity,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(line_2),
                    },
                    "totalPrice": {"amount": line_2_unit_price * line_2.quantity},
                    "unitPrice": {"amount": line_2_unit_price},
                    "variantName": line_2.variant_name,
                },
            ],
            "pricesEnteredWithTax": False,
            "shippingPrice": {"amount": shipping_price_amount},
            "sourceObject": {
                "__typename": "Order",
                "id": to_global_id_or_none(order),
                "metadata": [
                    {"key": key, "value": value}
                    for key, value in order.metadata.items()
                ],
                "privateMetadata": [
                    {"key": key, "value": value}
                    for key, value in order.private_metadata.items()
                ],
                "user": {"id": to_global_id_or_none(order.user)},
            },
        },
    }
@patch("saleor.tax.webhooks.shared.trigger_webhook_sync_promise")
def test_order_calculate_taxes_order_voucher_and_manual_discount(
    mocked_trigger_webhook_sync_promise,
    order_with_lines,
    voucher,
    subscription_order_calculate_taxes,
    tax_configuration_tax_app,
    plugins_manager,
):
    """A manual discount overrides the voucher and is split subtotal/shipping pro rata."""
    # given: an unconfirmed order with an entire-order voucher plus a fixed
    # manual discount applied on top
    order = order_with_lines
    order.status = OrderStatus.UNCONFIRMED
    webhook = subscription_order_calculate_taxes
    channel = order.channel
    shipping_price_amount = order.base_shipping_price_amount
    line_1, line_2 = order.lines.all()
    line_1_unit_price = line_1.base_unit_price_amount
    line_2_unit_price = line_2.base_unit_price_amount
    subtotal_amount = (
        line_1_unit_price * line_1.quantity + line_2_unit_price * line_2.quantity
    )
    total_amount = subtotal_amount + shipping_price_amount
    assert voucher.type == VoucherType.ENTIRE_ORDER
    order.voucher = voucher
    order.save(update_fields=["voucher_id"])
    manual_reward = Decimal(10)
    create_manual_order_discount(
        order=order,
        reason="Manual discount",
        value_type=DiscountValueType.FIXED,
        value=manual_reward,
    )
    # The manual amount is apportioned between subtotal and shipping by their
    # share of the order total.
    subtotal_manual_reward_portion = (subtotal_amount / total_amount) * manual_reward
    shipping_manual_reward_portion = manual_reward - subtotal_manual_reward_portion
    mocked_trigger_webhook_sync_promise.return_value = Promise.resolve({})
    # when
    fetch_order_prices_if_expired(order, plugins_manager, None, None, True).get()
    deliveries = create_delivery_for_subscription_sync_event(
        WebhookEventSyncType.ORDER_CALCULATE_TAXES, order, webhook
    )
    # then: only the manual discount survives; the voucher discount is dropped
    manual_discount = order.discounts.get(type=DiscountType.MANUAL)
    assert manual_discount.amount_value == manual_reward
    assert not order.discounts.filter(type=DiscountType.VOUCHER).first()
    assert json.loads(deliveries.payload.get_payload()) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": {"id": to_global_id_or_none(order.shipping_address)},
            "currency": "USD",
            "discounts": [
                {
                    "amount": {"amount": subtotal_manual_reward_portion},
                    "type": TaxableObjectDiscountType.SUBTOTAL,
                },
                {
                    "amount": {"amount": shipping_manual_reward_portion},
                    "type": TaxableObjectDiscountType.SHIPPING,
                },
            ],
            "channel": {"id": to_global_id_or_none(channel)},
            "lines": [
                {
                    "chargeTaxes": True,
                    "productName": line_1.product_name,
                    "productSku": line_1.product_sku,
                    "quantity": line_1.quantity,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(line_1),
                    },
                    "totalPrice": {"amount": line_1_unit_price * line_1.quantity},
                    "unitPrice": {"amount": line_1_unit_price},
                    "variantName": line_1.variant_name,
                },
                {
                    "chargeTaxes": True,
                    "productName": line_2.product_name,
                    "productSku": line_2.product_sku,
                    "quantity": line_2.quantity,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(line_2),
                    },
                    "totalPrice": {"amount": line_2_unit_price * line_2.quantity},
                    "unitPrice": {"amount": line_2_unit_price},
                    "variantName": line_2.variant_name,
                },
            ],
            "pricesEnteredWithTax": False,
            "shippingPrice": {"amount": shipping_price_amount},
            "sourceObject": {
                "__typename": "Order",
                "id": to_global_id_or_none(order),
                "metadata": [
                    {"key": key, "value": value}
                    for key, value in order.metadata.items()
                ],
                "privateMetadata": [
                    {"key": key, "value": value}
                    for key, value in order.private_metadata.items()
                ],
                "user": {"id": to_global_id_or_none(order.user)},
            },
        },
    }
@patch("saleor.tax.webhooks.shared.trigger_webhook_sync_promise")
def test_order_calculate_taxes_order_promotion_and_manual_discount(
    mocked_trigger_webhook_sync_promise,
    order_with_lines,
    order_promotion_with_rule,
    subscription_order_calculate_taxes,
    tax_configuration_tax_app,
    plugins_manager,
):
    """A manual discount overrides an order promotion and is split pro rata."""
    # given: an unconfirmed order eligible for an order promotion, plus a fixed
    # manual discount applied on top
    order = order_with_lines
    order.status = OrderStatus.UNCONFIRMED
    webhook = subscription_order_calculate_taxes
    channel = order.channel
    shipping_price_amount = order.base_shipping_price_amount
    line_1, line_2 = order.lines.all()
    line_1_unit_price = line_1.base_unit_price_amount
    line_2_unit_price = line_2.base_unit_price_amount
    subtotal_amount = (
        line_1_unit_price * line_1.quantity + line_2_unit_price * line_2.quantity
    )
    total_amount = subtotal_amount + shipping_price_amount
    promotion = order_promotion_with_rule
    rule = promotion.rules.get()
    rule.order_predicate = {
        "discountedObjectPredicate": {"baseSubtotalPrice": {"range": {"gte": 10}}}
    }
    rule.save(update_fields=["order_predicate"])
    assert rule.reward_type == RewardType.SUBTOTAL_DISCOUNT
    manual_reward = Decimal(10)
    create_manual_order_discount(
        order=order,
        reason="Manual discount",
        value_type=DiscountValueType.FIXED,
        value=manual_reward,
    )
    # The manual amount is apportioned between subtotal and shipping by their
    # share of the order total.
    subtotal_manual_reward_portion = (subtotal_amount / total_amount) * manual_reward
    shipping_manual_reward_portion = manual_reward - subtotal_manual_reward_portion
    mocked_trigger_webhook_sync_promise.return_value = Promise.resolve({})
    # when
    fetch_order_prices_if_expired(order, plugins_manager, None, None, True).get()
    deliveries = create_delivery_for_subscription_sync_event(
        WebhookEventSyncType.ORDER_CALCULATE_TAXES, order, webhook
    )
    # then: only the manual discount survives; the promotion discount is dropped
    manual_discount = order.discounts.get(type=DiscountType.MANUAL)
    assert manual_discount.amount_value == manual_reward
    assert not order.discounts.filter(type=DiscountType.ORDER_PROMOTION).first()
    assert json.loads(deliveries.payload.get_payload()) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": {"id": to_global_id_or_none(order.shipping_address)},
            "currency": "USD",
            "discounts": [
                {
                    "amount": {"amount": subtotal_manual_reward_portion},
                    "type": TaxableObjectDiscountType.SUBTOTAL,
                },
                {
                    "amount": {"amount": shipping_manual_reward_portion},
                    "type": TaxableObjectDiscountType.SHIPPING,
                },
            ],
            "channel": {"id": to_global_id_or_none(channel)},
            "lines": [
                {
                    "chargeTaxes": True,
                    "productName": line_1.product_name,
                    "productSku": line_1.product_sku,
                    "quantity": line_1.quantity,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(line_1),
                    },
                    "totalPrice": {"amount": line_1_unit_price * line_1.quantity},
                    "unitPrice": {"amount": line_1_unit_price},
                    "variantName": line_1.variant_name,
                },
                {
                    "chargeTaxes": True,
                    "productName": line_2.product_name,
                    "productSku": line_2.product_sku,
                    "quantity": line_2.quantity,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(line_2),
                    },
                    "totalPrice": {"amount": line_2_unit_price * line_2.quantity},
                    "unitPrice": {"amount": line_2_unit_price},
                    "variantName": line_2.variant_name,
                },
            ],
            "pricesEnteredWithTax": False,
            "shippingPrice": {"amount": shipping_price_amount},
            "sourceObject": {
                "__typename": "Order",
                "id": to_global_id_or_none(order),
                "metadata": [
                    {"key": key, "value": value}
                    for key, value in order.metadata.items()
                ],
                "privateMetadata": [
                    {"key": key, "value": value}
                    for key, value in order.private_metadata.items()
                ],
                "user": {"id": to_global_id_or_none(order.user)},
            },
        },
    }
@patch("saleor.tax.webhooks.shared.trigger_webhook_sync_promise")
def test_order_calculate_taxes_free_shipping_voucher_and_manual_discount_fixed(
    mocked_trigger_webhook_sync_promise,
    order_with_lines,
    voucher_free_shipping,
    subscription_order_calculate_taxes,
    tax_configuration_tax_app,
    plugins_manager,
):
    """With free shipping, a fixed manual discount applies wholly to the subtotal."""
    # given: an unconfirmed order with a free-shipping voucher and a fixed
    # manual discount on top
    order = order_with_lines
    order.status = OrderStatus.UNCONFIRMED
    voucher = voucher_free_shipping
    webhook = subscription_order_calculate_taxes
    channel = order.channel
    shipping_price_amount = order.base_shipping_price_amount
    line_1, line_2 = order.lines.all()
    line_1_unit_price = line_1.base_unit_price_amount
    line_2_unit_price = line_2.base_unit_price_amount
    assert voucher.type == VoucherType.SHIPPING
    order.voucher = voucher
    order.save(update_fields=["voucher_id"])
    create_or_update_voucher_discount_objects_for_order(order)
    manual_reward = Decimal(10)
    create_manual_order_discount(
        order=order,
        reason="Manual discount",
        value_type=DiscountValueType.FIXED,
        value=manual_reward,
    )
    # Since shipping is free, whole manual discount should be applied to subtotal
    subtotal_manual_reward_portion = manual_reward
    mocked_trigger_webhook_sync_promise.return_value = Promise.resolve({})
    # when
    fetch_order_prices_if_expired(order, plugins_manager, None, None, True).get()
    deliveries = create_delivery_for_subscription_sync_event(
        WebhookEventSyncType.ORDER_CALCULATE_TAXES, order, webhook
    )
    # then: the voucher zeroes shipping; the payload shows only a SUBTOTAL discount
    manual_discount = order.discounts.get(type=DiscountType.MANUAL)
    assert manual_discount.amount_value == subtotal_manual_reward_portion
    voucher_discount = order.discounts.get(type=DiscountType.VOUCHER)
    assert voucher_discount.amount_value == shipping_price_amount
    assert json.loads(deliveries.payload.get_payload()) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": {"id": to_global_id_or_none(order.shipping_address)},
            "currency": "USD",
            "discounts": [
                {
                    "amount": {"amount": subtotal_manual_reward_portion},
                    "type": TaxableObjectDiscountType.SUBTOTAL,
                },
            ],
            "channel": {"id": to_global_id_or_none(channel)},
            "lines": [
                {
                    "chargeTaxes": True,
                    "productName": line_1.product_name,
                    "productSku": line_1.product_sku,
                    "quantity": line_1.quantity,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(line_1),
                    },
                    "totalPrice": {"amount": line_1_unit_price * line_1.quantity},
                    "unitPrice": {"amount": line_1_unit_price},
                    "variantName": line_1.variant_name,
                },
                {
                    "chargeTaxes": True,
                    "productName": line_2.product_name,
                    "productSku": line_2.product_sku,
                    "quantity": line_2.quantity,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(line_2),
                    },
                    "totalPrice": {"amount": line_2_unit_price * line_2.quantity},
                    "unitPrice": {"amount": line_2_unit_price},
                    "variantName": line_2.variant_name,
                },
            ],
            "pricesEnteredWithTax": False,
            "shippingPrice": {"amount": 0},
            "sourceObject": {
                "__typename": "Order",
                "id": to_global_id_or_none(order),
                "metadata": [
                    {"key": key, "value": value}
                    for key, value in order.metadata.items()
                ],
                "privateMetadata": [
                    {"key": key, "value": value}
                    for key, value in order.private_metadata.items()
                ],
                "user": {"id": to_global_id_or_none(order.user)},
            },
        },
    }
@patch("saleor.tax.webhooks.shared.trigger_webhook_sync_promise")
def test_order_calculate_taxes_free_shipping_voucher_and_manual_discount_percentage(
    mocked_trigger_webhook_sync_promise,
    order_with_lines,
    voucher_free_shipping,
    subscription_order_calculate_taxes,
    tax_configuration_tax_app,
    plugins_manager,
):
    """With free shipping, a percentage manual discount is based on the subtotal only."""
    # given: an unconfirmed order with a free-shipping voucher and a percentage
    # manual discount on top
    order = order_with_lines
    order.status = OrderStatus.UNCONFIRMED
    voucher = voucher_free_shipping
    webhook = subscription_order_calculate_taxes
    channel = order.channel
    shipping_price_amount = order.base_shipping_price_amount
    line_1, line_2 = order.lines.all()
    line_1_unit_price = line_1.base_unit_price_amount
    line_2_unit_price = line_2.base_unit_price_amount
    subtotal_amount = (
        line_1_unit_price * line_1.quantity + line_2_unit_price * line_2.quantity
    )
    total_amount = subtotal_amount + shipping_price_amount
    assert voucher.type == VoucherType.SHIPPING
    order.voucher = voucher
    order.save(update_fields=["voucher_id"])
    create_or_update_voucher_discount_objects_for_order(order)
    # The free-shipping voucher removes the shipping portion from the base the
    # percentage discount is computed against.
    total_amount -= shipping_price_amount
    manual_reward = Decimal(10)
    create_manual_order_discount(
        order=order,
        reason="Manual discount",
        value_type=DiscountValueType.PERCENTAGE,
        value=manual_reward,
    )
    # Since shipping is free, whole manual discount should be applied to subtotal
    subtotal_manual_reward_portion = manual_reward / 100 * total_amount
    mocked_trigger_webhook_sync_promise.return_value = Promise.resolve({})
    # when
    fetch_order_prices_if_expired(order, plugins_manager, None, None, True).get()
    deliveries = create_delivery_for_subscription_sync_event(
        WebhookEventSyncType.ORDER_CALCULATE_TAXES, order, webhook
    )
    # then: the voucher zeroes shipping; the payload shows only a SUBTOTAL discount
    manual_discount = order.discounts.get(type=DiscountType.MANUAL)
    assert manual_discount.amount_value == subtotal_manual_reward_portion
    voucher_discount = order.discounts.get(type=DiscountType.VOUCHER)
    assert voucher_discount.amount_value == shipping_price_amount
    assert json.loads(deliveries.payload.get_payload()) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": {"id": to_global_id_or_none(order.shipping_address)},
            "currency": "USD",
            "discounts": [
                {
                    "amount": {"amount": subtotal_manual_reward_portion},
                    "type": TaxableObjectDiscountType.SUBTOTAL,
                },
            ],
            "channel": {"id": to_global_id_or_none(channel)},
            "lines": [
                {
                    "chargeTaxes": True,
                    "productName": line_1.product_name,
                    "productSku": line_1.product_sku,
                    "quantity": line_1.quantity,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(line_1),
                    },
                    "totalPrice": {"amount": line_1_unit_price * line_1.quantity},
                    "unitPrice": {"amount": line_1_unit_price},
                    "variantName": line_1.variant_name,
                },
                {
                    "chargeTaxes": True,
                    "productName": line_2.product_name,
                    "productSku": line_2.product_sku,
                    "quantity": line_2.quantity,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(line_2),
                    },
                    "totalPrice": {"amount": line_2_unit_price * line_2.quantity},
                    "unitPrice": {"amount": line_2_unit_price},
                    "variantName": line_2.variant_name,
                },
            ],
            "pricesEnteredWithTax": False,
            "shippingPrice": {"amount": 0},
            "sourceObject": {
                "__typename": "Order",
                "id": to_global_id_or_none(order),
                "metadata": [
                    {"key": key, "value": value}
                    for key, value in order.metadata.items()
                ],
                "privateMetadata": [
                    {"key": key, "value": value}
                    for key, value in order.private_metadata.items()
                ],
                "user": {"id": to_global_id_or_none(order.user)},
            },
        },
    }
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/order/tests/webhooks/subscriptions/test_order_calculate_taxes.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 1234,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/order/tests/webhooks/test_order_calculate_taxes.py | import json
from unittest.mock import ANY, patch
import pytest
from freezegun import freeze_time
from ....core import EventDeliveryStatus
from ....core.models import EventDelivery
from ....core.taxes import TaxDataError
from ....graphql.webhook.subscription_payload import (
generate_payload_promise_from_subscription,
)
from ....tax.webhooks.parser import parse_tax_data
from ....webhook.event_types import WebhookEventSyncType
from ...webhooks.order_calculate_taxes import (
generate_order_payload_for_tax_calculation,
get_taxes,
)
@freeze_time()
@patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_for_order(
    mock_request,
    tax_data_response,
    order,
    tax_app,
):
    """Without a subscription query, the static (legacy) payload is delivered."""
    # given: the tax webhook has no subscription query, forcing the static payload
    mock_request.return_value = tax_data_response
    app_identifier = None
    webhook = tax_app.webhooks.get(name="tax-webhook-1")
    webhook.subscription_query = None
    webhook.save(update_fields=["subscription_query"])
    # when
    tax_data = get_taxes(
        order=order,
        lines=order.lines.all(),
        app_identifier=app_identifier,
        requestor=None,
    ).get()
    # then: delivery is sent but never persisted; payload is the static one
    mock_request.assert_called_once()
    assert not EventDelivery.objects.exists()
    delivery = mock_request.mock_calls[0].args[0]
    assert delivery.payload.get_payload() == generate_order_payload_for_tax_calculation(
        order
    )
    assert delivery.status == EventDeliveryStatus.PENDING
    assert delivery.event_type == WebhookEventSyncType.ORDER_CALCULATE_TAXES
    assert delivery.webhook == webhook
    assert tax_data == parse_tax_data(
        WebhookEventSyncType.ORDER_CALCULATE_TAXES,
        tax_data_response,
        order.lines.count(),
    )
@freeze_time()
@patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_for_order_with_sync_subscription(
    mock_request,
    tax_data_response,
    order,
    tax_app,
):
    """With a subscription query set, the subscription payload is delivered instead."""
    # given: the tax webhook carries a minimal CalculateTaxes subscription query
    mock_request.return_value = tax_data_response
    webhook = tax_app.webhooks.get(name="tax-webhook-1")
    webhook.subscription_query = (
        "subscription{event{... on CalculateTaxes{taxBase{currency}}}}"
    )
    webhook.save(update_fields=["subscription_query"])
    app_identifier = None
    # when
    tax_data = get_taxes(
        order=order,
        lines=order.lines.all(),
        app_identifier=app_identifier,
        requestor=None,
    ).get()
    # then: delivery is sent but never persisted; payload matches the query shape
    mock_request.assert_called_once()
    assert not EventDelivery.objects.exists()
    delivery = mock_request.mock_calls[0].args[0]
    assert delivery.payload.get_payload() == json.dumps(
        {"taxBase": {"currency": "USD"}}
    )
    assert delivery.status == EventDeliveryStatus.PENDING
    assert delivery.event_type == WebhookEventSyncType.ORDER_CALCULATE_TAXES
    assert delivery.webhook == webhook
    assert tax_data == parse_tax_data(
        WebhookEventSyncType.ORDER_CALCULATE_TAXES,
        tax_data_response,
        order.lines.count(),
    )
def test_get_taxes_for_order_with_app_identifier_app_missing(order):
    """An identifier that matches no installed app raises TaxDataError."""
    # when & then: resolving taxes against an unknown app must fail loudly
    with pytest.raises(TaxDataError, match="Configured tax app doesn't exist."):
        get_taxes(order, order.lines.all(), "missing_app", None).get()
def test_get_taxes_for_order_with_app_identifier_webhook_is_missing(order, app):
    """An app without a tax-calculation webhook cannot provide taxes."""
    # given: ``app`` has no ORDER_CALCULATE_TAXES webhook configured
    expected_message = (
        "Configured tax app's webhook for taxes calculation doesn't exists."
    )

    # when & then
    with pytest.raises(TaxDataError, match=expected_message):
        get_taxes(order, order.lines.all(), app.identifier, None).get()
@patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_for_order_with_app_identifier_invalid_response(
    mock_request,
    order,
    tax_app,
    tax_data_response_factory,
):
    """A malformed tax response (negative shipping rate) raises TaxDataError."""
    # given: the configured tax app answers with an invalid shipping tax rate
    mock_request.return_value = tax_data_response_factory(shipping_tax_rate=-10)
    target_webhook = tax_app.webhooks.get(name="tax-webhook-1")
    target_webhook.subscription_query = (
        "subscription{event{... on CalculateTaxes{taxBase{currency}}}}"
    )
    target_webhook.save(update_fields=["subscription_query"])

    # when & then: fetching taxes for the order must fail
    with pytest.raises(TaxDataError):
        get_taxes(order, order.lines.all(), tax_app.identifier, None).get()
@patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_for_order_with_app_identifier_empty_response(
    mock_request,
    order,
    tax_app,
):
    """A tax app that returns nothing at all raises TaxDataError."""
    # given: the configured tax app yields an empty (None) response
    mock_request.return_value = None
    target_webhook = tax_app.webhooks.get(name="tax-webhook-1")
    target_webhook.subscription_query = (
        "subscription{event{... on CalculateTaxes{taxBase{currency}}}}"
    )
    target_webhook.save(update_fields=["subscription_query"])

    # when & then: fetching taxes for the order must fail
    with pytest.raises(TaxDataError):
        get_taxes(order, order.lines.all(), tax_app.identifier, None).get()
@freeze_time()
@patch(
    "saleor.webhook.transport.synchronous.transport.generate_payload_promise_from_subscription",
    wraps=generate_payload_promise_from_subscription,
)
@patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_for_order_with_app_identifier(
    mock_request,
    mock_generate_payload,
    tax_data_response,
    order,
    tax_app,
):
    """Targeting a specific tax app by identifier uses its subscription payload."""
    # given: the app's webhook exposes a minimal CalculateTaxes subscription
    subscription_query = "subscription{event{... on CalculateTaxes{taxBase{currency}}}}"
    expected_payload = {"taxBase": {"currency": "USD"}}
    mock_request.return_value = tax_data_response
    webhook = tax_app.webhooks.get(name="tax-webhook-1")
    webhook.subscription_query = subscription_query
    webhook.save(update_fields=["subscription_query"])
    app_identifier = tax_app.identifier
    # when
    tax_data = get_taxes(order, order.lines.all(), app_identifier, None).get()
    # then: the payload generator is invoked with the subscription details,
    # and the delivery is sent once without being persisted
    mock_generate_payload.assert_called_once_with(
        event_type=WebhookEventSyncType.ORDER_CALCULATE_TAXES,
        subscribable_object=order,
        subscription_query=subscription_query,
        request=ANY,
    )
    mock_request.assert_called_once()
    assert not EventDelivery.objects.exists()
    delivery = mock_request.mock_calls[0].args[0]
    assert delivery.payload.get_payload() == json.dumps(expected_payload)
    assert delivery.status == EventDeliveryStatus.PENDING
    assert delivery.event_type == WebhookEventSyncType.ORDER_CALCULATE_TAXES
    assert delivery.webhook == webhook
    assert tax_data == parse_tax_data(
        WebhookEventSyncType.ORDER_CALCULATE_TAXES,
        tax_data_response,
        order.lines.count(),
    )
@freeze_time()
@patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_for_order_sets_issuing_principal(
    mock_request, tax_data_response, order, tax_app, customer_user
):
    """The requestor passed to get_taxes is exposed as issuingPrincipal in the payload."""
    # given: the subscription query asks only for the issuing principal
    mock_request.return_value = tax_data_response
    webhook = tax_app.webhooks.get(name="tax-webhook-1")
    webhook.subscription_query = "subscription{event{issuingPrincipal {__typename}}}"
    webhook.save(update_fields=["subscription_query"])
    app_identifier = tax_app.identifier
    # when: taxes are requested on behalf of a customer user
    tax_data = get_taxes(
        order=order,
        lines=order.lines.all(),
        app_identifier=app_identifier,
        requestor=customer_user,
    ).get()
    # then: the payload reports a User as the issuing principal
    mock_request.assert_called_once()
    assert not EventDelivery.objects.exists()
    delivery = mock_request.mock_calls[0].args[0]
    assert delivery.payload.get_payload() == json.dumps(
        {"issuingPrincipal": {"__typename": "User"}}
    )
    assert delivery.status == EventDeliveryStatus.PENDING
    assert delivery.event_type == WebhookEventSyncType.ORDER_CALCULATE_TAXES
    assert delivery.webhook == webhook
    assert tax_data == parse_tax_data(
        WebhookEventSyncType.ORDER_CALCULATE_TAXES,
        tax_data_response,
        order.lines.count(),
    )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/order/tests/webhooks/test_order_calculate_taxes.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 210,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/order/webhooks/order_calculate_taxes.py | import json
import logging
from collections.abc import Iterable
from typing import TYPE_CHECKING, Union
import graphene
from django.db.models import QuerySet
from promise import Promise
from ...app.models import App
from ...core.db.connection import allow_writer
from ...core.prices import quantize_price, quantize_price_fields
from ...core.taxes import TaxData
from ...discount.utils.shared import is_order_level_discount
from ...tax.utils import get_charge_taxes_for_order
from ...tax.webhooks import shared
from ...webhook import traced_payload_generator
from ...webhook.event_types import WebhookEventSyncType
from ...webhook.payload_serializers import PayloadSerializer
from ..models import Order, OrderLine
if TYPE_CHECKING:
from ...account.models import User
from ...app.models import App
logger = logging.getLogger(__name__)
def _generate_order_lines_payload_for_tax_calculation(lines: QuerySet[OrderLine]):
    """Serialize order lines into the static tax-calculation payload fragment.

    Amounts are the base unit prices quantized to the line currency; the
    ``charge_taxes`` flag is resolved once from the first line's order and
    applied uniformly (all lines belong to the same order).
    """
    serializer = PayloadSerializer()
    charge_taxes = False
    if lines:
        charge_taxes = get_charge_taxes_for_order(lines[0].order)
    return serializer.serialize(
        lines,
        fields=("product_name", "variant_name", "quantity"),
        extra_dict_data={
            "variant_id": (lambda line: line.product_variant_id),
            # Variant-derived fields fall back gracefully when the variant
            # has been deleted (line.variant is None).
            "full_name": (
                lambda line: line.variant.display_product() if line.variant else None
            ),
            "product_metadata": (
                lambda line: line.variant.product.metadata if line.variant else {}
            ),
            "product_type_metadata": (
                lambda line: (
                    line.variant.product.product_type.metadata if line.variant else {}
                )
            ),
            "charge_taxes": (lambda _line: charge_taxes),
            "sku": (lambda line: line.product_sku),
            "unit_amount": (
                lambda line: quantize_price(line.base_unit_price_amount, line.currency)
            ),
            # Total is derived from the base unit price, not a stored total.
            "total_amount": (
                lambda line: quantize_price(
                    line.base_unit_price_amount * line.quantity, line.currency
                )
            ),
        },
    )
@allow_writer()
@traced_payload_generator
def generate_order_payload_for_tax_calculation(order: "Order"):
    """Build the static JSON payload sent to tax webhooks for *order*.

    Includes channel, address (shipping falling back to billing), user info,
    order-level discounts, shipping data, and serialized lines. Monetary
    amounts are quantized to the order currency.
    """
    serializer = PayloadSerializer()
    tax_configuration = order.channel.tax_configuration
    prices_entered_with_tax = tax_configuration.prices_entered_with_tax
    # Prepare Order data
    address = order.shipping_address or order.billing_address
    lines = order.lines.all()
    # Prepare user data
    user = order.user
    user_id = None
    user_public_metadata = {}
    if user:
        user_id = graphene.Node.to_global_id("User", user.id)
        user_public_metadata = user.metadata
    # Prepare discount data
    discounts = order.discounts.all()
    discounts_dict = []
    for discount in discounts:
        # Only order level discounts, like entire order vouchers,
        # order promotions and manual discounts should be taken into account
        if not is_order_level_discount(discount):
            continue
        # NOTE: quantize_price_fields mutates the discount instance in place.
        quantize_price_fields(discount, ("amount_value",), order.currency)
        discount_amount = quantize_price(discount.amount_value, order.currency)
        discounts_dict.append({"name": discount.name, "amount": discount_amount})
    # Prepare shipping data
    shipping_method_name = order.shipping_method_name
    shipping_method_amount = quantize_price(
        order.base_shipping_price_amount, order.currency
    )
    address_fields = (
        "first_name",
        "last_name",
        "company_name",
        "street_address_1",
        "street_address_2",
        "city",
        "city_area",
        "postal_code",
        "country",
        "country_area",
        "phone",
    )
    order_data = serializer.serialize(
        [order],
        fields=["currency", "metadata"],
        additional_fields={
            "channel": (lambda o: o.channel, ("slug", "currency_code")),
            "address": (lambda o: address, address_fields),
        },
        extra_dict_data={
            "id": graphene.Node.to_global_id("Order", order.id),
            "user_id": user_id,
            "user_public_metadata": user_public_metadata,
            "discounts": discounts_dict,
            "included_taxes_in_prices": prices_entered_with_tax,
            "shipping_amount": shipping_method_amount,
            "shipping_name": shipping_method_name,
            "lines": json.loads(
                _generate_order_lines_payload_for_tax_calculation(lines)
            ),
        },
    )
    return order_data
def get_taxes(
    order: Order,
    lines: Iterable["OrderLine"],
    app_identifier: str | None,
    requestor: Union["App", "User", None],
) -> Promise[TaxData | None]:
    """Request tax data for *order* via the shared sync tax-webhook flow.

    The static payload is generated eagerly; ``lines`` is materialized only
    to count the expected number of line entries in the response.
    """
    line_items = list(lines)
    static_payload = generate_order_payload_for_tax_calculation(order)
    return shared.get_taxes(
        taxable_object=order,
        event_type=WebhookEventSyncType.ORDER_CALCULATE_TAXES,
        app_identifier=app_identifier,
        static_payload=static_payload,
        lines_count=len(line_items),
        requestor=requestor,
    )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/order/webhooks/order_calculate_taxes.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 135,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/tax/tests/webhooks/test_shared.py | import json
from unittest import mock
import graphene
import pytest
from saleor.core.taxes import TaxDataError
from ....core import EventDeliveryStatus
from ....core.models import EventDelivery
from ....webhook.event_types import WebhookEventSyncType
from ....webhook.models import Webhook, WebhookEvent
# from ....webhook.transport.synchronous import trigger_taxes_all_webhooks_sync
from ...webhooks.parser import parse_tax_data
from ...webhooks.shared import (
get_taxes,
get_taxes_for_app_identifier,
get_taxes_from_all_webhooks,
)
TAX_SUBSCRIPTION_QUERY = """
subscription {
event {
... on CalculateTaxes {
taxBase {
sourceObject {
...on Order{
id
lines{
id
}
}
}
}
}
}
}
"""
@pytest.fixture
def tax_webhooks(tax_app):
    """Create three tax webhooks on ``tax_app``, each subscribed to ORDER_CALCULATE_TAXES."""
    pending = []
    for i in range(3):
        pending.append(
            Webhook(
                name=f"Tax order webhook no {i}",
                app=tax_app,
                target_url=f"https://127.0.0.1/tax-order-{i}",
            )
        )
    created = Webhook.objects.bulk_create(pending)
    events = [
        WebhookEvent(
            event_type=WebhookEventSyncType.ORDER_CALCULATE_TAXES,
            webhook=webhook,
        )
        for webhook in created
    ]
    WebhookEvent.objects.bulk_create(events)
    return created
@mock.patch(
    "saleor.tax.webhooks.shared.get_taxes_from_all_webhooks",
    wraps=get_taxes_from_all_webhooks,
)
@mock.patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_from_all_webhooks(
    mocked_send_webhook_request_sync,
    mocked_get_taxes_from_all_webhooks,
    tax_app,
    tax_data_response,
    tax_configuration_tax_app,
    order_with_lines,
    customer_user,
):
    """Without a pinned tax app, get_taxes falls through to the all-webhooks path."""
    # given: the tax configuration points at no specific app
    tax_configuration_tax_app.tax_app_id = None
    tax_configuration_tax_app.save(update_fields=["tax_app_id"])
    mocked_send_webhook_request_sync.return_value = tax_data_response
    event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
    data = '{"key": "value"}'
    webhook = tax_app.webhooks.get(name="tax-webhook-1")
    webhook.subscription_query = TAX_SUBSCRIPTION_QUERY
    webhook.save(update_fields=["subscription_query"])
    lines_count = len(tax_data_response["lines"])
    # when
    tax_data = get_taxes(
        taxable_object=order_with_lines,
        event_type=event_type,
        app_identifier=None,
        static_payload=data,
        lines_count=lines_count,
        requestor=customer_user,
    ).get()
    # then: one delivery was sent (not persisted) with the subscription payload
    mocked_send_webhook_request_sync.assert_called_once()
    mocked_get_taxes_from_all_webhooks.assert_called_once()
    assert not EventDelivery.objects.exists()
    delivery = mocked_send_webhook_request_sync.mock_calls[0].args[0]
    assert json.loads(delivery.payload.get_payload()) == {
        "taxBase": {
            "sourceObject": {
                "id": graphene.Node.to_global_id("Order", order_with_lines.pk),
                "lines": [
                    {"id": graphene.Node.to_global_id("OrderLine", line.pk)}
                    for line in order_with_lines.lines.all()
                ],
            }
        }
    }
    assert delivery.status == EventDeliveryStatus.PENDING
    assert delivery.event_type == event_type
    assert delivery.webhook == webhook
    assert tax_data == parse_tax_data(event_type, tax_data_response, lines_count)
@mock.patch(
    "saleor.tax.webhooks.shared.get_taxes_from_all_webhooks",
    wraps=get_taxes_from_all_webhooks,
)
@mock.patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_from_all_webhooks_multiple_webhooks(
    mocked_send_webhook_request_sync,
    mocked_get_taxes_from_all_webhooks,
    tax_app,
    tax_data_response,
    tax_configuration_tax_app,
    order_with_lines,
    customer_user,
    tax_webhooks,
):
    """Webhooks are tried in turn; the first valid tax response wins."""
    # given: no pinned tax app, and the first two webhook responses are
    # invalid (None, {}) before a valid one arrives
    tax_configuration_tax_app.tax_app_id = None
    tax_configuration_tax_app.save(update_fields=["tax_app_id"])
    mocked_send_webhook_request_sync.side_effect = [None, {}, tax_data_response, None]
    event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
    data = '{"key": "value"}'
    webhook = tax_app.webhooks.get(name="tax-webhook-1")
    webhook.subscription_query = TAX_SUBSCRIPTION_QUERY
    webhook.save(update_fields=["subscription_query"])
    lines_count = len(tax_data_response["lines"])
    # when
    tax_data = get_taxes(
        taxable_object=order_with_lines,
        event_type=event_type,
        app_identifier=None,
        static_payload=data,
        lines_count=lines_count,
        requestor=customer_user,
    ).get()
    # then: all four webhooks were attempted; the parsed data comes from the
    # first valid response
    assert mocked_send_webhook_request_sync.call_count == 4
    assert mocked_get_taxes_from_all_webhooks.call_count == 1
    assert not EventDelivery.objects.exists()
    delivery = mocked_send_webhook_request_sync.mock_calls[0].args[0]
    assert json.loads(delivery.payload.get_payload()) == {
        "taxBase": {
            "sourceObject": {
                "id": graphene.Node.to_global_id("Order", order_with_lines.pk),
                "lines": [
                    {"id": graphene.Node.to_global_id("OrderLine", line.pk)}
                    for line in order_with_lines.lines.all()
                ],
            }
        }
    }
    assert delivery.status == EventDeliveryStatus.PENDING
    assert delivery.event_type == event_type
    assert delivery.webhook == webhook
    assert tax_data == parse_tax_data(event_type, tax_data_response, lines_count)
@mock.patch(
"saleor.tax.webhooks.shared.get_taxes_from_all_webhooks",
wraps=get_taxes_from_all_webhooks,
)
@mock.patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_from_all_webhooks_multiple_invalid_webhooks(
mocked_send_webhook_request_sync,
mocked_get_taxes_from_all_webhooks,
tax_app,
tax_data_response,
tax_configuration_tax_app,
order_with_lines,
customer_user,
tax_webhooks,
):
# given
tax_configuration_tax_app.tax_app_id = None
tax_configuration_tax_app.save(update_fields=["tax_app_id"])
mocked_send_webhook_request_sync.side_effect = [None, {}, None, None]
event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
data = '{"key": "value"}'
webhook = tax_app.webhooks.get(name="tax-webhook-1")
webhook.subscription_query = TAX_SUBSCRIPTION_QUERY
webhook.save(update_fields=["subscription_query"])
lines_count = len(tax_data_response["lines"])
# when
tax_data = get_taxes(
taxable_object=order_with_lines,
event_type=event_type,
app_identifier=None,
static_payload=data,
lines_count=lines_count,
requestor=customer_user,
).get()
# then
assert mocked_send_webhook_request_sync.call_count == 4
assert mocked_get_taxes_from_all_webhooks.call_count == 1
assert not EventDelivery.objects.exists()
delivery = mocked_send_webhook_request_sync.mock_calls[0].args[0]
assert json.loads(delivery.payload.get_payload()) == {
"taxBase": {
"sourceObject": {
"id": graphene.Node.to_global_id("Order", order_with_lines.pk),
"lines": [
{"id": graphene.Node.to_global_id("OrderLine", line.pk)}
for line in order_with_lines.lines.all()
],
}
}
}
assert delivery.status == EventDeliveryStatus.PENDING
assert delivery.event_type == event_type
assert delivery.webhook == webhook
assert tax_data is None
@mock.patch(
"saleor.tax.webhooks.shared.get_taxes_for_app_identifier",
wraps=get_taxes_for_app_identifier,
)
@mock.patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_for_app_identifier(
mocked_send_webhook_request_sync,
mocked_get_taxes_for_app_identifier,
tax_app,
tax_data_response,
tax_configuration_tax_app,
order_with_lines,
customer_user,
):
# given
tax_configuration_tax_app.tax_app_id = tax_app.identifier
tax_configuration_tax_app.save(update_fields=["tax_app_id"])
mocked_send_webhook_request_sync.return_value = tax_data_response
event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
data = '{"key": "value"}'
webhook = tax_app.webhooks.get(name="tax-webhook-1")
webhook.subscription_query = TAX_SUBSCRIPTION_QUERY
webhook.save(update_fields=["subscription_query"])
lines_count = len(tax_data_response["lines"])
# when
tax_data = get_taxes(
taxable_object=order_with_lines,
event_type=event_type,
app_identifier=tax_app.identifier,
static_payload=data,
lines_count=lines_count,
requestor=customer_user,
).get()
# then
mocked_send_webhook_request_sync.assert_called_once()
mocked_get_taxes_for_app_identifier.assert_called_once()
assert not EventDelivery.objects.exists()
delivery = mocked_send_webhook_request_sync.mock_calls[0].args[0]
assert json.loads(delivery.payload.get_payload()) == {
"taxBase": {
"sourceObject": {
"id": graphene.Node.to_global_id("Order", order_with_lines.pk),
"lines": [
{"id": graphene.Node.to_global_id("OrderLine", line.pk)}
for line in order_with_lines.lines.all()
],
}
}
}
assert delivery.status == EventDeliveryStatus.PENDING
assert delivery.event_type == event_type
assert delivery.webhook == webhook
assert tax_data == parse_tax_data(event_type, tax_data_response, lines_count)
@mock.patch(
"saleor.tax.webhooks.shared.get_taxes_for_app_identifier",
wraps=get_taxes_for_app_identifier,
)
@mock.patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_for_app_identifier_multiple_webhooks(
mocked_send_webhook_request_sync,
mocked_get_taxes_for_app_identifier,
tax_app,
tax_data_response,
tax_configuration_tax_app,
order_with_lines,
customer_user,
tax_webhooks,
):
# given
tax_configuration_tax_app.tax_app_id = tax_app.identifier
tax_configuration_tax_app.save(update_fields=["tax_app_id"])
mocked_send_webhook_request_sync.return_value = tax_data_response
event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
data = '{"key": "value"}'
webhook = tax_app.webhooks.get(name="tax-webhook-1")
webhook.subscription_query = TAX_SUBSCRIPTION_QUERY
webhook.save(update_fields=["subscription_query"])
lines_count = len(tax_data_response["lines"])
# when
tax_data = get_taxes(
taxable_object=order_with_lines,
event_type=event_type,
app_identifier=tax_app.identifier,
static_payload=data,
lines_count=lines_count,
requestor=customer_user,
).get()
# then
mocked_send_webhook_request_sync.assert_called_once()
mocked_get_taxes_for_app_identifier.assert_called_once()
assert not EventDelivery.objects.exists()
delivery = mocked_send_webhook_request_sync.mock_calls[0].args[0]
assert json.loads(delivery.payload.get_payload()) == {
"taxBase": {
"sourceObject": {
"id": graphene.Node.to_global_id("Order", order_with_lines.pk),
"lines": [
{"id": graphene.Node.to_global_id("OrderLine", line.pk)}
for line in order_with_lines.lines.all()
],
}
}
}
assert delivery.status == EventDeliveryStatus.PENDING
assert delivery.event_type == event_type
assert delivery.webhook == webhook
assert tax_data == parse_tax_data(event_type, tax_data_response, lines_count)
@mock.patch(
"saleor.tax.webhooks.shared.get_taxes_for_app_identifier",
wraps=get_taxes_for_app_identifier,
)
@mock.patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
def test_get_taxes_for_app_identifier_invalid_response(
mocked_send_webhook_request_sync,
mocked_get_taxes_for_app_identifier,
tax_app,
tax_data_response,
tax_configuration_tax_app,
order_with_lines,
customer_user,
tax_webhooks,
):
# given
tax_configuration_tax_app.tax_app_id = None
tax_configuration_tax_app.save(update_fields=["tax_app_id"])
mocked_send_webhook_request_sync.return_value = None
event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
data = '{"key": "value"}'
webhook = tax_app.webhooks.get(name="tax-webhook-1")
webhook.subscription_query = TAX_SUBSCRIPTION_QUERY
webhook.save(update_fields=["subscription_query"])
lines_count = len(tax_data_response["lines"])
# when
with pytest.raises(TaxDataError):
get_taxes(
taxable_object=order_with_lines,
event_type=event_type,
app_identifier=tax_app.identifier,
static_payload=data,
lines_count=lines_count,
requestor=customer_user,
).get()
# then
assert mocked_send_webhook_request_sync.call_count == 1
assert mocked_get_taxes_for_app_identifier.call_count == 1
assert not EventDelivery.objects.exists()
delivery = mocked_send_webhook_request_sync.mock_calls[0].args[0]
assert json.loads(delivery.payload.get_payload()) == {
"taxBase": {
"sourceObject": {
"id": graphene.Node.to_global_id("Order", order_with_lines.pk),
"lines": [
{"id": graphene.Node.to_global_id("OrderLine", line.pk)}
for line in order_with_lines.lines.all()
],
}
}
}
assert delivery.status == EventDeliveryStatus.PENDING
assert delivery.event_type == event_type
assert delivery.webhook == webhook
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/tax/tests/webhooks/test_shared.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 375,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/tax/webhooks/parser.py | import logging
from typing import Any
from pydantic import ValidationError
from ...core.taxes import TAX_ERROR_FIELD_LENGTH, TaxData, TaxDataError, TaxLineData
from ...core.utils.text import safe_truncate
from ...webhook.response_schemas.taxes import CalculateTaxesSchema
from ...webhook.response_schemas.utils.helpers import parse_validation_error
logger = logging.getLogger(__name__)
def parse_tax_data(
event_type: str, response_data: Any, expected_lines_count: int
) -> TaxData:
try:
tax_data = _parse_tax_data(response_data, expected_lines_count)
except ValidationError as e:
errors = e.errors()
logger.warning(
"Webhook response for event %s is invalid: %s",
event_type,
str(e),
extra={"errors": errors},
)
error_msg = safe_truncate(parse_validation_error(e), TAX_ERROR_FIELD_LENGTH)
raise TaxDataError(error_msg, errors=errors) from e
return tax_data
def _parse_tax_data(
response_data: Any,
lines_count: int,
) -> TaxData:
calculated_taxes_model = CalculateTaxesSchema.model_validate(
response_data,
context={"expected_line_count": lines_count},
)
return TaxData(
shipping_price_gross_amount=calculated_taxes_model.shipping_price_gross_amount,
shipping_price_net_amount=calculated_taxes_model.shipping_price_net_amount,
shipping_tax_rate=calculated_taxes_model.shipping_tax_rate,
lines=[
TaxLineData(
tax_rate=tax_line.tax_rate,
total_gross_amount=tax_line.total_gross_amount,
total_net_amount=tax_line.total_net_amount,
)
for tax_line in calculated_taxes_model.lines
],
)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/tax/webhooks/parser.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 45,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/tax/webhooks/shared.py | import logging
from typing import TYPE_CHECKING, Any, Union
from django.conf import settings
from promise import Promise
from ...app.models import App
from ...core.taxes import TaxData, TaxDataError
from ...webhook.transport.synchronous.transport import (
trigger_webhook_sync_promise,
)
from ...webhook.utils import get_webhooks_for_event
from .parser import parse_tax_data
if TYPE_CHECKING:
from ...account.models import User
from ...app.models import App
from ...checkout.models import Checkout
from ...order.models import Order
logger = logging.getLogger(__name__)
def get_taxes(
taxable_object: Union["Checkout", "Order"],
event_type: str,
app_identifier: str | None,
static_payload: str,
lines_count: int,
requestor: Union["App", "User", None],
) -> Promise[TaxData | None]:
if app_identifier:
return get_taxes_for_app_identifier(
event_type=event_type,
app_identifier=app_identifier,
expected_lines_count=lines_count,
subscribable_object=taxable_object,
requestor=requestor,
static_payload=static_payload,
)
return get_taxes_from_all_webhooks(
event_type=event_type,
expected_lines_count=lines_count,
static_payload=static_payload,
subscribable_object=taxable_object,
requestor=requestor,
)
def get_taxes_for_app_identifier(
event_type: str,
app_identifier: str,
static_payload: str,
expected_lines_count: int,
subscribable_object: Union["Checkout", "Order"],
requestor: Union["App", "User", None],
) -> Promise[TaxData | None]:
app = (
App.objects.using(settings.DATABASE_CONNECTION_REPLICA_NAME)
.filter(
identifier=app_identifier,
is_active=True,
)
.order_by("-created_at")
.first()
)
if app is None:
msg = "Configured tax app doesn't exist."
logger.warning(msg)
return Promise.reject(TaxDataError(msg))
webhook = get_webhooks_for_event(event_type, apps_ids=[app.id]).first()
if webhook is None:
msg = "Configured tax app's webhook for taxes calculation doesn't exists."
logger.warning(msg)
return Promise.reject(TaxDataError(msg))
tax_webhook_promise = trigger_webhook_sync_promise(
event_type=event_type,
webhook=webhook,
static_payload=static_payload,
allow_replica=False,
subscribable_object=subscribable_object,
requestor=requestor,
)
def process_response(response_data):
try:
tax_data = parse_tax_data(event_type, response_data, expected_lines_count)
return Promise.resolve(tax_data)
except TaxDataError as e:
return Promise.reject(e)
return tax_webhook_promise.then(process_response)
def get_taxes_from_all_webhooks(
event_type: str,
static_payload: str,
expected_lines_count: int,
subscribable_object: Union["Checkout", "Order"],
requestor: Union["App", "User", None],
) -> Promise[TaxData | None]:
webhooks = get_webhooks_for_event(event_type)
logger.warning(
"Missing tax configuration for channel: %s. All tax sync webhooks "
"will be triggered. This will stop working in future releases. "
"Make sure to configure tax webhooks for each channel.",
subscribable_object.channel.slug,
)
tax_webhook_promises = []
for webhook in webhooks:
tax_webhook_promises.append(
trigger_webhook_sync_promise(
event_type=event_type,
webhook=webhook,
static_payload=static_payload,
allow_replica=False,
subscribable_object=subscribable_object,
requestor=requestor,
)
)
def process_responses(response_data: list[Any]) -> TaxData | None:
for response in response_data:
try:
parsed_response = parse_tax_data(
event_type, response, expected_lines_count
)
return parsed_response
except TaxDataError:
continue
return None
return Promise.all(tax_webhook_promises).then(process_responses)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/tax/webhooks/shared.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 119,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/graphql/order/tests/queries/test_order_excluded_shipping_methods.py | from unittest import mock
import pytest
from promise import Promise
from .....order import OrderStatus
from .....order.webhooks.exclude_shipping import ExcludedShippingMethod
from ....core.utils import to_global_id_or_none
from ....tests.utils import get_graphql_content
ORDER_QUERY_SHIPPING_METHOD = """
query OrderQuery($id: ID) {
order(id: $id) {
shippingMethods {
id
name
active
message
}
availableShippingMethods {
id
name
active
message
}
}
}
"""
@mock.patch(
"saleor.order.webhooks.exclude_shipping.excluded_shipping_methods_for_order"
)
def test_order_shipping_methods(
mocked_webhook,
staff_api_client,
order_with_lines,
permission_group_manage_orders,
settings,
):
# given
order_with_lines.status = OrderStatus.UNCONFIRMED
order_with_lines.save(update_fields=["status"])
settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
webhook_reason = "spanish-inquisition"
excluded_shipping_method_id = str(order_with_lines.shipping_method.id)
mocked_webhook.return_value = Promise.resolve(
[ExcludedShippingMethod(excluded_shipping_method_id, webhook_reason)]
)
permission_group_manage_orders.user_set.add(staff_api_client.user)
# when
response = staff_api_client.post_graphql(
ORDER_QUERY_SHIPPING_METHOD,
variables={"id": to_global_id_or_none(order_with_lines)},
)
content = get_graphql_content(response)
order_data = content["data"]["order"]
shipping_methods = order_data["shippingMethods"]
# then
assert len(shipping_methods) == 1
assert not shipping_methods[0]["active"]
assert shipping_methods[0]["message"] == webhook_reason
@mock.patch(
"saleor.order.webhooks.exclude_shipping.excluded_shipping_methods_for_order"
)
def test_draft_order_shipping_methods(
mocked_webhook,
staff_api_client,
order_with_lines,
permission_group_manage_orders,
settings,
):
# given
order_with_lines.status = OrderStatus.DRAFT
order_with_lines.save(update_fields=["status"])
settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
webhook_reason = "spanish-inquisition"
excluded_shipping_method_id = str(order_with_lines.shipping_method.id)
mocked_webhook.return_value = Promise.resolve(
[ExcludedShippingMethod(excluded_shipping_method_id, webhook_reason)]
)
permission_group_manage_orders.user_set.add(staff_api_client.user)
# when
response = staff_api_client.post_graphql(
ORDER_QUERY_SHIPPING_METHOD,
variables={"id": to_global_id_or_none(order_with_lines)},
)
content = get_graphql_content(response)
order_data = content["data"]["order"]
shipping_methods = order_data["shippingMethods"]
# then
assert len(shipping_methods) == 1
assert not shipping_methods[0]["active"]
assert shipping_methods[0]["message"] == webhook_reason
@pytest.mark.parametrize(
"order_status",
[
OrderStatus.UNFULFILLED,
OrderStatus.PARTIALLY_FULFILLED,
OrderStatus.FULFILLED,
OrderStatus.CANCELED,
OrderStatus.EXPIRED,
OrderStatus.RETURNED,
OrderStatus.PARTIALLY_RETURNED,
],
)
@mock.patch(
"saleor.order.webhooks.exclude_shipping.excluded_shipping_methods_for_order"
)
def test_order_shipping_methods_skips_sync_webhook_for_non_editable_statuses(
mocked_webhook,
order_status,
staff_api_client,
order_with_lines,
permission_group_manage_orders,
settings,
):
# given
order_with_lines.status = order_status
order_with_lines.save(update_fields=["status"])
settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
permission_group_manage_orders.user_set.add(staff_api_client.user)
# when
response = staff_api_client.post_graphql(
ORDER_QUERY_SHIPPING_METHOD,
variables={"id": to_global_id_or_none(order_with_lines)},
)
content = get_graphql_content(response)
order_data = content["data"]["order"]
shipping_methods = order_data["shippingMethods"]
# then
assert not mocked_webhook.called
assert len(shipping_methods) == 1
assert shipping_methods[0]["active"]
@pytest.mark.parametrize(
("webhook_response", "expected_count"),
[
(lambda s: [ExcludedShippingMethod(str(s.id), "")], 0),
(lambda s: [], 1),
],
)
@mock.patch(
"saleor.order.webhooks.exclude_shipping.excluded_shipping_methods_for_order"
)
def test_order_available_shipping_methods(
mocked_webhook,
staff_api_client,
order_with_lines,
permission_group_manage_orders,
settings,
webhook_response,
expected_count,
):
# given
settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
order_with_lines.status = OrderStatus.UNCONFIRMED
order_with_lines.save(update_fields=["status"])
shipping_method = order_with_lines.shipping_method
def respond(*args, **kwargs):
return Promise.resolve(webhook_response(shipping_method))
mocked_webhook.side_effect = respond
permission_group_manage_orders.user_set.add(staff_api_client.user)
# when
response = staff_api_client.post_graphql(
ORDER_QUERY_SHIPPING_METHOD,
variables={"id": to_global_id_or_none(order_with_lines)},
)
content = get_graphql_content(response)
order_data = content["data"]["order"]
# then
assert len(order_data["availableShippingMethods"]) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/order/tests/queries/test_order_excluded_shipping_methods.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 166,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/order/delivery_context.py | import logging
from collections.abc import Iterable
from typing import TYPE_CHECKING, Union
from django.conf import settings
from promise import Promise
from ..shipping.interface import ExcludedShippingMethod, ShippingMethodData
from ..shipping.models import ShippingMethod, ShippingMethodChannelListing
from ..shipping.utils import (
convert_to_shipping_method_data,
initialize_shipping_method_active_status,
)
from ..warehouse.models import Warehouse
from . import (
ORDER_EDITABLE_STATUS,
)
from .models import Order, OrderLine
if TYPE_CHECKING:
from ..account.models import User
from ..app.models import App
logger = logging.getLogger(__name__)
PRIVATE_META_APP_SHIPPING_ID = "external_app_shipping_id"
def get_all_shipping_methods_for_order(
order: Order,
shipping_channel_listings: Iterable["ShippingMethodChannelListing"],
database_connection_name: str = settings.DATABASE_CONNECTION_DEFAULT_NAME,
) -> list[ShippingMethodData]:
if not order.is_shipping_required(
database_connection_name=database_connection_name
):
return []
shipping_address = order.shipping_address
if not shipping_address:
return []
all_methods = []
shipping_methods = (
ShippingMethod.objects.using(database_connection_name)
.applicable_shipping_methods_for_instance(
order,
channel_id=order.channel_id,
price=order.subtotal.gross,
shipping_address=shipping_address,
country_code=shipping_address.country.code,
database_connection_name=database_connection_name,
)
.prefetch_related("channel_listings")
)
listing_map = {
listing.shipping_method_id: listing for listing in shipping_channel_listings
}
for method in shipping_methods:
listing = listing_map.get(method.id)
if listing:
shipping_method_data = convert_to_shipping_method_data(method, listing)
all_methods.append(shipping_method_data)
return all_methods
def get_valid_shipping_methods_for_order(
order: Order,
shipping_channel_listings: Iterable["ShippingMethodChannelListing"],
requestor: Union["App", "User", None],
database_connection_name: str = settings.DATABASE_CONNECTION_DEFAULT_NAME,
allow_sync_webhooks: bool = True,
) -> Promise[list[ShippingMethodData]]:
"""Return a list of shipping methods according to Saleor's own business logic."""
# Circular import caused by the current definition of subscription payloads
# and their usage in webhook/transport layer. Until moving them out from the
# transport, we will have circular imports.
from .webhooks.exclude_shipping import (
excluded_shipping_methods_for_order,
)
valid_methods = get_all_shipping_methods_for_order(
order, shipping_channel_listings, database_connection_name
)
if not valid_methods:
return Promise.resolve([])
allow_replica = True
if database_connection_name == settings.DATABASE_CONNECTION_DEFAULT_NAME:
allow_replica = False
promised_excluded_methods: Promise[list[ExcludedShippingMethod]] = Promise.resolve(
[]
)
if order.status in ORDER_EDITABLE_STATUS and allow_sync_webhooks:
promised_excluded_methods = excluded_shipping_methods_for_order(
order, valid_methods, allow_replica=allow_replica, requestor=requestor
)
def handle_excluded_methods(excluded_methods):
initialize_shipping_method_active_status(valid_methods, excluded_methods)
return valid_methods
return promised_excluded_methods.then(handle_excluded_methods)
def get_external_shipping_id(order: "Order"):
if not order:
return None
return order.get_value_from_private_metadata(PRIVATE_META_APP_SHIPPING_ID)
def is_shipping_required(lines: Iterable["OrderLine"]):
return any(line.is_shipping_required for line in lines)
def get_valid_collection_points_for_order(
lines: Iterable["OrderLine"],
channel_id: int,
database_connection_name: str = settings.DATABASE_CONNECTION_DEFAULT_NAME,
):
if not is_shipping_required(lines):
return []
line_ids = [line.id for line in lines]
qs = OrderLine.objects.using(database_connection_name).filter(id__in=line_ids)
return Warehouse.objects.using(
database_connection_name
).applicable_for_click_and_collect(qs, channel_id)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/order/delivery_context.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 106,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/order/tests/test_shipping_context.py | from ...shipping.models import ShippingMethod, ShippingMethodType
from ..delivery_context import (
get_all_shipping_methods_for_order,
get_valid_shipping_methods_for_order,
)
def test_get_valid_shipping_methods_for_order(order_line_with_one_allocation, address):
# given
order = order_line_with_one_allocation.order
order_line_with_one_allocation.is_shipping_required = True
order_line_with_one_allocation.save(update_fields=["is_shipping_required"])
order.currency = "USD"
order.shipping_address = address
order.save(update_fields=["shipping_address"])
# when
valid_shipping_methods = get_valid_shipping_methods_for_order(
order, order.channel.shipping_method_listings.all(), None
).get()
# then
assert len(valid_shipping_methods) == 1
def test_get_valid_shipping_methods_for_order_no_channel_shipping_zones(
order_line_with_one_allocation, address
):
# given
order = order_line_with_one_allocation.order
order.channel.shipping_zones.clear()
order_line_with_one_allocation.is_shipping_required = True
order_line_with_one_allocation.save(update_fields=["is_shipping_required"])
order.currency = "USD"
order.shipping_address = address
order.save(update_fields=["shipping_address"])
# when
valid_shipping_methods = get_valid_shipping_methods_for_order(
order, order.channel.shipping_method_listings.all(), None
).get()
# then
assert len(valid_shipping_methods) == 0
def test_get_valid_shipping_methods_for_order_no_shipping_address(
order_line_with_one_allocation, address
):
# given
order = order_line_with_one_allocation.order
order_line_with_one_allocation.is_shipping_required = True
order_line_with_one_allocation.save(update_fields=["is_shipping_required"])
order.currency = "USD"
# when
valid_shipping_methods = get_valid_shipping_methods_for_order(
order, order.channel.shipping_method_listings.all(), None
).get()
# then
assert valid_shipping_methods == []
def test_get_valid_shipping_methods_for_order_shipping_not_required(
order_line_with_one_allocation, address
):
# given
order = order_line_with_one_allocation.order
order_line_with_one_allocation.is_shipping_required = False
order_line_with_one_allocation.save(update_fields=["is_shipping_required"])
order.currency = "USD"
order.shipping_address = address
order.save(update_fields=["shipping_address"])
# when
valid_shipping_methods = get_valid_shipping_methods_for_order(
order, order.channel.shipping_method_listings.all(), None
).get()
# then
assert valid_shipping_methods == []
def test_get_all_shipping_methods_returns_empty_when_shipping_not_required(
order_with_lines, address
):
# given
order = order_with_lines
order.lines.update(is_shipping_required=False)
order.shipping_address = address
order.save(update_fields=["shipping_address"])
# when
result = get_all_shipping_methods_for_order(
order, order.channel.shipping_method_listings.all()
)
# then
assert result == []
def test_get_all_shipping_methods_returns_empty_when_no_shipping_address(
order_with_lines,
):
# given
order = order_with_lines
order.shipping_address = None
order.save(update_fields=["shipping_address"])
# when
result = get_all_shipping_methods_for_order(
order, order.channel.shipping_method_listings.all()
)
# then
assert result == []
def test_get_all_shipping_methods_returns_applicable_methods_with_listings(
order_with_lines, address, shipping_zone
):
# given
order = order_with_lines
order.shipping_address = address
order.save(update_fields=["shipping_address"])
shipping_method = shipping_zone.shipping_methods.first()
# when
result = get_all_shipping_methods_for_order(
order, order.channel.shipping_method_listings.all()
)
# then
assert len(result) == 1
assert result[0].id == str(shipping_method.id)
def test_get_all_shipping_methods_excludes_methods_without_channel_listing(
order_with_lines, address, shipping_zone
):
# given
order = order_with_lines
order.shipping_address = address
order.save(update_fields=["shipping_address"])
method_without_listing = ShippingMethod.objects.create(
name="No Listing Method",
shipping_zone=shipping_zone,
type=ShippingMethodType.PRICE_BASED,
)
# when
result = get_all_shipping_methods_for_order(
order, order.channel.shipping_method_listings.all()
)
# then
assert len(result) == 1
existing_method = shipping_zone.shipping_methods.first()
assert result[0].id == str(existing_method.id)
assert method_without_listing.id not in [int(m.id) for m in result]
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/order/tests/test_shipping_context.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 129,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/order/tests/webhooks/test_exclude_shipping.py | import json
import uuid
from decimal import Decimal
from unittest import mock
from unittest.mock import call
import graphene
import pytest
from measurement.measures import Weight
from prices import Money
from promise import Promise
from ....core.prices import quantize_price
from ....shipping.interface import ShippingMethodData
from ....shipping.webhooks.shared import CACHE_EXCLUDED_SHIPPING_TIME
from ....webhook.event_types import WebhookEventSyncType
from ....webhook.models import Webhook
from ....webhook.transport.utils import generate_cache_key_for_webhook
from ...webhooks.exclude_shipping import (
excluded_shipping_methods_for_order,
generate_excluded_shipping_methods_for_order_payload,
)
ORDER_QUERY_SHIPPING_METHOD = """
query OrderQuery($id: ID) {
order(id: $id) {
shippingMethods {
id
name
active
message
}
availableShippingMethods {
id
name
active
message
}
}
}
"""
@pytest.fixture
def available_shipping_methods():
methods = []
for i in range(2):
methods.append(
ShippingMethodData(
id=str(i),
price=Money(Decimal(10), "usd"),
name=uuid.uuid4().hex,
maximum_order_weight=Weight(kg=0),
minimum_order_weight=Weight(kg=0),
maximum_delivery_days=0,
minimum_delivery_days=5,
)
)
return methods
@mock.patch("saleor.order.webhooks.exclude_shipping.get_excluded_shipping_data")
def test_excluded_shipping_methods_for_order_dont_run_webhook_on_missing_shipping_methods(
mocked_get_excluded_shipping_data, draft_order
):
# given
empty_shipping_methods = []
# when
excluded_shipping_methods_for_order(
order=draft_order,
available_shipping_methods=empty_shipping_methods,
allow_replica=False,
requestor=None,
).get()
# then
mocked_get_excluded_shipping_data.assert_not_called()
@mock.patch("saleor.webhook.transport.synchronous.transport.cache.set")
@mock.patch(
"saleor.webhook.transport.synchronous.transport.trigger_webhook_sync_promise"
)
@mock.patch(
"saleor.order.webhooks.exclude_shipping.generate_excluded_shipping_methods_for_order_payload"
)
def test_excluded_shipping_methods_for_order(
mocked_payload,
mocked_webhook,
mocked_cache_set,
order_with_lines,
available_shipping_methods,
app_exclude_shipping_for_order,
settings,
):
# given
shipping_app = app_exclude_shipping_for_order
shipping_webhook = shipping_app.webhooks.get()
webhook_reason = "Order contains dangerous products."
shipping_method_id_to_exclude = available_shipping_methods[0].id
webhook_response = {
"excluded_methods": [
{
"id": graphene.Node.to_global_id(
"ShippingMethod", shipping_method_id_to_exclude
),
"reason": webhook_reason,
}
]
}
mocked_webhook.return_value = Promise.resolve(webhook_response)
payload_dict = {"order": {"id": 1, "some_field": "12"}}
payload = json.dumps(payload_dict)
mocked_payload.return_value = payload
expected_cache_key_data = {
"order": {
"id": 1,
"some_field": "12",
"base_shipping_price_amount": str(
quantize_price(
order_with_lines.base_shipping_price.amount,
order_with_lines.currency,
)
),
"lines_pricing": [
{
"base_unit_price_amount": str(
quantize_price(
line.base_unit_price.amount, order_with_lines.currency
)
),
}
for line in order_with_lines.lines.all()
],
}
}
# when
excluded_methods = excluded_shipping_methods_for_order(
order=order_with_lines,
available_shipping_methods=available_shipping_methods,
allow_replica=False,
requestor=None,
).get()
# then
assert len(excluded_methods) == 1
excluded_method = excluded_methods[0]
assert excluded_method.id == shipping_method_id_to_exclude
assert webhook_reason in excluded_method.reason
mocked_webhook.assert_called_once_with(
WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
shipping_webhook,
False,
static_payload=payload,
subscribable_object=(order_with_lines, available_shipping_methods),
timeout=settings.WEBHOOK_SYNC_TIMEOUT,
request=None,
requestor=None,
)
expected_cache_key = generate_cache_key_for_webhook(
expected_cache_key_data,
shipping_webhook.target_url,
WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
shipping_app.id,
)
mocked_cache_set.assert_called_once_with(
expected_cache_key,
webhook_response,
timeout=CACHE_EXCLUDED_SHIPPING_TIME,
)
@mock.patch("saleor.webhook.transport.synchronous.transport.cache.set")
@mock.patch(
    "saleor.webhook.transport.synchronous.transport.trigger_webhook_sync_promise"
)
@mock.patch(
    "saleor.order.webhooks.exclude_shipping.generate_excluded_shipping_methods_for_order_payload"
)
def test_multiple_app_with_excluded_shipping_methods_for_order(
    mocked_payload,
    mocked_webhook,
    mocked_cache_set,
    order_with_lines,
    available_shipping_methods,
    app_exclude_shipping_for_order,
    second_app_exclude_shipping_for_order,
    settings,
):
    """Exclusions from two apps are merged per method and cached per webhook.

    A method excluded by both apps ends up with both reasons concatenated,
    and each app's raw response is cached under its own cache key.
    """
    # given
    shipping_app = app_exclude_shipping_for_order
    shipping_webhook = shipping_app.webhooks.get()
    second_shipping_app = second_app_exclude_shipping_for_order
    second_shipping_webhook = second_shipping_app.webhooks.get()
    webhook_reason = "Order contains dangerous products."
    webhook_second_reason = "Shipping is not applicable for this order."
    first_excluded_id = available_shipping_methods[0].id
    second_excluded_id = available_shipping_methods[1].id
    # Both apps exclude the first method (different reasons); only the
    # second app also excludes the second method.
    first_webhook_response = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id(
                    "ShippingMethod",
                    first_excluded_id,
                ),
                "reason": webhook_reason,
            }
        ]
    }
    second_webhook_response = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id(
                    "ShippingMethod",
                    first_excluded_id,
                ),
                "reason": webhook_second_reason,
            },
            {
                "id": graphene.Node.to_global_id(
                    "ShippingMethod",
                    second_excluded_id,
                ),
                "reason": webhook_second_reason,
            },
        ]
    }
    # One queued response per webhook, consumed in trigger order.
    mocked_webhook.side_effect = [
        Promise.resolve(first_webhook_response),
        Promise.resolve(second_webhook_response),
    ]
    payload_dict = {"order": {"id": 1, "some_field": "12"}}
    payload = json.dumps(payload_dict)
    mocked_payload.return_value = payload
    # Cache keys are built from the payload enriched with base prices —
    # mirrors _get_cache_data_for_exclude_shipping_methods.
    expected_cache_key_data = {
        "order": {
            "id": 1,
            "some_field": "12",
            "base_shipping_price_amount": str(
                quantize_price(
                    order_with_lines.base_shipping_price.amount,
                    order_with_lines.currency,
                )
            ),
            "lines_pricing": [
                {
                    "base_unit_price_amount": str(
                        quantize_price(
                            line.base_unit_price.amount, order_with_lines.currency
                        )
                    ),
                }
                for line in order_with_lines.lines.all()
            ],
        }
    }
    # when
    excluded_methods = excluded_shipping_methods_for_order(
        order=order_with_lines,
        available_shipping_methods=available_shipping_methods,
        allow_replica=False,
        requestor=None,
    ).get()
    # then
    assert len(excluded_methods) == 2
    # The method excluded by both apps carries both reasons.
    first_excluded_method_data = next(
        em for em in excluded_methods if em.id == first_excluded_id
    )
    assert webhook_reason in first_excluded_method_data.reason
    assert webhook_second_reason in first_excluded_method_data.reason
    event_type = WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS
    mocked_webhook.assert_any_call(
        event_type,
        shipping_webhook,
        False,
        static_payload=payload,
        subscribable_object=(order_with_lines, available_shipping_methods),
        timeout=settings.WEBHOOK_SYNC_TIMEOUT,
        request=None,
        requestor=None,
    )
    mocked_webhook.assert_any_call(
        event_type,
        second_shipping_webhook,
        False,
        static_payload=payload,
        subscribable_object=(order_with_lines, available_shipping_methods),
        timeout=settings.WEBHOOK_SYNC_TIMEOUT,
        request=None,
        requestor=None,
    )
    assert mocked_webhook.call_count == 2
    # Each app gets its own cache entry, keyed by target URL and app id.
    expected_cache_for_first_webhook_key = generate_cache_key_for_webhook(
        expected_cache_key_data,
        shipping_webhook.target_url,
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
        shipping_app.id,
    )
    expected_cache_for_second_webhook_key = generate_cache_key_for_webhook(
        expected_cache_key_data,
        second_shipping_webhook.target_url,
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
        second_shipping_app.id,
    )
    assert expected_cache_for_first_webhook_key != expected_cache_for_second_webhook_key
    mocked_cache_set.assert_has_calls(
        [
            call(
                expected_cache_for_first_webhook_key,
                first_webhook_response,
                timeout=CACHE_EXCLUDED_SHIPPING_TIME,
            ),
            call(
                expected_cache_for_second_webhook_key,
                second_webhook_response,
                timeout=CACHE_EXCLUDED_SHIPPING_TIME,
            ),
        ]
    )
@mock.patch("saleor.webhook.transport.synchronous.transport.cache.set")
@mock.patch(
    "saleor.webhook.transport.synchronous.transport.trigger_webhook_sync_promise"
)
@mock.patch(
    "saleor.order.webhooks.exclude_shipping.generate_excluded_shipping_methods_for_order_payload"
)
def test_multiple_webhooks_on_the_same_app_with_excluded_shipping_methods_for_order(
    mocked_payload,
    mocked_webhook,
    mocked_cache_set,
    order_with_lines,
    available_shipping_methods,
    app_exclude_shipping_for_order,
    settings,
):
    """Two webhooks on ONE app are both triggered and cached independently.

    Same merging behavior as the multi-app case, but both webhooks belong to
    the same app — the cache keys differ by target URL only.
    """
    # given
    shipping_app = app_exclude_shipping_for_order
    first_webhook = shipping_app.webhooks.get()
    event_type = WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS
    # create the second webhook with the same event
    second_webhook = Webhook.objects.create(
        name="shipping-webhook-1",
        app=shipping_app,
        target_url="https://shipping-gateway.com/apiv2/",
    )
    second_webhook.events.create(
        event_type=event_type,
        webhook=second_webhook,
    )
    webhook_reason = "Order contains dangerous products."
    webhook_second_reason = "Shipping is not applicable for this order."
    # Method "1" is excluded by both webhooks, method "2" only by the second.
    first_webhook_response = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "1"),
                "reason": webhook_reason,
            }
        ]
    }
    second_webhook_response = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "1"),
                "reason": webhook_second_reason,
            },
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "2"),
                "reason": webhook_second_reason,
            },
        ]
    }
    # One queued response per webhook, consumed in trigger order.
    mocked_webhook.side_effect = [
        Promise.resolve(first_webhook_response),
        Promise.resolve(second_webhook_response),
    ]
    payload_dict = {"order": {"id": 1, "some_field": "12"}}
    payload = json.dumps(payload_dict)
    mocked_payload.return_value = payload
    # Expected cache-key payload: static payload plus injected base prices.
    expected_cache_key_data = {
        "order": {
            "id": 1,
            "some_field": "12",
            "base_shipping_price_amount": str(
                quantize_price(
                    order_with_lines.base_shipping_price.amount,
                    order_with_lines.currency,
                )
            ),
            "lines_pricing": [
                {
                    "base_unit_price_amount": str(
                        quantize_price(
                            line.base_unit_price.amount, order_with_lines.currency
                        )
                    ),
                }
                for line in order_with_lines.lines.all()
            ],
        }
    }
    # when
    excluded_methods = excluded_shipping_methods_for_order(
        order=order_with_lines,
        available_shipping_methods=available_shipping_methods,
        allow_replica=False,
        requestor=None,
    ).get()
    # then
    assert len(excluded_methods) == 2
    # The doubly-excluded method carries both webhook reasons.
    em_1 = next(em for em in excluded_methods if em.id == "1")
    assert webhook_reason in em_1.reason
    assert webhook_second_reason in em_1.reason
    mocked_webhook.assert_any_call(
        event_type,
        first_webhook,
        False,
        static_payload=payload,
        subscribable_object=(order_with_lines, available_shipping_methods),
        timeout=settings.WEBHOOK_SYNC_TIMEOUT,
        request=None,
        requestor=None,
    )
    mocked_webhook.assert_any_call(
        event_type,
        second_webhook,
        False,
        static_payload=payload,
        subscribable_object=(order_with_lines, available_shipping_methods),
        timeout=settings.WEBHOOK_SYNC_TIMEOUT,
        request=None,
        requestor=None,
    )
    assert mocked_webhook.call_count == 2
    # Same app id for both keys — they differ only in target URL.
    expected_cache_for_first_webhook_key = generate_cache_key_for_webhook(
        expected_cache_key_data,
        first_webhook.target_url,
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
        shipping_app.id,
    )
    expected_cache_for_second_webhook_key = generate_cache_key_for_webhook(
        expected_cache_key_data,
        second_webhook.target_url,
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
        shipping_app.id,
    )
    assert expected_cache_for_first_webhook_key != expected_cache_for_second_webhook_key
    mocked_cache_set.assert_has_calls(
        [
            call(
                expected_cache_for_first_webhook_key,
                first_webhook_response,
                timeout=CACHE_EXCLUDED_SHIPPING_TIME,
            ),
            call(
                expected_cache_for_second_webhook_key,
                second_webhook_response,
                timeout=CACHE_EXCLUDED_SHIPPING_TIME,
            ),
        ]
    )
def test_generate_excluded_shipping_methods_for_order_payload(
    order_with_lines,
    available_shipping_methods,
):
    """Payload carries the order global id and one entry per shipping method."""
    # given
    methods = available_shipping_methods
    # when
    raw_payload = generate_excluded_shipping_methods_for_order_payload(
        order=order_with_lines, available_shipping_methods=methods
    )
    payload = json.loads(raw_payload)
    # then
    method_entries = payload["shipping_methods"]
    assert len(method_entries) == 2
    # Entries appear in the same order as the input methods.
    for entry, method in zip(method_entries, methods):
        assert entry["id"] == graphene.Node.to_global_id("ShippingMethod", method.id)
    expected_order_id = graphene.Node.to_global_id("Order", order_with_lines.id)
    assert payload["order"]["id"] == expected_order_id
@mock.patch("saleor.order.calculations.fetch_order_prices_if_expired")
def test_generate_excluded_shipping_methods_for_order(mocked_fetch, order):
    """Serialized method exposes price, weight bounds and delivery days."""
    # given
    method = ShippingMethodData(
        id="123",
        price=Money(Decimal("10.59"), "USD"),
        name="shipping",
        maximum_order_weight=Weight(kg=10),
        minimum_order_weight=Weight(g=1),
        maximum_delivery_days=10,
        minimum_delivery_days=2,
    )
    # when
    payload = json.loads(
        generate_excluded_shipping_methods_for_order_payload(order, [method])
    )
    # then
    assert "order" in payload
    expected_method_payload = {
        "id": graphene.Node.to_global_id("ShippingMethod", "123"),
        "price": "10.59",
        "currency": "USD",
        "name": "shipping",
        "maximum_order_weight": "10.0:kg",
        "minimum_order_weight": "1.0:g",
        "maximum_delivery_days": 10,
        "minimum_delivery_days": 2,
    }
    assert payload["shipping_methods"] == [expected_method_payload]
    # Payload generation must not refresh order prices as a side effect.
    mocked_fetch.assert_not_called()
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/order/tests/webhooks/test_exclude_shipping.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 497,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/order/tests/webhooks/test_exclude_shipping_cache.py | import json
import uuid
from decimal import Decimal
from unittest import mock
import graphene
import pytest
from measurement.measures import Weight
from prices import Money
from ....core.prices import quantize_price
from ....shipping.interface import ShippingMethodData
from ....shipping.webhooks.shared import CACHE_EXCLUDED_SHIPPING_TIME
from ....webhook.event_types import WebhookEventSyncType
from ....webhook.transport.utils import generate_cache_key_for_webhook
from ...webhooks.exclude_shipping import (
excluded_shipping_methods_for_order,
)
@pytest.fixture
def available_shipping_methods():
    """Two interchangeable shipping methods with randomized names."""

    def _build(method_id):
        # Identical attributes apart from the id; the name is random so
        # tests cannot accidentally depend on it.
        return ShippingMethodData(
            id=method_id,
            price=Money(Decimal(10), "usd"),
            name=uuid.uuid4().hex,
            maximum_order_weight=Weight(kg=0),
            minimum_order_weight=Weight(kg=0),
            maximum_delivery_days=0,
            minimum_delivery_days=5,
        )

    return [_build("1"), _build("2")]
@mock.patch("saleor.webhook.transport.synchronous.transport.cache.get")
@mock.patch("saleor.webhook.transport.synchronous.transport.cache.set")
@mock.patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
@mock.patch(
    "saleor.order.webhooks.exclude_shipping.generate_excluded_shipping_methods_for_order_payload"
)
def test_excluded_shipping_methods_for_order_use_cache(
    mocked_payload,
    mocked_webhook,
    mocked_cache_set,
    mocked_cache_get,
    order_with_lines,
    available_shipping_methods,
    app_exclude_shipping_for_order,
):
    """A cached response suppresses both the webhook call and the cache write."""
    # given
    webhook_reason = "Order contains dangerous products."
    # Webhook response is configured but must never be requested.
    mocked_webhook.return_value = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "1"),
                "reason": webhook_reason,
            }
        ]
    }
    payload = json.dumps({"order": {"id": 1, "some_field": "12"}})
    mocked_payload.return_value = payload
    # Cache hit: a (payload, excluded_methods) tuple for the same payload.
    mocked_cache_get.return_value = (payload, [{"id": "1", "reason": webhook_reason}])
    # when
    excluded_shipping_methods_for_order(
        order=order_with_lines,
        available_shipping_methods=available_shipping_methods,
        allow_replica=False,
        requestor=None,
    )
    # then
    assert not mocked_webhook.called
    assert not mocked_cache_set.called
@mock.patch("saleor.webhook.transport.synchronous.transport.cache.get")
@mock.patch("saleor.webhook.transport.synchronous.transport.cache.set")
@mock.patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
@mock.patch(
    "saleor.order.webhooks.exclude_shipping.generate_excluded_shipping_methods_for_order_payload"
)
def test_excluded_shipping_methods_for_order_stores_in_cache_when_empty(
    mocked_payload,
    mocked_webhook,
    mocked_cache_set,
    mocked_cache_get,
    order_with_lines,
    available_shipping_methods,
    app_exclude_shipping_for_order,
):
    """On a cache miss the webhook is called and its response is cached."""
    # given
    shipping_app = app_exclude_shipping_for_order
    shipping_webhook = shipping_app.webhooks.get()
    webhook_reason = "Order contains dangerous products."
    webhook_response = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "1"),
                "reason": webhook_reason,
            }
        ]
    }
    mocked_webhook.return_value = webhook_response
    payload_dict = {"order": {"id": 1, "some_field": "12"}}
    payload = json.dumps(payload_dict)
    mocked_payload.return_value = payload
    # Expected cache-key payload: static payload enriched with base prices
    # (mirrors _get_cache_data_for_exclude_shipping_methods).
    expected_cache_key_data = {
        "order": {
            "id": 1,
            "some_field": "12",
            "base_shipping_price_amount": str(
                quantize_price(
                    order_with_lines.base_shipping_price.amount,
                    order_with_lines.currency,
                )
            ),
            "lines_pricing": [
                {
                    "base_unit_price_amount": str(
                        quantize_price(
                            line.base_unit_price.amount, order_with_lines.currency
                        )
                    ),
                }
                for line in order_with_lines.lines.all()
            ],
        }
    }
    # Empty cache forces the webhook round-trip.
    mocked_cache_get.return_value = None
    # when
    excluded_shipping_methods_for_order(
        order=order_with_lines,
        available_shipping_methods=available_shipping_methods,
        allow_replica=False,
        requestor=None,
    )
    # then
    assert mocked_webhook.called
    expected_cache_key = generate_cache_key_for_webhook(
        expected_cache_key_data,
        shipping_webhook.target_url,
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
        shipping_app.id,
    )
    mocked_cache_set.assert_called_once_with(
        expected_cache_key,
        webhook_response,
        timeout=CACHE_EXCLUDED_SHIPPING_TIME,
    )
@mock.patch("saleor.webhook.transport.synchronous.transport.cache.get")
@mock.patch("saleor.webhook.transport.synchronous.transport.cache.set")
@mock.patch("saleor.webhook.transport.synchronous.transport.send_webhook_request_sync")
@mock.patch(
    "saleor.order.webhooks.exclude_shipping.generate_excluded_shipping_methods_for_order_payload"
)
def test_excluded_shipping_methods_for_order_stores_in_cache_when_payload_is_different(
    mocked_payload,
    mocked_webhook,
    mocked_cache_set,
    mocked_cache_get,
    order_with_lines,
    available_shipping_methods,
    app_exclude_shipping_for_order,
):
    """Cache is consulted with the derived key and refreshed on a miss."""
    # given
    shipping_app = app_exclude_shipping_for_order
    shipping_webhook = shipping_app.webhooks.get()
    webhook_reason = "Order contains dangerous products."
    webhook_response = {
        "excluded_methods": [
            {
                "id": graphene.Node.to_global_id("ShippingMethod", "1"),
                "reason": webhook_reason,
            }
        ]
    }
    mocked_webhook.return_value = webhook_response
    payload_dict = {"order": {"id": 1, "some_field": "12"}}
    payload = json.dumps(payload_dict)
    mocked_payload.return_value = payload
    # Expected cache-key payload: static payload enriched with base prices
    # (mirrors _get_cache_data_for_exclude_shipping_methods).
    expected_cache_key_data = {
        "order": {
            "id": 1,
            "some_field": "12",
            "base_shipping_price_amount": str(
                quantize_price(
                    order_with_lines.base_shipping_price.amount,
                    order_with_lines.currency,
                )
            ),
            "lines_pricing": [
                {
                    "base_unit_price_amount": str(
                        quantize_price(
                            line.base_unit_price.amount, order_with_lines.currency
                        )
                    ),
                }
                for line in order_with_lines.lines.all()
            ],
        }
    }
    # Cache miss for this payload — a new entry must be written.
    mocked_cache_get.return_value = None
    # when
    excluded_shipping_methods_for_order(
        order=order_with_lines,
        available_shipping_methods=available_shipping_methods,
        allow_replica=False,
        requestor=None,
    )
    # then
    assert mocked_webhook.called
    expected_cache_key = generate_cache_key_for_webhook(
        expected_cache_key_data,
        shipping_webhook.target_url,
        WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS,
        shipping_app.id,
    )
    # The lookup and the write both use the same derived key.
    mocked_cache_get.assert_called_once_with(expected_cache_key)
    mocked_cache_set.assert_called_once_with(
        expected_cache_key,
        webhook_response,
        timeout=CACHE_EXCLUDED_SHIPPING_TIME,
    )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/order/tests/webhooks/test_exclude_shipping_cache.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 225,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/order/webhooks/exclude_shipping.py | import json
import logging
from typing import TYPE_CHECKING, Union
from promise import Promise
from ...core.db.connection import allow_writer
from ...core.prices import quantize_price
from ...core.utils.json_serializer import CustomJsonEncoder
from ...shipping.interface import ExcludedShippingMethod, ShippingMethodData
from ...shipping.webhooks.shared import (
generate_payload_for_shipping_method,
get_excluded_shipping_data,
)
from ...webhook import traced_payload_generator
from ...webhook.event_types import WebhookEventSyncType
from ...webhook.payloads import (
generate_order_payload,
)
from ...webhook.utils import get_webhooks_for_event
if TYPE_CHECKING:
from ...account.models import User
from ...app.models import App
from ...order.models import Order
logger = logging.getLogger(__name__)
@allow_writer()
@traced_payload_generator
def generate_excluded_shipping_methods_for_order_payload(
    order: "Order",
    available_shipping_methods: list[ShippingMethodData],
):
    """Serialize the order and its candidate shipping methods to JSON.

    Used as the static payload for ORDER_FILTER_SHIPPING_METHODS webhooks.
    """
    # generate_order_payload returns a JSON array with a single order entry.
    serialized_order = json.loads(generate_order_payload(order))[0]
    methods_data = []
    for method in available_shipping_methods:
        methods_data.append(generate_payload_for_shipping_method(method))
    return json.dumps(
        {"order": serialized_order, "shipping_methods": methods_data},
        cls=CustomJsonEncoder,
    )
def excluded_shipping_methods_for_order(
    order: "Order",
    available_shipping_methods: list["ShippingMethodData"],
    allow_replica: bool,
    requestor: Union["App", "User", None],
) -> Promise[list[ExcludedShippingMethod]]:
    """Ask ORDER_FILTER_SHIPPING_METHODS webhooks which methods to exclude.

    Resolves to an empty list immediately when there is nothing to filter
    or no webhook subscribes to the event.
    """
    event_type = WebhookEventSyncType.ORDER_FILTER_SHIPPING_METHODS
    # No methods to filter -> nothing to ask the webhooks about.
    if not available_shipping_methods:
        return Promise.resolve([])
    webhooks = get_webhooks_for_event(event_type)
    if not webhooks:
        return Promise.resolve([])
    payload = generate_excluded_shipping_methods_for_order_payload(
        order,
        available_shipping_methods,
    )
    return get_excluded_shipping_data(
        webhooks=webhooks,
        event_type=event_type,
        static_payload=payload,
        subscribable_object=(order, available_shipping_methods),
        allow_replica=allow_replica,
        requestor=requestor,
        cache_data=_get_cache_data_for_exclude_shipping_methods(order, payload),
    )
def _get_cache_data_for_exclude_shipping_methods(order: "Order", payload: str) -> dict:
    """Build the normalized payload dict used as the webhook cache key.

    Strips fields that vary between requests (tax-app prices, timestamps)
    and injects the base prices that do affect the exclusion decision, so
    the cache key stays stable for equivalent orders.
    """
    cache_payload = json.loads(payload)
    order_data = cache_payload.get("order", {})
    # Order-level fields a tax app may rewrite, plus request-dependent
    # fields that are irrelevant for the cache key.
    volatile_order_fields = (
        "shipping_price_gross_amount",
        "shipping_price_net_amount",
        "total_net_amount",
        "total_gross_amount",
        "shipping_tax_rate",
        "undiscounted_total_net_amount",
        "undiscounted_total_gross_amount",
        "last_change",
        "meta",
        "shipping_method",
    )
    for field_name in volatile_order_fields:
        order_data.pop(field_name, None)
    # Line-level fields a tax app may rewrite.
    volatile_line_fields = (
        "undiscounted_unit_price_gross_amount",
        "undiscounted_unit_price_net_amount",
        "undiscounted_total_price_net_amount",
        "undiscounted_total_price_gross_amount",
        "unit_price_net_amount",
        "unit_price_gross_amount",
        "tax_rate",
        "total_price_net_amount",
        "total_price_gross_amount",
    )
    for line_data in order_data.get("lines", []):
        for field_name in volatile_line_fields:
            line_data.pop(field_name, None)
    # Base prices influence the exclusion result — include them in the key.
    order_data["base_shipping_price_amount"] = str(
        quantize_price(order.base_shipping_price_amount, order.currency)
    )
    order_data["lines_pricing"] = [
        {
            "base_unit_price_amount": str(
                quantize_price(order_line.base_unit_price_amount, order.currency)
            ),
        }
        for order_line in order.lines.all()
    ]
    return cache_payload
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/order/webhooks/exclude_shipping.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 111,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/asgi/tests/test_usage_telemetry.py | from datetime import timedelta
from django.utils import timezone
from ..usage_telemetry import get_usage_telemetry
def test_get_usage_telemetry(site_settings):
    """Telemetry payload exposes the expected instance and usage keys."""
    # given — no previous report recorded, so gathering must proceed.
    site_settings.usage_telemetry_reported_at = None
    site_settings.save(update_fields=["usage_telemetry_reported_at"])
    # when
    data = get_usage_telemetry()
    # then
    assert data is not None
    assert "reported_at" in data
    assert set(data["instance"]) == {
        "instance_id",
        "python_version",
        "saleor_version",
        "is_debug",
        "is_local",
    }
    assert set(data["usage"]) == {
        "app_count",
        "attribute_count",
        "attribute_entity_type_count",
        "attribute_type_count",
        "attribute_input_type_count",
        "attribute_page_count",
        "attribute_variant_count",
        "attribute_product_count",
        "channel_count",
        "currencies",
        "model_count",
        "product_count",
        "saleor_apps",
    }
def test_get_usage_telemetry_checks_reported_at(site_settings, settings):
    """Data is gathered again once the configured reporting interval passed."""
    # given — the last report is older than the configured interval.
    last_report = timezone.now() - timedelta(days=30)
    site_settings.usage_telemetry_reported_at = last_report
    site_settings.save(update_fields=["usage_telemetry_reported_at"])
    # sanity: the interval really is shorter than the simulated gap
    assert settings.SEND_USAGE_TELEMETRY_AFTER_TIMEDELTA.days < 30
    # when
    data = get_usage_telemetry()
    # then
    assert data is not None
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/asgi/tests/test_usage_telemetry.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 47,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/asgi/usage_telemetry.py | import asyncio
import hashlib
import json
import logging
import random
import sys
from asgiref.sync import sync_to_async
from asgiref.typing import (
ASGI3Application,
ASGIReceiveCallable,
ASGISendCallable,
Scope,
)
from django.apps import apps
from django.conf import settings
from django.db import connections
from django.db.models import Count
from django.utils import timezone
from .. import __version__ as saleor_version
from ..attribute import AttributeEntityType, AttributeInputType, AttributeType
from ..core.http_client import HTTPClient
logger = logging.getLogger(__name__)
async def send_usage_telemetry_task():
    """Gather and send usage telemetry once, resetting state on failure."""
    # Multiple workers from the same parent process will start almost at the same time.
    # Randomize the start of actual logic to avoid sending data more than once.
    await asyncio.sleep(random.randint(0, 5))
    try:
        # Both helpers do blocking ORM/HTTP work — run them off the event loop.
        data = await sync_to_async(get_usage_telemetry, thread_sensitive=False)()
        if data is None:
            # Telemetry was reported recently enough; nothing to do.
            return
        await sync_to_async(send_usage_telemetry, thread_sensitive=False)(data)
    except Exception:
        logger.exception("Sending usage telemetry has failed")
        # Sending usage telemetry data failed, reset the field so during subsequent startup procedure
        # another attempt will be made.
        await sync_to_async(update_usage_telemetry_reported_at, thread_sensitive=False)(
            dt=None, close_connections=True
        )
def get_usage_telemetry():
    """Gather usage telemetry data.

    Data will not be gathered if usage telemetry was recently sent.
    Returns a dict with ``instance`` metadata, aggregated ``usage`` counters
    and a ``reported_at`` timestamp, or ``None`` when telemetry was already
    reported within ``SEND_USAGE_TELEMETRY_AFTER_TIMEDELTA``.
    """
    try:
        Site = apps.get_model("sites", "Site")
        site_settings = Site.objects.get_current().settings
        usage_telemetry_reported_at = site_settings.usage_telemetry_reported_at
        cutoff_datetime = timezone.now() - settings.SEND_USAGE_TELEMETRY_AFTER_TIMEDELTA
        if (
            usage_telemetry_reported_at
            and usage_telemetry_reported_at > cutoff_datetime
        ):
            # Reported recently enough — skip this run.
            return None
        # Mark as reported up-front so concurrent workers don't double-send.
        update_usage_telemetry_reported_at(dt=timezone.now(), close_connections=False)
        instance = {
            "instance_id": str(site_settings.instance_id),
            "saleor_version": saleor_version,
            "python_version": sys.version,
            "is_debug": settings.DEBUG,
            "is_local": isinstance(settings.PUBLIC_URL, str)
            and (
                "localhost" in settings.PUBLIC_URL or "127.0.0.1" in settings.PUBLIC_URL
            ),
        }
        usage = {}
        # All counters below are read from the replica connection.
        Product = apps.get_model("product", "Product")
        usage["product_count"] = Product.objects.using(
            settings.DATABASE_CONNECTION_REPLICA_NAME
        ).count()
        Attribute = apps.get_model("attribute", "Attribute")
        usage["attribute_count"] = Attribute.objects.using(
            settings.DATABASE_CONNECTION_REPLICA_NAME
        ).count()
        # Pre-seed every choice with 0 so absent types still show up.
        usage["attribute_type_count"] = {t[0]: 0 for t in AttributeType.CHOICES}
        for item in (
            Attribute.objects.using(settings.DATABASE_CONNECTION_REPLICA_NAME)
            .values("type")
            .annotate(total=Count("type"))
        ):
            usage["attribute_type_count"][item["type"]] = item["total"]
        usage["attribute_entity_type_count"] = {
            t[0]: 0 for t in AttributeEntityType.CHOICES
        }
        # entity_type can be NULL or empty — exclude both before counting.
        for item in (
            Attribute.objects.using(settings.DATABASE_CONNECTION_REPLICA_NAME)
            .exclude(entity_type__isnull=True)
            .exclude(entity_type="")
            .values("entity_type")
            .annotate(total=Count("entity_type"))
        ):
            usage["attribute_entity_type_count"][item["entity_type"]] = item["total"]
        usage["attribute_input_type_count"] = {
            t[0]: 0 for t in AttributeInputType.CHOICES
        }
        for item in (
            Attribute.objects.using(settings.DATABASE_CONNECTION_REPLICA_NAME)
            .values("input_type")
            .annotate(total=Count("input_type"))
        ):
            usage["attribute_input_type_count"][item["input_type"]] = item["total"]
        AttributePage = apps.get_model("attribute", "AttributePage")
        usage["attribute_page_count"] = AttributePage.objects.using(
            settings.DATABASE_CONNECTION_REPLICA_NAME
        ).count()
        AttributeProduct = apps.get_model("attribute", "AttributeProduct")
        usage["attribute_product_count"] = AttributeProduct.objects.using(
            settings.DATABASE_CONNECTION_REPLICA_NAME
        ).count()
        AttributeVariant = apps.get_model("attribute", "AttributeVariant")
        usage["attribute_variant_count"] = AttributeVariant.objects.using(
            settings.DATABASE_CONNECTION_REPLICA_NAME
        ).count()
        Page = apps.get_model("page", "Page")  # also known as Model
        usage["model_count"] = Page.objects.using(
            settings.DATABASE_CONNECTION_REPLICA_NAME
        ).count()
        Channel = apps.get_model("channel", "Channel")
        usage["channel_count"] = Channel.objects.using(
            settings.DATABASE_CONNECTION_REPLICA_NAME
        ).count()
        usage["currencies"] = list(
            Channel.objects.using(settings.DATABASE_CONNECTION_REPLICA_NAME)
            .distinct("currency_code")
            .values_list("currency_code", flat=True)
            .order_by("currency_code")
        )
        App = apps.get_model("app", "App")
        # Only official Saleor apps are listed by identifier.
        usage["saleor_apps"] = list(
            App.objects.using(settings.DATABASE_CONNECTION_REPLICA_NAME)
            .filter(identifier__startswith="saleor.app")
            .values_list("identifier", flat=True)
        )
        usage["app_count"] = App.objects.using(
            settings.DATABASE_CONNECTION_REPLICA_NAME
        ).count()
        return {
            "instance": instance,
            "usage": usage,
            "reported_at": timezone.now().isoformat(timespec="seconds"),
        }
    finally:
        # Close DB connections opened by this off-loop thread.
        connections.close_all()
def send_usage_telemetry(data: dict):
    """POST the telemetry payload to the collection endpoint.

    Args:
        data: The telemetry dict produced by ``get_usage_telemetry``.

    Returns:
        bool: True when the endpoint accepted the payload with HTTP 200.
    """
    url = "https://usage-telemetry.saleor.io/"
    logger.info("Sending usage telemetry data: %s to: %s", data, url)
    json_data = json.dumps(data)
    headers = {
        "content-type": "application/json",
        # underlying infrastructure requires hash of the data
        "x-amz-content-sha256": hashlib.sha256(json_data.encode("utf-8")).hexdigest(),
    }
    # Send the exact bytes that were hashed. Passing `json=data` would let
    # the HTTP client re-serialize the payload itself, and any difference in
    # serializer settings would make the body disagree with the checksum
    # header above.
    resp = HTTPClient.send_request(
        "POST",
        url,
        data=json_data.encode("utf-8"),
        headers=headers,
        allow_redirects=False,
    )
    return resp.status_code == 200
def update_usage_telemetry_reported_at(dt, close_connections):
    """Persist the timestamp of the last telemetry report on site settings.

    Pass ``dt=None`` to clear the marker so a later startup retries sending.
    """
    Site = apps.get_model("sites", "Site")
    try:
        settings_record = Site.objects.get_current().settings
        settings_record.usage_telemetry_reported_at = dt
        settings_record.save(update_fields=["usage_telemetry_reported_at"])
    finally:
        # Callers running off the event loop clean up their DB connections.
        if close_connections:
            connections.close_all()
def usage_telemetry_middleware(application: ASGI3Application) -> ASGI3Application:
    """Send usage telemetry data.

    Saleor does not extract any personal data from your Saleor instance.
    Find more about motivation and how we use this data to improve Saleor at: https://docs.saleor.io/setup/usage-telemetry.
    """
    async def wrapper(
        scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable
    ) -> None:
        # Only the lifespan protocol is intercepted; HTTP/websocket scopes
        # pass straight through to the wrapped application.
        if scope.get("type") != "lifespan":
            return await application(scope, receive, send)
        while True:
            message = await receive()
            if message["type"] == "lifespan.startup":
                try:
                    # Fire-and-forget: telemetry must never block startup.
                    asyncio.create_task(send_usage_telemetry_task())
                except Exception:
                    logger.exception(
                        "Exception happened during scheduling usage telemetry task"
                    )
                finally:
                    # Always acknowledge startup, even if scheduling failed.
                    await send({"type": "lifespan.startup.complete"})
            elif message["type"] == "lifespan.shutdown":
                await send({"type": "lifespan.shutdown.complete"})
                return None
    return wrapper
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/asgi/usage_telemetry.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 192,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/checkout/search/indexing.py | from typing import TYPE_CHECKING
import graphene
from django.conf import settings
from django.db import transaction
from django.db.models import Value
from ...account.search import (
generate_address_search_vector_value,
generate_email_vector,
)
from ...core.context import with_promise_context
from ...core.db.connection import allow_writer
from ...core.postgres import FlatConcatSearchVector, NoValidationSearchVector
from ..lock_objects import checkout_qs_select_for_update
from ..models import Checkout
from .loaders import CheckoutData, CheckoutLineData, TransactionData, load_checkout_data
if TYPE_CHECKING:
from uuid import UUID
from ...payment.models import Payment
from ..models import Checkout
@with_promise_context
def update_checkouts_search_vector(checkouts: list["Checkout"]):
    """Update search vectors for multiple checkouts using efficient data loading.

    Clears the ``search_index_dirty`` flag first so concurrent writes made
    while we compute vectors re-mark the checkout as dirty; restores the
    flag if vector computation fails.
    """
    checkout_pks = [checkout.pk for checkout in checkouts]
    # update search_index_dirty flag before to ensure that will not update search vector
    # with outdated data
    set_search_index_dirty(checkout_pks, search_index_dirty_value=False)
    try:
        checkout_data_map = load_checkout_data(checkouts)
        for checkout in checkouts:
            data = checkout_data_map.get(checkout.pk)
            if not data:
                continue
            checkout.search_vector = FlatConcatSearchVector(
                *prepare_checkout_search_vector_value(checkout, data)
            )
    except Exception:
        # Reset search_index_dirty flag if processing fails
        set_search_index_dirty(checkout_pks, search_index_dirty_value=True)
        raise
    with transaction.atomic():
        with allow_writer():
            # Force evaluation with list(): a lazy, never-iterated queryset
            # would not execute the SELECT ... FOR UPDATE, so no row locks
            # would actually be taken before the bulk update.
            _locked_checkouts = list(
                checkout_qs_select_for_update()
                .filter(pk__in=checkout_pks)
                .values_list("pk", flat=True)
            )
            Checkout.objects.bulk_update(checkouts, ["search_vector"])
def set_search_index_dirty(checkout_pks: list["UUID"], search_index_dirty_value: bool):
    """Clear search vectors and set the dirty flag for the given checkouts."""
    with transaction.atomic():
        with allow_writer():
            # Lock the rows first so concurrent updates apply in a
            # deterministic order.
            locked_pks = (
                checkout_qs_select_for_update()
                .filter(pk__in=checkout_pks)
                .values_list("pk", flat=True)
            )
            Checkout.objects.filter(pk__in=locked_pks).update(
                search_vector=None, search_index_dirty=search_index_dirty_value
            )
def prepare_checkout_search_vector_value(
    checkout: "Checkout", data: CheckoutData
) -> list[NoValidationSearchVector]:
    """Build the full list of search-vector components for one checkout."""
    # The checkout token is always indexed with the highest weight.
    vectors: list[NoValidationSearchVector] = [
        NoValidationSearchVector(
            Value(str(checkout.token)), config="simple", weight="A"
        ),
    ]
    if checkout.email:
        vectors += generate_email_vector(checkout.email)
    user = data.user
    if user:
        vectors += generate_email_vector(user.email)
        vectors += [
            NoValidationSearchVector(
                Value(user.first_name), config="simple", weight="A"
            ),
            NoValidationSearchVector(
                Value(user.last_name), config="simple", weight="A"
            ),
        ]
    # Billing first, then shipping — both indexed with weight B.
    for address in (data.billing_address, data.shipping_address):
        if address:
            vectors += generate_address_search_vector_value(address, weight="B")
    vectors += generate_checkout_payments_search_vector_value(data.payments)
    vectors += generate_checkout_lines_search_vector_value(data.lines)
    vectors += generate_checkout_transactions_search_vector_value(data.transactions)
    return vectors
def generate_checkout_transactions_search_vector_value(
    transactions_data: list[TransactionData],
) -> list[NoValidationSearchVector]:
    """Build weight-"D" search vectors for checkout transactions.

    For each transaction (capped by CHECKOUT_MAX_INDEXED_TRANSACTIONS) the
    global ID is always indexed; psp references of the transaction and of
    its events are indexed only when present.
    """

    def _vector(term) -> NoValidationSearchVector:
        # Every transaction-related term shares the lowest ranking weight.
        return NoValidationSearchVector(Value(term), config="simple", weight="D")

    limit = settings.CHECKOUT_MAX_INDEXED_TRANSACTIONS
    vectors: list[NoValidationSearchVector] = []
    for data in transactions_data[:limit]:
        transaction = data.transaction
        vectors.append(
            _vector(graphene.Node.to_global_id("TransactionItem", transaction.token))
        )
        if transaction.psp_reference:
            vectors.append(_vector(transaction.psp_reference))
        vectors.extend(
            _vector(event.psp_reference)
            for event in data.events
            if event.psp_reference
        )
    return vectors
def generate_checkout_payments_search_vector_value(
    payments: list["Payment"],
) -> list[NoValidationSearchVector]:
    """Build weight-"D" search vectors for checkout payments.

    Indexes the global ID of each payment (capped by
    CHECKOUT_MAX_INDEXED_PAYMENTS) plus its psp reference when set.
    """
    vectors: list[NoValidationSearchVector] = []
    for payment in payments[: settings.CHECKOUT_MAX_INDEXED_PAYMENTS]:
        terms = [graphene.Node.to_global_id("Payment", payment.id)]
        if payment.psp_reference:
            terms.append(payment.psp_reference)
        vectors.extend(
            NoValidationSearchVector(Value(term), config="simple", weight="D")
            for term in terms
        )
    return vectors
def generate_checkout_lines_search_vector_value(
    lines_data: list[CheckoutLineData],
) -> list[NoValidationSearchVector]:
    """Build weight-"C" search vectors for checkout lines.

    For each line (capped by CHECKOUT_MAX_INDEXED_LINES) with a known
    variant, indexes the variant SKU, the product name and the variant
    name — each only when non-empty. Lines without a variant are skipped.
    """
    vectors: list[NoValidationSearchVector] = []
    for line_data in lines_data[: settings.CHECKOUT_MAX_INDEXED_LINES]:
        variant = line_data.variant
        if not variant:
            continue
        product = line_data.product
        # SKU first, then product name, then variant name — empty/missing
        # values are filtered out below.
        terms = (
            variant.sku,
            product.name if product else None,
            variant.name,
        )
        vectors.extend(
            NoValidationSearchVector(Value(term), config="simple", weight="C")
            for term in terms
            if term
        )
    return vectors
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/checkout/search/indexing.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 188,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/checkout/search/loaders.py | from itertools import chain
from typing import TYPE_CHECKING, NamedTuple
from uuid import UUID
from ...graphql.account.dataloaders import AddressByIdLoader, UserByUserIdLoader
from ...graphql.checkout.dataloaders.models import (
CheckoutLinesByCheckoutTokenLoader,
TransactionItemsByCheckoutIDLoader,
)
from ...graphql.core.context import SaleorContext
from ...graphql.payment.dataloaders import (
PaymentsByCheckoutTokenLoader,
TransactionEventByTransactionIdLoader,
)
from ...graphql.product.dataloaders.products import (
ProductByIdLoader,
ProductVariantByIdLoader,
)
if TYPE_CHECKING:
from ...account.models import Address, User
from ...payment.models import Payment, TransactionEvent, TransactionItem
from ...product.models import Product, ProductVariant
from ..models import Checkout, CheckoutLine
class CheckoutLineData(NamedTuple):
    """A checkout line with its (optionally resolved) variant and product."""

    line: "CheckoutLine"
    variant: "ProductVariant | None"  # None when the variant could not be loaded
    product: "Product | None"  # None when the variant (and thus product) is missing
class TransactionData(NamedTuple):
    """A transaction item paired with its related transaction events."""

    transaction: "TransactionItem"
    events: list["TransactionEvent"]  # empty when no events were loaded
class CheckoutData(NamedTuple):
    """All related data of one checkout needed to build its search vector."""

    user: "User | None"
    billing_address: "Address | None"
    shipping_address: "Address | None"
    payments: list["Payment"]
    lines: list[CheckoutLineData]
    transactions: list[TransactionData]
def load_checkout_data(checkouts: list["Checkout"]) -> dict[UUID, CheckoutData]:
    """Load all related data for checkouts using dataloaders for efficient querying.

    Returns a mapping of checkout pk -> CheckoutData with users, addresses,
    payments, lines (with variants/products) and transactions (with events)
    resolved in a constant number of queries.
    """
    context = SaleorContext()
    checkout_ids = [checkout.pk for checkout in checkouts]

    user_map = _load_users(context, checkouts)
    address_map = _load_addresses(context, checkouts)
    payments_list = PaymentsByCheckoutTokenLoader(context).load_many(checkout_ids).get()
    lines_list = (
        CheckoutLinesByCheckoutTokenLoader(context).load_many(checkout_ids).get()
    )
    transactions_list = (
        TransactionItemsByCheckoutIDLoader(context).load_many(checkout_ids).get()
    )

    variant_map, product_map = _load_variants_and_products(
        context, list(chain.from_iterable(lines_list))
    )
    transaction_events_map = _load_transaction_events(
        context, list(chain.from_iterable(transactions_list))
    )

    # The loaders return results positionally, so zip them back to pks.
    payments_by_checkout = dict(zip(checkout_ids, payments_list, strict=False))
    lines_by_checkout = dict(zip(checkout_ids, lines_list, strict=False))
    transactions_by_checkout = dict(zip(checkout_ids, transactions_list, strict=False))

    result: dict[UUID, CheckoutData] = {}
    for checkout in checkouts:
        billing_id = checkout.billing_address_id
        shipping_id = checkout.shipping_address_id
        result[checkout.pk] = CheckoutData(
            user=user_map.get(checkout.user_id) if checkout.user_id else None,
            billing_address=address_map.get(billing_id) if billing_id else None,
            shipping_address=address_map.get(shipping_id) if shipping_id else None,
            payments=payments_by_checkout.get(checkout.pk, []),
            lines=_build_checkout_line_data(
                lines_by_checkout.get(checkout.pk, []), variant_map, product_map
            ),
            transactions=_build_transaction_data(
                transactions_by_checkout.get(checkout.pk, []), transaction_events_map
            ),
        )
    return result
def _load_users(
    context: SaleorContext, checkouts: list["Checkout"]
) -> dict[int, "User"]:
    """Map user id -> user for all checkouts that have a user assigned."""
    user_ids = [checkout.user_id for checkout in checkouts if checkout.user_id]
    loaded = UserByUserIdLoader(context).load_many(user_ids).get()
    return {user.id: user for user in loaded if user}
def _load_addresses(
    context: SaleorContext, checkouts: list["Checkout"]
) -> dict[int, "Address"]:
    """Map address id -> address for all billing/shipping addresses in use."""
    wanted_ids: set[int] = set()
    for checkout in checkouts:
        if checkout.billing_address_id:
            wanted_ids.add(checkout.billing_address_id)
        if checkout.shipping_address_id:
            wanted_ids.add(checkout.shipping_address_id)
    loaded = AddressByIdLoader(context).load_many(list(wanted_ids)).get()
    return {address.id: address for address in loaded if address}
def _load_variants_and_products(
    context: SaleorContext, lines: list["CheckoutLine"]
) -> tuple[dict[int, "ProductVariant"], dict[int, "Product"]]:
    """Load variants referenced by lines, plus the products they belong to."""
    variant_ids = [line.variant_id for line in lines if line.variant_id]
    loaded = ProductVariantByIdLoader(context).load_many(variant_ids).get()
    variants = [variant for variant in loaded if variant]
    products = (
        ProductByIdLoader(context)
        .load_many([variant.product_id for variant in variants])
        .get()
    )
    return (
        {variant.id: variant for variant in variants},
        {product.id: product for product in products if product},
    )
def _load_transaction_events(
    context: SaleorContext, transactions: list["TransactionItem"]
) -> dict[int, list["TransactionEvent"]]:
    """Map transaction id -> list of its events."""
    ids = [transaction.id for transaction in transactions]
    events_per_transaction = (
        TransactionEventByTransactionIdLoader(context).load_many(ids).get()
    )
    return dict(zip(ids, events_per_transaction, strict=False))
def _build_checkout_line_data(
    lines: list["CheckoutLine"],
    variant_map: dict[int, "ProductVariant"],
    product_map: dict[int, "Product"],
) -> list[CheckoutLineData]:
    """Pair each line with its resolved variant and product (either may be None)."""
    result: list[CheckoutLineData] = []
    for line in lines:
        resolved_variant = variant_map.get(line.variant_id)
        resolved_product = (
            product_map.get(resolved_variant.product_id) if resolved_variant else None
        )
        result.append(
            CheckoutLineData(
                line=line, variant=resolved_variant, product=resolved_product
            )
        )
    return result
def _build_transaction_data(
    transactions: list["TransactionItem"],
    transaction_events_map: dict[int, list["TransactionEvent"]],
) -> list[TransactionData]:
    """Pair each transaction with its events (empty list when none loaded)."""
    return [
        TransactionData(
            transaction=item,
            events=transaction_events_map.get(item.id, []),
        )
        for item in transactions
    ]
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/checkout/search/loaders.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 143,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/checkout/tests/test_search_indexing.py | from decimal import Decimal
from unittest.mock import patch
import graphene
import pytest
from ...payment.models import Payment, TransactionEvent, TransactionItem
from ...plugins.manager import get_plugins_manager
from ...tests import race_condition
from ..fetch import fetch_checkout_info
from ..models import Checkout, CheckoutMetadata
from ..search.indexing import (
generate_checkout_lines_search_vector_value,
generate_checkout_payments_search_vector_value,
generate_checkout_transactions_search_vector_value,
prepare_checkout_search_vector_value,
update_checkouts_search_vector,
)
from ..search.loaders import (
CheckoutData,
CheckoutLineData,
TransactionData,
)
from ..utils import add_variant_to_checkout
@pytest.fixture
def checkout_list_with_relations(
    channel_USD, customer_user, address, address_usa, product
):
    """Create multiple checkouts with various relations for testing.

    Builds three checkouts: even-indexed ones get a user and billing
    address, odd-indexed ones a shipping address. Each checkout gets one
    line, one payment and one transaction with a single event.
    """
    checkouts = []
    variant = product.variants.first()
    for i in range(3):
        checkout = Checkout.objects.create(
            currency=channel_USD.currency_code,
            channel=channel_USD,
            email=f"user{i}@example.com",
            user=customer_user if i % 2 == 0 else None,
            billing_address=address if i % 2 == 0 else None,
            shipping_address=address_usa if i % 2 == 1 else None,
        )
        checkout.set_country("US", commit=True)
        CheckoutMetadata.objects.create(checkout=checkout)
        # Add checkout line
        checkout_info = fetch_checkout_info(
            checkout, [], get_plugins_manager(allow_replica=False)
        )
        add_variant_to_checkout(checkout_info, variant, i + 1)
        checkout.save()
        # Add payment
        Payment.objects.create(
            gateway="mirumee.payments.dummy",
            is_active=True,
            checkout=checkout,
            total=Decimal("10.00") * (i + 1),
            currency="USD",
            psp_reference=f"PSP-REF-{i}",
        )
        # Add transaction
        transaction = TransactionItem.objects.create(
            name="Credit card",
            psp_reference=f"TRANS-PSP-{i}",
            available_actions=["refund"],
            currency="USD",
            checkout_id=checkout.pk,
            charged_value=Decimal(10) * (i + 1),
        )
        TransactionEvent.objects.create(
            psp_reference=f"EVENT-PSP-{i}",
            currency="USD",
            transaction=transaction,
            amount_value=Decimal(10),
        )
        checkouts.append(checkout)
    return checkouts
def _extract_search_vector_values(search_vectors):
"""Extract values from NoValidationSearchVector objects."""
values = []
for sv in search_vectors:
if hasattr(sv, "source_expressions") and sv.source_expressions:
expr = sv.source_expressions[0]
if hasattr(expr, "value"):
values.append(str(expr.value))
return values
def test_update_checkouts_search_vector(checkout_with_item):
    """Indexing a checkout populates its search vector and clears the dirty flag."""
    # given
    checkout = checkout_with_item
    assert not checkout.search_vector
    # when
    update_checkouts_search_vector([checkout])
    # then
    checkout.refresh_from_db()
    assert checkout.search_vector
    assert checkout.search_index_dirty is False
def test_update_checkouts_search_vector_multiple_checkouts(
    checkout_with_item, checkout
):
    """Every checkout in the batch gets indexed and its dirty flag cleared."""
    # given
    assert not checkout_with_item.search_vector
    assert not checkout.search_vector
    # when
    update_checkouts_search_vector([checkout_with_item, checkout])
    # then
    checkout_with_item.refresh_from_db()
    checkout.refresh_from_db()
    assert checkout_with_item.search_vector
    assert checkout_with_item.search_index_dirty is False
    assert checkout.search_vector
    assert checkout.search_index_dirty is False
def test_update_checkouts_search_vector_empty_list(db):
    """An empty checkout list is a no-op and must not raise."""
    # given
    checkouts = []
    # when/then - should not raise any errors
    update_checkouts_search_vector(checkouts)
def test_update_checkouts_search_vector_constant_queries(
    checkout_list_with_relations, django_assert_num_queries
):
    """Ensure that data loaders are working correctly and number of db queries is constant.

    Runs the indexing twice with different batch sizes and expects the same
    query count for both — any growth would indicate an N+1 regression.
    """
    # given
    checkout_list = checkout_list_with_relations
    # when & then
    # Expected query breakdown (14 total):
    # First transaction block:
    # 1. Select for update (filter dirty checkouts)
    # 2. Update search_index_dirty flag
    # Load checkout data:
    # 3. Load users (1 query)
    # 4. Load addresses (1 query)
    # 5. Load payments (1 query)
    # 6. Load checkout lines (1 query)
    # 7. Load transactions (1 query)
    # 8. Load product variants (1 query)
    # 9. Load products (1 query)
    # 10. Load transaction events (1 query)
    # Second transaction block:
    # 11. Transaction savepoint
    # 12. Select for update (lock checkouts)
    # 13. Bulk update search vectors
    # 14. Release savepoint
    expected_queries = 14
    with django_assert_num_queries(expected_queries):
        update_checkouts_search_vector(checkout_list[: len(checkout_list) - 1])
    with django_assert_num_queries(expected_queries):
        update_checkouts_search_vector(checkout_list)
def test_prepare_checkout_search_vector_value_basic(checkout):
    """Token and email are indexed even when no related data exists."""
    # given
    checkout.email = "test@example.com"
    checkout.save(update_fields=["email"])
    data = CheckoutData(
        user=None,
        billing_address=None,
        shipping_address=None,
        payments=[],
        lines=[],
        transactions=[],
    )
    # when
    search_vectors = prepare_checkout_search_vector_value(checkout, data)
    # then
    search_vector_values = _extract_search_vector_values(search_vectors)
    assert str(checkout.token) in search_vector_values
    assert checkout.email in search_vector_values
def test_prepare_checkout_search_vector_value_with_user(checkout, customer_user):
    """User email, first name and last name are included in the vectors."""
    # given
    checkout.user = customer_user
    checkout.email = "test@example.com"
    checkout.save(update_fields=["user", "email"])
    data = CheckoutData(
        user=customer_user,
        billing_address=None,
        shipping_address=None,
        payments=[],
        lines=[],
        transactions=[],
    )
    # when
    search_vectors = prepare_checkout_search_vector_value(checkout, data)
    # then
    search_vector_values = _extract_search_vector_values(search_vectors)
    assert str(checkout.token) in search_vector_values
    assert checkout.email in search_vector_values
    assert customer_user.email in search_vector_values
    assert customer_user.first_name in search_vector_values
    assert customer_user.last_name in search_vector_values
def test_prepare_checkout_search_vector_value_with_addresses(
    checkout, address, address_usa
):
    """Billing and shipping address data both end up in the vectors."""
    # given
    checkout.billing_address = address
    checkout.shipping_address = address_usa
    checkout.save(update_fields=["billing_address", "shipping_address"])
    data = CheckoutData(
        user=None,
        billing_address=address,
        shipping_address=address_usa,
        payments=[],
        lines=[],
        transactions=[],
    )
    # when
    search_vectors = prepare_checkout_search_vector_value(checkout, data)
    # then
    search_vector_values = _extract_search_vector_values(search_vectors)
    assert str(checkout.token) in search_vector_values
    # Check billing address data is included
    assert address.city in search_vector_values
    # Check shipping address data is included
    assert address_usa.city in search_vector_values
def test_prepare_checkout_search_vector_value_with_no_email(checkout):
    """Without an email only the checkout token is indexed."""
    # given
    checkout.email = None
    checkout.save(update_fields=["email"])
    data = CheckoutData(
        user=None,
        billing_address=None,
        shipping_address=None,
        payments=[],
        lines=[],
        transactions=[],
    )
    # when
    search_vectors = prepare_checkout_search_vector_value(checkout, data)
    # then
    assert len(search_vectors) == 1
    search_vector_values = _extract_search_vector_values(search_vectors)
    assert str(checkout.token) in search_vector_values
def test_generate_checkout_payments_search_vector_value_empty():
    """An empty payment list produces no search vectors."""
    assert generate_checkout_payments_search_vector_value([]) == []
def test_generate_checkout_payments_search_vector_value(checkout):
    """Each payment contributes its global ID and psp reference."""
    # given
    psp_ref_1 = "PSP-REF-123"
    psp_ref_2 = "PSP-REF-456"
    payment1 = Payment.objects.create(
        gateway="mirumee.payments.dummy",
        is_active=True,
        checkout=checkout,
        total=Decimal("10.00"),
        currency="USD",
        psp_reference=psp_ref_1,
    )
    payment2 = Payment.objects.create(
        gateway="mirumee.payments.dummy",
        is_active=False,
        checkout=checkout,
        total=Decimal("20.00"),
        currency="USD",
        psp_reference=psp_ref_2,
    )
    payments = [payment1, payment2]
    # when
    result = generate_checkout_payments_search_vector_value(payments)
    # then
    assert len(result) == 4
    search_vector_values = _extract_search_vector_values(result)
    assert psp_ref_1 in search_vector_values
    assert (
        str(graphene.Node.to_global_id("Payment", payment1.id)) in search_vector_values
    )
    assert psp_ref_2 in search_vector_values
    assert (
        str(graphene.Node.to_global_id("Payment", payment2.id)) in search_vector_values
    )
def test_generate_checkout_payments_search_vector_value_respects_max_limit(
    checkout, settings
):
    """Only the first CHECKOUT_MAX_INDEXED_PAYMENTS payments are indexed."""
    # given
    limit = 5
    settings.CHECKOUT_MAX_INDEXED_PAYMENTS = limit
    payments = []
    for i in range(limit + 50):
        payments.append(
            Payment(
                gateway="mirumee.payments.dummy",
                is_active=True,
                checkout=checkout,
                total=Decimal("10.00"),
                currency="USD",
                psp_reference=f"PSP-REF-{i}",
            )
        )
    Payment.objects.bulk_create(payments)
    # when
    result = generate_checkout_payments_search_vector_value(payments)
    # then
    assert len(result) == limit * 2  # IDs + psp_references
def test_generate_checkout_lines_search_vector_value_empty():
    """An empty line list produces no search vectors."""
    assert generate_checkout_lines_search_vector_value([]) == []
def test_generate_checkout_lines_search_vector_value(checkout_with_item, product):
    """SKU and product name of a line are both indexed."""
    # given
    sku = "TEST-SKU-123"
    line = checkout_with_item.lines.first()
    variant = line.variant
    variant.sku = sku
    variant.save(update_fields=["sku"])
    line_data = CheckoutLineData(
        line=line,
        variant=variant,
        product=product,
    )
    # when
    result = generate_checkout_lines_search_vector_value([line_data])
    # then
    assert result
    search_vector_values = _extract_search_vector_values(result)
    assert sku in search_vector_values
    assert product.name in search_vector_values
def test_generate_checkout_lines_search_vector_value_without_variant(
    checkout_with_item,
):
    """Lines whose variant could not be resolved are skipped entirely."""
    # given
    line = checkout_with_item.lines.first()
    line_data = CheckoutLineData(
        line=line,
        variant=None,
        product=None,
    )
    # when
    result = generate_checkout_lines_search_vector_value([line_data])
    # then
    assert result == []
def test_generate_checkout_lines_search_vector_value_without_sku(
    checkout_with_item, product
):
    """A missing SKU is simply omitted; the product name is still indexed."""
    # given
    line = checkout_with_item.lines.first()
    variant = line.variant
    variant.sku = None
    variant.save()
    line_data = CheckoutLineData(
        line=line,
        variant=variant,
        product=product,
    )
    # when
    result = generate_checkout_lines_search_vector_value([line_data])
    # then
    assert len(result) == 1
    search_vector_values = _extract_search_vector_values(result)
    assert product.name in search_vector_values
    # Variant name may be empty, only check if it's not empty
    if variant.name:
        assert variant.name in search_vector_values
def test_generate_checkout_lines_search_vector_value_respects_max_limit(
    checkout, product, settings
):
    """Only the first CHECKOUT_MAX_INDEXED_LINES lines are indexed."""
    # given
    variant = product.variants.first()
    variant.name = "variant name"
    variant.sku = "variant-sku-001"
    variant.save(update_fields=["name", "sku"])
    product.name = "product name"
    product.save(update_fields=["name"])
    limit = 10
    settings.CHECKOUT_MAX_INDEXED_LINES = limit
    lines_data = []
    for _ in range(limit + 50):
        line_data = CheckoutLineData(
            line=checkout.lines.first(),  # Use actual line instead of None
            variant=variant,
            product=product,
        )
        lines_data.append(line_data)
    # when
    result = generate_checkout_lines_search_vector_value(lines_data)
    # then
    # Should respect limit (10)
    # Each line can have up to 3 vectors (SKU, product name, variant name)
    assert len(result) == limit * 3
def test_generate_checkout_transactions_search_vector_value_empty():
    """An empty transaction list produces no search vectors."""
    assert generate_checkout_transactions_search_vector_value([]) == []
def test_generate_checkout_transactions_search_vector_value(checkout):
    """Transaction global ID, its psp reference and event psp references are indexed."""
    # given
    transaction_psp_ref = "PSP-TRANS-123"
    event_psp_ref = "EVENT-PSP-123"
    transaction = TransactionItem.objects.create(
        name="Credit card",
        psp_reference=transaction_psp_ref,
        available_actions=["refund"],
        currency="USD",
        checkout_id=checkout.pk,
        charged_value=Decimal(10),
    )
    event = TransactionEvent.objects.create(
        psp_reference=event_psp_ref,
        currency="USD",
        transaction=transaction,
        amount_value=Decimal(10),
    )
    transaction_data = TransactionData(
        transaction=transaction,
        events=[event],
    )
    # when
    result = generate_checkout_transactions_search_vector_value([transaction_data])
    # then
    assert len(result) == 3
    search_vector_values = _extract_search_vector_values(result)
    assert transaction_psp_ref in search_vector_values
    assert event_psp_ref in search_vector_values
def test_generate_checkout_transactions_search_vector_value_without_psp_reference(
    checkout,
):
    """A transaction without a psp reference still gets its global ID indexed."""
    # given
    transaction = TransactionItem.objects.create(
        name="Credit card",
        psp_reference=None,
        available_actions=["refund"],
        currency="USD",
        checkout_id=checkout.pk,
        charged_value=Decimal(10),
    )
    transaction_data = TransactionData(
        transaction=transaction,
        events=[],
    )
    # when
    result = generate_checkout_transactions_search_vector_value([transaction_data])
    # then
    # Should include only transaction global ID
    assert len(result) == 1
def test_generate_checkout_transactions_search_vector_value_with_multiple_events(
    checkout,
):
    """Every event with a psp reference is indexed; events without one are skipped."""
    # given
    transaction_psp_ref = "PSP-TRANS-123"
    event1_psp_ref = "EVENT-PSP-1"
    event2_psp_ref = "EVENT-PSP-2"
    transaction = TransactionItem.objects.create(
        name="Credit card",
        psp_reference=transaction_psp_ref,
        available_actions=["refund"],
        currency="USD",
        checkout_id=checkout.pk,
        charged_value=Decimal(10),
    )
    event1 = TransactionEvent.objects.create(
        psp_reference=event1_psp_ref,
        currency="USD",
        transaction=transaction,
        amount_value=Decimal(10),
    )
    event2 = TransactionEvent.objects.create(
        psp_reference=event2_psp_ref,
        currency="USD",
        transaction=transaction,
        amount_value=Decimal(10),
    )
    event3 = TransactionEvent.objects.create(
        psp_reference=None,
        currency="USD",
        transaction=transaction,
        amount_value=Decimal(10),
    )
    transaction_data = TransactionData(
        transaction=transaction,
        events=[event1, event2, event3],
    )
    # when
    result = generate_checkout_transactions_search_vector_value([transaction_data])
    # then
    assert len(result) == 4
    search_vector_values = _extract_search_vector_values(result)
    assert transaction_psp_ref in search_vector_values
    assert event1_psp_ref in search_vector_values
    assert event2_psp_ref in search_vector_values
def test_generate_checkout_transactions_search_vector_value_respects_max_limit(
    checkout, settings
):
    """Only the first CHECKOUT_MAX_INDEXED_TRANSACTIONS transactions are indexed."""
    # given
    limit = 5
    # Use `limit` (not a repeated literal) so the setting and the assertion
    # below cannot drift apart; matches the payments/lines limit tests.
    settings.CHECKOUT_MAX_INDEXED_TRANSACTIONS = limit
    transactions_data = []
    for i in range(limit + 2):
        transaction = TransactionItem(
            name="Credit card",
            psp_reference=f"PSP-TRANS-{i}",
            available_actions=["refund"],
            currency="USD",
            checkout_id=checkout.pk,
            charged_value=Decimal(10),
        )
        transactions_data.append(
            TransactionData(
                transaction=transaction,
                events=[],
            )
        )
    # when
    result = generate_checkout_transactions_search_vector_value(transactions_data)
    # then
    # Should respect transaction limit (5) - 5 IDs + 5 psp_references = 10
    assert len(result) == limit * 2
def test_update_checkouts_search_vector_handles_deleted_checkout(
    checkout_with_item, checkout_JPY
):
    """A checkout deleted after data loading does not break indexing the rest."""
    # given
    assert not checkout_with_item.search_vector
    assert not checkout_JPY.search_vector
    checkouts = [checkout_with_item, checkout_JPY]

    def delete_checkout(*args, **kwargs):
        # Simulates a concurrent deletion between loading and writing.
        Checkout.objects.filter(pk=checkout_JPY.pk).delete()

    # when
    with race_condition.RunAfter(
        "saleor.checkout.search.indexing.load_checkout_data", delete_checkout
    ):
        update_checkouts_search_vector(checkouts)
    # then
    checkout_with_item.refresh_from_db()
    assert checkout_with_item.search_vector
    assert checkout_with_item.search_index_dirty is False
    assert not Checkout.objects.filter(pk=checkout_JPY.pk).exists()
def test_update_checkouts_search_vector_handles_deleted_checkout_before_lock(
    checkout_with_item, checkout_JPY
):
    """A checkout deleted just before the row lock does not break indexing the rest."""
    # given
    assert not checkout_with_item.search_vector
    assert not checkout_JPY.search_vector
    checkouts = [checkout_with_item, checkout_JPY]

    def delete_checkout(*args, **kwargs):
        # Simulates a concurrent deletion right before select_for_update.
        Checkout.objects.filter(pk=checkout_JPY.pk).delete()

    # when
    with race_condition.RunBefore(
        "saleor.checkout.search.indexing.checkout_qs_select_for_update", delete_checkout
    ):
        update_checkouts_search_vector(checkouts)
    # then
    checkout_with_item.refresh_from_db()
    assert checkout_with_item.search_vector
    assert checkout_with_item.search_index_dirty is False
    assert not Checkout.objects.filter(pk=checkout_JPY.pk).exists()
def test_update_checkouts_search_vector_resets_flag_on_prepare_exception(
    checkout_with_item,
):
    """If vector preparation fails, the dirty flag stays set for a retry."""
    # given
    checkout = checkout_with_item
    checkout.search_index_dirty = True
    checkout.save(update_fields=["search_index_dirty"])
    assert not checkout.search_vector
    # when
    with patch(
        "saleor.checkout.search.indexing.prepare_checkout_search_vector_value",
        side_effect=ValueError("Test error"),
    ):
        with pytest.raises(ValueError, match="Test error"):
            update_checkouts_search_vector([checkout])
    # then
    checkout.refresh_from_db()
    assert checkout.search_index_dirty is True
    assert not checkout.search_vector
def test_update_checkouts_search_vector_resets_flag_on_load_data_exception(
    checkout_with_item,
):
    """If loading related data fails, the dirty flag stays set for a retry."""
    # given
    checkout = checkout_with_item
    checkout.search_index_dirty = True
    checkout.save(update_fields=["search_index_dirty"])
    assert not checkout.search_vector
    # when
    with patch(
        "saleor.checkout.search.indexing.load_checkout_data",
        side_effect=RuntimeError("Database error"),
    ):
        with pytest.raises(RuntimeError, match="Database error"):
            update_checkouts_search_vector([checkout])
    # then
    checkout.refresh_from_db()
    assert checkout.search_index_dirty is True
    assert not checkout.search_vector
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/checkout/tests/test_search_indexing.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 580,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/checkout/tests/test_search_loaders.py | from decimal import Decimal
from ...payment.models import Payment, TransactionItem
from ..search.loaders import load_checkout_data
def test_load_checkout_data(checkout_with_item, customer_user, address, address_usa):
    """All relations of a checkout are resolved into its CheckoutData entry."""
    # given
    checkout_with_item.user = customer_user
    checkout_with_item.billing_address = address
    checkout_with_item.shipping_address = address_usa
    checkout_with_item.save()
    Payment.objects.create(
        gateway="mirumee.payments.dummy",
        is_active=True,
        checkout=checkout_with_item,
        total=Decimal("10.00"),
        currency="USD",
    )
    TransactionItem.objects.create(
        name="Credit card",
        psp_reference="PSP-123",
        available_actions=["refund"],
        currency="USD",
        checkout_id=checkout_with_item.pk,
        charged_value=Decimal(10),
    )
    # when
    result = load_checkout_data([checkout_with_item])
    # then
    assert checkout_with_item.pk in result
    checkout_data = result[checkout_with_item.pk]
    assert checkout_data.user == customer_user
    assert checkout_data.billing_address == address
    assert checkout_data.shipping_address == address_usa
    assert len(checkout_data.payments) == 1
    assert len(checkout_data.lines) == checkout_with_item.lines.count()
    assert len(checkout_data.transactions) == 1
def test_load_checkout_data_empty_list():
    """Loading data for no checkouts returns an empty mapping."""
    assert load_checkout_data([]) == {}
def test_load_checkout_data_with_no_relations(checkout):
    """A bare checkout yields a CheckoutData with None/empty fields."""
    # given
    checkout.user = None
    checkout.billing_address = None
    checkout.shipping_address = None
    checkout.save()
    # when
    result = load_checkout_data([checkout])
    # then
    assert checkout.pk in result
    checkout_data = result[checkout.pk]
    assert checkout_data.user is None
    assert checkout_data.billing_address is None
    assert checkout_data.shipping_address is None
    assert checkout_data.payments == []
    assert checkout_data.lines == []
    assert checkout_data.transactions == []
def test_load_checkout_data_multiple_checkouts(checkout_with_item, checkout_JPY):
    """Each checkout in the batch gets its own entry in the result mapping."""
    # given
    checkouts = [checkout_with_item, checkout_JPY]
    # when
    result = load_checkout_data(checkouts)
    # then
    assert len(result) == 2
    assert checkout_with_item.pk in result
    assert checkout_JPY.pk in result
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/checkout/tests/test_search_loaders.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 68,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/payment/tests/queries/test_transactions.py | import pytest
from .....order.models import Order
from ....tests.utils import assert_no_permission, get_graphql_content
@pytest.fixture
def transactions_in_different_channels(
    order_list,
    checkout,
    channel_USD,
    channel_JPY,
    channel_PLN,
    transaction_item_generator,
):
    """Create four transactions: one per PLN/JPY/USD order plus one on a USD checkout."""
    order_list[0].channel = channel_PLN
    order_list[1].channel = channel_JPY
    order_list[2].channel = channel_USD
    Order.objects.bulk_update(order_list, ["channel"])
    checkout.channel = channel_USD
    checkout.save(update_fields=["channel"])
    transaction_1 = transaction_item_generator(
        order_id=order_list[0].pk,
        psp_reference="PSP-PLN",
        currency="PLN",
    )
    transaction_2 = transaction_item_generator(
        order_id=order_list[1].pk,
        psp_reference="PSP-JPY",
        currency="JPY",
    )
    transaction_3 = transaction_item_generator(
        order_id=order_list[2].pk,
        psp_reference="PSP-USD",
        currency="USD",
    )
    transaction_4 = transaction_item_generator(
        checkout_id=checkout.pk,
        psp_reference="PSP-CHECKOUT-USD",
        currency="USD",
    )
    return [transaction_1, transaction_2, transaction_3, transaction_4]
TRANSACTIONS_QUERY = """
query Transactions($where: TransactionWhereInput){
transactions(first: 10, where: $where) {
edges {
node {
id
pspReference
}
}
}
}
"""
def test_transactions_query_no_permission(
    staff_api_client, transactions_in_different_channels
):
    """Staff without a relevant permission is denied access to transactions."""
    # given
    variables = {}
    # when
    response = staff_api_client.post_graphql(TRANSACTIONS_QUERY, variables)
    # then
    assert_no_permission(response)
def test_transactions_query_by_app_with_manage_orders_returns_all(
    transactions_in_different_channels,
    app_api_client,
    permission_manage_orders,
):
    """An app holding MANAGE_ORDERS sees transactions from every channel and source."""
    # given
    # App with MANAGE_ORDERS permission should see all transactions
    app_api_client.app.permissions.add(permission_manage_orders)
    variables = {}
    # when
    response = app_api_client.post_graphql(TRANSACTIONS_QUERY, variables)
    # then
    content = get_graphql_content(response)
    transactions = content["data"]["transactions"]["edges"]
    assert len(transactions) == len(transactions_in_different_channels)
def test_transactions_query_by_app_with_handle_payments_returns_only_own(
    order_with_lines,
    app_api_client,
    permission_manage_payments,
    transaction_item_generator,
):
    """An app with only HANDLE_PAYMENTS sees just transactions it created itself."""
    # given
    # App with just HANDLE_PAYMENTS should only see transactions it created
    app = app_api_client.app
    app.permissions.add(permission_manage_payments)
    # Transaction created by this app
    own_transaction = transaction_item_generator(
        order_id=order_with_lines.pk,
        psp_reference="PSP-OWN",
        currency="USD",
        app=app,
    )
    # Transaction created by another app (or no app)
    transaction_item_generator(
        order_id=order_with_lines.pk,
        psp_reference="PSP-OTHER",
        currency="USD",
        app=None,
    )
    variables = {}
    # when
    response = app_api_client.post_graphql(TRANSACTIONS_QUERY, variables)
    # then
    content = get_graphql_content(response)
    transactions = content["data"]["transactions"]["edges"]
    assert len(transactions) == 1
    assert transactions[0]["node"]["pspReference"] == own_transaction.psp_reference
def test_transactions_query_by_app_with_handle_payments_no_own_transactions(
    transactions_in_different_channels,
    app_api_client,
    permission_manage_payments,
):
    """An app with only HANDLE_PAYMENTS and no own transactions gets an empty list."""
    # given
    # App with just HANDLE_PAYMENTS sees nothing if it didn't create any transactions
    app_api_client.app.permissions.add(permission_manage_payments)
    variables = {}
    # when
    response = app_api_client.post_graphql(TRANSACTIONS_QUERY, variables)
    # then
    content = get_graphql_content(response)
    transactions = content["data"]["transactions"]["edges"]
    # None of the transactions_in_different_channels were created by this app
    assert len(transactions) == 0
def test_transactions_query_by_app_with_both_permissions_returns_all(
    transactions_in_different_channels,
    app_api_client,
    permission_manage_payments,
    permission_manage_orders,
):
    # given
    # Holding MANAGE_ORDERS alongside HANDLE_PAYMENTS grants full visibility.
    requestor_app = app_api_client.app
    requestor_app.permissions.add(permission_manage_payments, permission_manage_orders)

    # when
    response = app_api_client.post_graphql(TRANSACTIONS_QUERY, {})

    # then
    edges = get_graphql_content(response)["data"]["transactions"]["edges"]
    assert len(edges) == len(transactions_in_different_channels)
def test_transactions_query_with_manage_orders_permission(
    transactions_in_different_channels,
    staff_api_client,
    permission_group_manage_orders,
):
    # given
    # Staff in the manage-orders group can list every transaction.
    staff_api_client.user.groups.add(permission_group_manage_orders)

    # when
    response = staff_api_client.post_graphql(TRANSACTIONS_QUERY, {})

    # then
    payload = get_graphql_content(response)
    edges = payload["data"]["transactions"]["edges"]
    assert len(edges) == len(transactions_in_different_channels)
def test_transactions_query_filtered_by_accessible_channels_for_user(
    transactions_in_different_channels,
    staff_api_client,
    permission_group_manage_orders,
    channel_USD,
):
    # given
    # Restrict the permission group to a single channel; results must be
    # narrowed to that channel only.
    group = permission_group_manage_orders
    group.restricted_access_to_channels = True
    group.save(update_fields=["restricted_access_to_channels"])
    group.channels.add(channel_USD)
    group.user_set.add(staff_api_client.user)

    # when
    response = staff_api_client.post_graphql(TRANSACTIONS_QUERY, {})

    # then
    edges = get_graphql_content(response)["data"]["transactions"]["edges"]
    # Only the two channel_USD transactions should come back.
    psp_refs = sorted(edge["node"]["pspReference"] for edge in edges)
    assert psp_refs == ["PSP-CHECKOUT-USD", "PSP-USD"]
def test_transactions_query_by_user_with_no_channel_access(
    transactions_in_different_channels,
    staff_api_client,
    permission_group_manage_orders,
    other_channel_USD,
):
    # given
    # The group is restricted to a channel that contains none of the
    # fixture transactions, so the user must see nothing.
    group = permission_group_manage_orders
    group.channels.set([other_channel_USD])
    group.restricted_access_to_channels = True
    group.save(update_fields=["restricted_access_to_channels"])
    group.user_set.add(staff_api_client.user)

    # when
    response = staff_api_client.post_graphql(TRANSACTIONS_QUERY, {})

    # then
    content = get_graphql_content(response)
    assert content["data"]["transactions"]["edges"] == []
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/payment/tests/queries/test_transactions.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 195,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/payment/tests/queries/test_transactions_where.py | import graphene
import pytest
from .....payment.models import TransactionItem
from ....tests.utils import get_graphql_content
TRANSACTIONS_QUERY = """
query Transactions($where: TransactionWhereInput){
transactions(first: 10, where: $where) {
edges {
node {
id
pspReference
}
}
}
}
"""
def test_transactions_query_filter_by_ids(
    staff_api_client,
    permission_group_manage_orders,
    order_with_lines,
    transaction_item_generator,
):
    # given
    staff_api_client.user.groups.add(permission_group_manage_orders)
    # Three transactions exist; the filter requests only the first two.
    tokens = []
    for psp_ref in ("PSP ref1", "PSP ref2", "PSP ref3"):
        transaction = transaction_item_generator(
            order_id=order_with_lines.pk,
            psp_reference=psp_ref,
            currency="USD",
        )
        tokens.append(transaction.token)
    ids = [
        graphene.Node.to_global_id("TransactionItem", token) for token in tokens[:2]
    ]
    variables = {"where": {"ids": ids}}

    # when
    response = staff_api_client.post_graphql(TRANSACTIONS_QUERY, variables)

    # then
    content = get_graphql_content(response)
    edges = content["data"]["transactions"]["edges"]
    assert len(edges) == 2
    assert {edge["node"]["id"] for edge in edges} == set(ids)
@pytest.mark.parametrize("value", [None, []])
def test_transactions_query_filter_by_ids_empty_values(
    value, staff_api_client, permission_manage_payments, transaction_item_created_by_app
):
    # given
    # A null or empty `ids` filter must match no transactions at all.
    variables = {"where": {"ids": value}}

    # when
    response = staff_api_client.post_graphql(
        TRANSACTIONS_QUERY, variables, permissions=(permission_manage_payments,)
    )

    # then
    content = get_graphql_content(response)
    assert content["data"]["transactions"]["edges"] == []
@pytest.mark.parametrize(
    ("where", "expected_psp_refs"),
    [
        ({"eq": "PSP ref1"}, ["PSP ref1"]),
        ({"eq": "Non-existing"}, []),
        ({"eq": None}, []),
        ({"eq": ""}, []),
        ({"oneOf": ["PSP ref1", "PSP ref2"]}, ["PSP ref1", "PSP ref2"]),
        ({"oneOf": ["PSP ref1"]}, ["PSP ref1"]),
        ({"oneOf": ["Non-existing 1", "Non-existing 2"]}, []),
        ({"oneOf": []}, []),
        (None, []),
    ],
)
def test_transactions_query_filter_by_psp_reference(
    where,
    expected_psp_refs,
    staff_api_client,
    permission_group_manage_orders,
    order_with_lines,
    transaction_item_generator,
):
    # given
    staff_api_client.user.groups.add(permission_group_manage_orders)
    permission_group_manage_orders.channels.add(order_with_lines.channel)
    # Three candidate transactions to filter against.
    for psp_ref in ("PSP ref1", "PSP ref2", "PSP ref3"):
        transaction_item_generator(
            order_id=order_with_lines.pk,
            psp_reference=psp_ref,
            currency="USD",
        )
    variables = {"where": {"pspReference": where}}

    # when
    response = staff_api_client.post_graphql(TRANSACTIONS_QUERY, variables)

    # then
    content = get_graphql_content(response)
    edges = content["data"]["transactions"]["edges"]
    returned_psp_refs = {edge["node"]["pspReference"] for edge in edges}
    assert returned_psp_refs == set(expected_psp_refs)
    assert len(edges) == len(expected_psp_refs)
def test_transactions_query_combined_filters(
    staff_api_client,
    permission_group_manage_orders,
    order_with_lines,
    transaction_item_generator,
):
    # given
    staff_api_client.user.groups.add(permission_group_manage_orders)
    target = transaction_item_generator(
        order_id=order_with_lines.pk,
        psp_reference="PSP-TARGET",
        currency="USD",
    )
    transaction_item_generator(
        order_id=order_with_lines.pk,
        psp_reference="PSP-OTHER",
        currency="USD",
    )
    target_id = graphene.Node.to_global_id("TransactionItem", target.token)
    # Combine an `ids` filter and a `pspReference` filter with AND; only the
    # target transaction satisfies both.
    variables = {
        "where": {
            "AND": [
                {"ids": [target_id]},
                {"pspReference": {"eq": target.psp_reference}},
            ]
        }
    }

    # when
    response = staff_api_client.post_graphql(TRANSACTIONS_QUERY, variables)

    # then
    edges = get_graphql_content(response)["data"]["transactions"]["edges"]
    assert len(edges) == 1
    assert edges[0]["node"]["pspReference"] == target.psp_reference
@pytest.mark.parametrize(
    ("where", "expected_app_identifiers"),
    [
        ({"eq": "app.identifier.1"}, ["app.identifier.1"]),
        ({"eq": "Non-existing"}, []),
        ({"eq": None}, []),
        ({"eq": ""}, []),
        (
            {"oneOf": ["app.identifier.1", "app.identifier.2"]},
            ["app.identifier.1", "app.identifier.2"],
        ),
        ({"oneOf": ["app.identifier.1"]}, ["app.identifier.1"]),
        ({"oneOf": ["Non-existing 1", "Non-existing 2"]}, []),
        ({"oneOf": []}, []),
        (None, []),
    ],
)
def test_transactions_query_filter_by_app_identifier(
    where,
    expected_app_identifiers,
    staff_api_client,
    permission_group_manage_orders,
    order_with_lines,
    transaction_item_generator,
):
    """Filter transactions by appIdentifier (eq / oneOf variants)."""
    # given
    staff_api_client.user.groups.add(permission_group_manage_orders)
    # Transaction "PSP refN" is tagged with identifier "app.identifier.N",
    # so we can verify not only how many rows come back but *which* ones.
    transactions = []
    for suffix in ("1", "2", "3"):
        transaction = transaction_item_generator(
            order_id=order_with_lines.pk,
            psp_reference=f"PSP ref{suffix}",
            currency="USD",
        )
        transaction.app_identifier = f"app.identifier.{suffix}"
        transactions.append(transaction)
    TransactionItem.objects.bulk_update(transactions, ["app_identifier"])
    variables = {"where": {"appIdentifier": where}}

    # when
    response = staff_api_client.post_graphql(TRANSACTIONS_QUERY, variables)

    # then
    content = get_graphql_content(response)
    edges = content["data"]["transactions"]["edges"]
    # Fix: the previous assertion compared only counts, which would pass even
    # if the filter returned the wrong transactions of the right cardinality.
    # Map the expected identifiers back to their psp references and compare.
    expected_psp_refs = {
        f"PSP ref{identifier.rsplit('.', 1)[-1]}"
        for identifier in expected_app_identifiers
    }
    assert {edge["node"]["pspReference"] for edge in edges} == expected_psp_refs
    assert len(edges) == len(expected_app_identifiers)
def test_transactions_query_filter_by_app_identifier_combined_with_psp_reference(
    staff_api_client,
    permission_group_manage_orders,
    order_with_lines,
    transaction_item_generator,
):
    # given
    staff_api_client.user.groups.add(permission_group_manage_orders)
    target_app_identifier = "my.app.identifier"
    other_app_identifier = "other.app.identifier"
    target_psp_reference = "PSP-TARGET"
    other_psp_reference = "PSP-OTHER"
    # (psp_reference, app_identifier) pairs; only the first row satisfies
    # both filters simultaneously.
    rows = [
        (target_psp_reference, target_app_identifier),
        (other_psp_reference, target_app_identifier),
        (target_psp_reference, other_app_identifier),
    ]
    created = []
    for psp_reference, app_identifier in rows:
        transaction = transaction_item_generator(
            order_id=order_with_lines.pk,
            psp_reference=psp_reference,
            currency="USD",
        )
        transaction.app_identifier = app_identifier
        created.append(transaction)
    TransactionItem.objects.bulk_update(created, ["app_identifier"])
    target_transaction = created[0]
    variables = {
        "where": {
            "AND": [
                {"appIdentifier": {"eq": target_app_identifier}},
                {"pspReference": {"eq": target_psp_reference}},
            ]
        }
    }

    # when
    response = staff_api_client.post_graphql(TRANSACTIONS_QUERY, variables)

    # then
    content = get_graphql_content(response)
    edges = content["data"]["transactions"]["edges"]
    assert len(edges) == 1
    assert edges[0]["node"]["pspReference"] == target_transaction.psp_reference
def test_transactions_query_filter_respects_app_permissions(
    app_api_client,
    permission_manage_payments,
    order_with_lines,
    transaction_item_generator,
    external_app,
):
    # given
    requestor = app_api_client.app
    requestor.permissions.add(permission_manage_payments)
    external_app.permissions.add(permission_manage_payments)

    def _make_transaction(psp_reference, identifier):
        # One-line helper: create a transaction and tag it with an app identifier.
        transaction = transaction_item_generator(
            order_id=order_with_lines.pk,
            psp_reference=psp_reference,
            currency="USD",
        )
        transaction.app_identifier = identifier
        return transaction

    # Two transactions per app.
    created = [
        _make_transaction("APP1-PSP-REF-1", requestor.identifier),
        _make_transaction("APP1-PSP-REF-2", requestor.identifier),
        _make_transaction("APP2-PSP-REF-1", external_app.identifier),
        _make_transaction("APP2-PSP-REF-2", external_app.identifier),
    ]
    TransactionItem.objects.bulk_update(created, ["app_identifier"])
    other_apps_transaction = created[2]
    # The requestor asks for the other app's transaction by pspReference.
    variables = {
        "where": {"pspReference": {"eq": other_apps_transaction.psp_reference}}
    }

    # when
    response = app_api_client.post_graphql(TRANSACTIONS_QUERY, variables)

    # then
    # Permission boundaries win over the filter: nothing is returned.
    content = get_graphql_content(response)
    assert content["data"]["transactions"]["edges"] == []
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/payment/tests/queries/test_transactions_where.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 311,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/product/deprecations.py | import functools
import warnings
from saleor.core.deprecations import SaleorDeprecationWarning
# Warning text emitted by the `deprecated_digital_content` decorator below on
# every call into a Digital Content entry point.
DEPRECATION_WARNING_MESSAGE = (
    "Support for Digital Content is deprecated and will be removed in Saleor v3.23.0. "
    "This functionality is legacy and undocumented, and is not part of the supported "
    "API. Users should not rely on this behavior."
)
def deprecated_digital_content(func):
    """Decorator that emits a ``SaleorDeprecationWarning`` on every call.

    Used to mark Digital Content entry points scheduled for removal in
    Saleor v3.23.0; the wrapped function's behavior is otherwise unchanged.
    """

    @functools.wraps(func)
    def _inner(*args, **kwargs):
        warnings.warn(
            message=DEPRECATION_WARNING_MESSAGE,
            category=SaleorDeprecationWarning,
            # Fix: stacklevel=2 attributes the warning to the *caller* of the
            # deprecated function instead of this wrapper's own warn() line,
            # so users see where their code triggers the deprecated path.
            stacklevel=2,
        )
        return func(*args, **kwargs)

    return _inner
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/product/deprecations.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 18,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/graphql/core/federation/tests/test_resolvers.py | from typing import Any
import graphene
import pytest
QUERY = """
query Federation ($representations: [_Any!]!) {
_entities(representations: $representations) {
... on Order { number }
}
}
"""
# NOTE: happy path for the 'id' field is in ./test_schema.py
# (test_resolve_entity_should_return_object_when_found)
@pytest.mark.parametrize(
    ("_case", "fields", "expected_error_msg"),
    [
        (
            "Error when the Graphene Type mismatches",
            {"id": graphene.Node.to_global_id("User", 1)},
            ("Invalid ID: VXNlcjox. Expected: Order, received: User."),
        ),
        (
            "Error when ID isn't UUID",
            {"id": graphene.Node.to_global_id("Order", 1)},
            ("['\u201c1\u201d is not a valid UUID.']"),
        ),
        (
            "Error when ID isn't a string",
            {"id": 123},
            ("ID must be a string"),
        ),
        (
            "Error when ID null",
            {"id": None},
            ("Missing required field: id"),
        ),
        (
            "Error when ID is blank",
            {"id": ""},
            ("Missing required field: id"),
        ),
        (
            "Error when ID is missing",
            {},
            ("Missing required field: id"),
        ),
        (
            "Error when ID isn't based64",
            {"id": "foo"},
            ("Invalid ID: foo. Expected: Order."),
        ),
    ],
)
def test_resolve_federation_references(
    _case,
    fields: dict[str, Any],
    expected_error_msg: str,
    staff_api_client,
):
    """Ensure errors are handled when user passes invalid IDs."""
    # given
    representation = {"__typename": "Order", **fields}
    variables = {"representations": [representation]}

    # when
    payload = staff_api_client.post_graphql(QUERY, variables).json()

    # then
    errors = payload["errors"]
    assert len(errors) == 1
    assert errors[0]["message"] == expected_error_msg
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/core/federation/tests/test_resolvers.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 64,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/core/federation/tests/test_schema.py | from typing import Any
import graphene
import pytest
from saleor.graphql.order.types import Order
VALID_QUERY = """
query Federation ($representations: [_Any!]!) {
_entities(representations: $representations) {
... on Order { number }
}
}
"""
# Query with 'Any' instead of 'Any!' - this must not be allowed, an object must be
# provided instead.
INVALID_QUERY_NULL_LIST_ITEM = """
query Federation ($representations: [_Any]!) {
_entities(representations: $representations) {
... on Order { number }
}
}
"""
# Query with '[Any!]' instead of '[Any!]!' - this must not be allowed, the
# 'representations' argument is required.
INVALID_QUERY_NULL_LIST = """
query Federation ($representations: [_Any!]) {
_entities(representations: $representations) {
... on Order { number }
}
}
"""
# Query without the 'representations' field is not allowed, it must provide it.
INVALID_QUERY_MISSING_INPUT_FIELD = """
query Federation {
_entities {
... on Order { number }
}
}
"""
@pytest.mark.parametrize(
    ("_case", "query", "variables", "expected_error_msg"),
    [
        (
            "Error when missing 'representations' argument",
            INVALID_QUERY_MISSING_INPUT_FIELD,
            {},
            ('Argument "representations" of required type [_Any!]!" was not provided.'),
        ),
        (
            "Error when 'representations' is null",
            INVALID_QUERY_NULL_LIST,
            {"representations": None},
            (
                'Variable "representations" of type "[_Any!]" used in position '
                'expecting type "[_Any!]!".'
            ),
        ),
        (
            "Error when an *item* in 'representations' is null",
            INVALID_QUERY_NULL_LIST_ITEM,
            {"representations": [None]},
            (
                'Variable "representations" of type "[_Any]!" used in position '
                'expecting type "[_Any!]!".'
            ),
        ),
        (
            "Error when __typename is missing",
            VALID_QUERY,
            {"representations": [{"id": "foo"}]},
            "Missing required field: __typename",
        ),
        (
            "Error when __typename value is not supported",
            VALID_QUERY,
            {"representations": [{"__typename": "Invalid!"}]},
            "Invalid value or unsupported model for __typename",
        ),
        (
            "Error when __typename is incorrect type",
            VALID_QUERY,
            {"representations": [{"__typename": 1234}]},
            "Invalid type for __typename: must be a string",
        ),
        (
            "Error when representations is incorrect type",
            VALID_QUERY,
            {"representations": [1]},
            "Invalid type for 'representation' field: must be an object",
        ),
    ],
)
def test_resolve_entities_handles_errors_invalid_input(
    _case,
    query: str,
    variables: dict[str, Any],
    expected_error_msg: str,
    staff_api_client,
):
    """Ensure invalid inputs are handled properly."""
    # when
    payload = staff_api_client.post_graphql(query, variables).json()

    # then
    errors = payload["errors"]
    assert len(errors) == 1
    assert errors[0]["message"] == expected_error_msg
def test_resolve_entities_can_only_provide_fields(staff_api_client):
    """Ensure only GraphQL fields can be provided.

    Should only be able to provide things like "id", "userEmail", ...
    and not things like `__name__`.
    """
    # given
    representation = {"__typename": "Order", "__name__": "foo"}
    variables = {"representations": [representation]}

    # when
    payload = staff_api_client.post_graphql(VALID_QUERY, variables).json()

    # then
    errors = payload["errors"]
    assert len(errors) == 1
    assert errors[0]["message"] == "Unknown field for Order: __name__"
    # Sanity check: passing '__name__' to model should work
    # WARNING: must NOT raise an error - this ensures that no one changes the
    # underlying code of ``resolve_entities()`` to a ``getattr()``
    # which would cause us to allow non-GraphQL fields
    Order(__name__=1)
def test_resolve_entity_should_return_object_when_found(
    staff_user,
    staff_api_client,
    order_unconfirmed,
    permission_group_manage_orders,
):
    """Ensure a valid request returns the matching Order object."""
    # given
    permission_group_manage_orders.user_set.add(staff_user)
    global_id = graphene.Node.to_global_id("Order", order_unconfirmed.pk)
    variables = {"representations": [{"__typename": "Order", "id": global_id}]}

    # when
    payload = staff_api_client.post_graphql(VALID_QUERY, variables).json()

    # then
    assert "errors" not in payload
    expected = {"_entities": [{"number": str(order_unconfirmed.number)}]}
    assert payload["data"] == expected
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/core/federation/tests/test_schema.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 136,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:.semgrep/correctness/celery/celery-migration-task-missing-dedicated-queue.py | from django.conf import settings
from saleor.celeryconf import app
from saleor.core.db.connection import allow_writer
# ruleid: celery-migration-task-missing-dedicated-queue
@app.task
def migration_task_without_queue_set():
    """Positive fixture: bare ``@app.task`` with no queue at all."""
    pass


# ruleid: celery-migration-task-missing-dedicated-queue
@app.task
@allow_writer
def migration_task_without_queue_set_and_more_decorators():
    """Positive fixture: extra decorators must not hide the missing queue."""
    pass


# ruleid: celery-migration-task-missing-dedicated-queue
@app.task()
def migration_task_with_no_args():
    """Positive fixture: ``@app.task()`` called with empty parentheses."""
    pass


# ruleid: celery-migration-task-missing-dedicated-queue
@app.task(bind=True)
def migration_task_with_arg(self):
    """Positive fixture: decorator kwargs present but no ``queue=``."""
    pass


# ruleid: celery-migration-task-missing-dedicated-queue
@app.task(bind=True, retry_backoff=30, retry_kwargs={"max_retries": 5})
def migration_task_with_args_and_kwargs(
    self, brand_data: dict, *, app_installation_id=None, app_id=None
):
    """Positive fixture: many kwargs and parameters, still no ``queue=``."""
    pass


# ruleid: celery-migration-task-missing-dedicated-queue
@app.task(queue="some-queue")
def migration_task_with_queue_set_to_raw_value():
    """Positive fixture: queue set to a raw string, not the settings constant."""
    pass


# ruleid: celery-migration-task-missing-dedicated-queue
@app.task(bind=True, retry_backoff=30, queue="some-queue")
def migration_task_with_args_kwargs_and_queue_set_to_raw_value(self):
    """Positive fixture: raw-string queue mixed in with other kwargs."""
    pass


# ok: celery-migration-task-missing-dedicated-queue
@app.task(queue=settings.DATA_MIGRATIONS_TASKS_QUEUE_NAME)
def migration_task_with_queue_set():
    """Negative fixture: the dedicated migrations queue is set."""
    pass


# ok: celery-migration-task-missing-dedicated-queue
@app.task(queue=settings.DATA_MIGRATIONS_TASKS_QUEUE_NAME)
@allow_writer
def migration_task_with_queue_set_and_more_decorators():
    """Negative fixture: queue set, with additional decorators."""
    pass


# ok: celery-migration-task-missing-dedicated-queue
@app.task(queue=settings.DATA_MIGRATIONS_TASKS_QUEUE_NAME, bind=True)
def migration_task_with_kwargs_and_queue_set_first(self):
    """Negative fixture: queue passed as the first keyword."""
    pass


# ok: celery-migration-task-missing-dedicated-queue
@app.task(bind=True, queue=settings.DATA_MIGRATIONS_TASKS_QUEUE_NAME)
def migration_task_with_kwargs_and_queue_set_last(self):
    """Negative fixture: queue passed as the last keyword."""
    pass


# ok: celery-migration-task-missing-dedicated-queue
@app.task(bind=True, queue=settings.DATA_MIGRATIONS_TASKS_QUEUE_NAME, retry_backoff=30)
def migration_task_with_kwargs_and_queue_set_in_the_middle(self):
    """Negative fixture: queue passed between other keywords."""
    pass
# ok: celery-migration-task-missing-dedicated-queue
@app.task(
    bind=True,
    queue=settings.DATA_MIGRATIONS_TASKS_QUEUE_NAME,
    retry_backoff=30,
)
def migration_task_with_multiline_kwargs_and_queue(self):
    """Negative fixture: the queue kwarg is still recognized when the
    decorator call is split across multiple lines.

    Fix: the decorator was previously single-line, so this fixture never
    exercised the multiline case its name promises; `self` added for
    consistency with the other ``bind=True`` stubs.
    """
    pass
| {
"repo_id": "saleor/saleor",
"file_path": ".semgrep/correctness/celery/celery-migration-task-missing-dedicated-queue.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 59,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/core/cleaners/html.py | import json
import os
import warnings
from copy import deepcopy
from dataclasses import dataclass, field
from typing import Self
import nh3
from ..deprecations import SaleorDeprecationWarning
@dataclass
class HtmlCleanerSettings:
    """Environment-driven configuration for the nh3-based HTML sanitizer.

    Instances are populated from environment variables via :meth:`reload`,
    or built directly with the :meth:`parse` convenience constructor.
    """

    # Extra URL schemes allowed in sanitized links (empty by default).
    allowed_schemes: set[str] = field(default_factory=set)
    # Per-tag allowed attributes; starts from nh3's defaults, deep-copied so
    # later mutations never leak back into the library-level constant.
    allowed_attributes: dict[str, set[str]] = field(
        default_factory=lambda: deepcopy(nh3.ALLOWED_ATTRIBUTES)
    )
    # NOTE: nh3 doesn't expose the default values, however it's OK as
    # the default value is empty https://github.com/rust-ammonia/ammonia/blob/6d803b5677006947da7d2f495dbae83090db4909/src/lib.rs#L447
    allowed_attribute_values: dict[str, dict[str, set[str]]] = field(
        default_factory=dict
    )
    # Configures the 'rel' attribute that will be added on links. `None` disables it.
    #
    # Recommended value: 'noopener noreferrer'
    #
    # - noopener: This prevents a particular type of XSS attack, and should usually be turned on for untrusted HTML.
    # - noreferrer: This prevents the browser from sending the source URL to the website that is linked to.
    # - nofollow: This prevents search engines from using this link for ranking, which disincentivizes spammers.
    #
    # Learn more: https://nh3.readthedocs.io/en/latest/#nh3.Cleaner
    #
    link_rel: str | None = "noopener noreferrer"

    def reload(self) -> Self:
        """Re-read all settings from the environment and return ``self``.

        Reads ``EDITOR_JS_LINK_REL``, ``UNSAFE_EDITOR_JS_ALLOWED_URL_SCHEMES``,
        ``EDITOR_JS_ALLOWED_ATTRIBUTES`` and
        ``EDITOR_JS_ALLOWED_ATTRIBUTE_VALUES``, emitting a
        ``SaleorDeprecationWarning`` for deprecated options.
        """
        # NOTE: 'or None' is needed as blank string should be treated as None
        self.link_rel = os.getenv("EDITOR_JS_LINK_REL") or None
        if self.link_rel is None:
            warnings.warn(
                (
                    "EDITOR_JS_LINK_REL=None default will be removed in Saleor 3.23.0, "
                    'use EDITOR_JS_LINK_REL="noopener noreferrer" instead'
                ),
                category=SaleorDeprecationWarning,
                stacklevel=2,
            )
        if allowed_schemes_str := os.getenv("UNSAFE_EDITOR_JS_ALLOWED_URL_SCHEMES"):
            # This is deprecated, each URL scheme must have a cleaner implemented
            # we cannot continue to allow to add custom schemes without cleaners
            # as this is risky.
            warnings.warn(
                (
                    "UNSAFE_EDITOR_JS_ALLOWED_URL_SCHEMES will be removed in Saleor 3.23.0, "
                    "open a feature request at https://github.com/saleor/saleor/issues "
                    "to add out of the box support for the URL scheme(s) you need"
                ),
                category=SaleorDeprecationWarning,
                # NOTE(review): stacklevel=3 here vs 2 above — presumably to
                # also skip the parse() wrapper; confirm the warning points at
                # the intended caller frame.
                stacklevel=3,
            )
            self.allowed_schemes = {x.strip() for x in allowed_schemes_str.split(",")}
        if allowed_attributes_str := os.getenv("EDITOR_JS_ALLOWED_ATTRIBUTES"):
            # Merge user-supplied per-tag attributes into the nh3 defaults.
            for html_tag, allowed_attributes in json.loads(
                allowed_attributes_str
            ).items():
                attr_list = self.allowed_attributes.setdefault(html_tag, set())
                attr_list.update(allowed_attributes)
        if allowed_values_str := os.getenv("EDITOR_JS_ALLOWED_ATTRIBUTE_VALUES"):
            allowed_values: dict[str, dict[str, list[str]]] = json.loads(
                allowed_values_str
            )
            # Converts raw values' list[str] to set[str] type as nh3 wants `set[str]`
            for key, nested_dict in allowed_values.items():
                self.allowed_attribute_values[key] = {
                    k: set(v) for k, v in nested_dict.items()
                }
        return self

    @classmethod
    def parse(cls) -> Self:
        """Build a settings instance populated from the environment."""
        return cls().reload()
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/core/cleaners/html.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 75,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/core/cleaners/tests/test_urls.py | import pytest
from .. import clean_mailto, clean_tel
@pytest.mark.parametrize(
    "input_url",
    [
        # All valid inputs (should be unchanged by the cleaner)
        "tel:1234567890",
        "tel:+1234567890",
        "tel:+1-202-555-0123",
        "tel:+1(202)555.0123",
        "tel:*123#",
        "tel:#21#",
        "tel:+49(30)123456",
        "tel:123;ext=456",
        "tel:+1;phone-context=example.com",
        "tel:+1;isub=12345",
        "tel:+123%;ext=1",
        "tel:1-2-3-4-5",
        "tel:1...(2)...3",
        "tel:((123))",
        "tel:+1.-()",
        "tel:",
        "tel:+",
        "tel:*",
        "tel:#",
    ],
)
def test_clean_url_tel_scheme_valid(input_url: str):
    """Test valid `tel` URL inputs are unchanged (RFC 3966)."""
    # A well-formed tel: URL must round-trip through the cleaner untouched.
    assert clean_tel(input_url) == input_url, "URL shouldn't have been changed"
@pytest.mark.parametrize(
    ("input_url", "expected_cleaned_url"),
    [
        # It must be 'tel:' not 'tel://'
        ("tel://", "tel:%2F%2F"),
        ('tel:+3300"<>', "tel:+3300%22%3C%3E"),
        ("tel:+3300\n", "tel:+3300%0A"),
        # Ensures all parts/components are quoted
        ('tel:"\n#"\n;"\n', "tel:%22%0A#%22%0A;%22%0A"),
    ],
)
def test_clean_url_tel_scheme_invalid(input_url: str, expected_cleaned_url: str):
    """Test invalid characters in `tel` URLs are quoted."""
    # Dangerous characters must come back percent-encoded.
    cleaned = clean_tel(input_url)
    assert cleaned == expected_cleaned_url
@pytest.mark.parametrize(
    ("input_url", "changed_to"),
    [
        ("mailto:", None),
        ("mailto:chris@[::1]", None),  # IPv6 should work
        (
            # Should quote all parts
            'mailto:"@example.com?"="',
            "mailto:%22@example.com?%22=%22",
        ),
        (
            # Should quote all parts, including control characters
            "mailto:\n@example.com?\n=\n",
            "mailto:%0A@example.com?%0A=%0A",
        ),
        # Based on https://www.rfc-editor.org/rfc/rfc6068#section-6
        ("mailto:chris@example.com", None),
        ("mailto:infobot@example.com?subject=current-issue", None),
        ("mailto:infobot@example.com?body=send%20current-issue", None),
        (
            "mailto:infobot@example.com?body=send%20current-issue%0D%0Asend%20index",
            None,
        ),
        ("mailto:list@example.org?In-Reply-To=%3C3469A91.D10AF4C@example.com%3E", None),
        ("mailto:majordomo@example.com?body=subscribe%20bamboo-l", None),
        ("mailto:joe@example.com?cc=bob@example.com&body=hello", None),
        ("mailto:?to=addr1@an.example,addr2@an.example", None),
        ("mailto:addr1@an.example?to=addr2@an.example", None),
        ("mailto:%22not%40me%22@example.org", None),
        ("mailto:%22oh%5C%5Cno%22@example.org", None),
        (
            "mailto:%22%5C%5C%5C%22it's%5C%20ugly%5C%5C%5C%22%22@example.org",
            # We don't allow single quotes (')
            "mailto:%22%5C%5C%5C%22it%27s%5C%20ugly%5C%5C%5C%22%22@example.org",
        ),
        ("mailto:user@example.org?subject=caf%C3%A9", None),
        ("mailto:user@example.org?subject=%3D%3Futf-8%3FQ%3Fcaf%3DC3%3DA9%3F%3D", None),
        ("mailto:user@example.org?subject=%3D%3Fiso-8859-1%3FQ%3Fcaf%3DE9%3F%3D", None),
        ("mailto:user@example.org?subject=caf%C3%A9&body=caf%C3%A9", None),
        (
            "mailto:user@%E7%B4%8D%E8%B1%86.example.org?subject=Test&body=NATTO",
            # We IDNA encode unicode characters
            "mailto:user@xn--99zt52a.example.org?subject=Test&body=NATTO",
        ),
        # Shouldn't be getting re-quoted when something was already quoted (%25)
        ("mailto:foo%25bar@example.com", None),
    ],
)
def test_clean_url_mailto_scheme_valid(input_url: str, changed_to: str | None):
    """Test valid `mailto` URL inputs are unchanged (RFC 6068)."""
    # `changed_to` of None means "expected to pass through unchanged".
    expected = changed_to or input_url
    assert clean_mailto(input_url) == expected
@pytest.mark.parametrize(
    ("_case", "input_url", "expected_error"),
    [
        (
            "rejects mailto URIs that contain too many addresses",
            "mailto:" + ",".join(["user@example.com"] * 11),
            "Too many addresses in mailto URL",
        ),
        (
            "rejects invalid domains",
            'mailto:foo@Exam"ple.com',
            "Invalid characters found in hostname",
        ),
        ("rejects invalid IPv6", "mailto:chris@[not-an-ip]", "Invalid IPv6 address"),
    ],
)
def test_clean_url_mailto_scheme_invalid_urls(
    _case: str, input_url: str, expected_error: str
):
    # Each malformed mailto URL must be rejected with a matching ValueError.
    raises_ctx = pytest.raises(ValueError, match=expected_error)
    with raises_ctx:
        clean_mailto(input_url)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/core/cleaners/tests/test_urls.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 120,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/core/cleaners/urls.py | import ipaddress
import urllib.parse
from typing import Protocol
import idna
# Characters (in addition to urllib's always-safe set) that may stay
# unescaped in the parts of a mailto: URL.
MAILTO_SAFE_CHARS = ".-_+,"
class UrlCleaner(Protocol):
    """Structural type for scheme-specific URL cleaners.

    A cleaner accepts a raw (potentially dangerous) URL string and returns
    a sanitized version of it.
    """

    def __call__(self, dirty_url: str) -> str: ...
class URLCleanerError(ValueError):
    """Base exception class for all URL cleaner-related exceptions."""


class InvalidHostname(URLCleanerError):
    """Raised when an invalid domain or hostname is provided."""


class InvalidURL(URLCleanerError):
    """Raised when the URL syntax is wrong."""


# NOTE: InvalidUsage deliberately derives from RuntimeError rather than
# URLCleanerError — it signals a programming error in the caller (e.g. wrong
# scheme passed to a cleaner), not bad end-user input.
class InvalidUsage(RuntimeError):
    """Raised when a cleaner function wasn't used properly, indicates a code bug."""
def normalize_host(dirty_domain: str) -> str:
    """Normalize a hostname/domain field."""
    host = dirty_domain.lower()

    # Bracketed hosts are IPv6 literals: validate and normalize them with the
    # stdlib ipaddress module, then restore the brackets.
    if host.startswith("["):
        literal = host.strip("[]")
        try:
            normalized_ip = ipaddress.IPv6Address(literal)
        except ipaddress.AddressValueError as exc:
            raise InvalidHostname("Invalid IPv6 address") from exc
        return f"[{normalized_ip}]"

    # Anything else is treated as a DNS name: validate and normalize each
    # label with idna (strict, RFC 1123 hostname rules), then reassemble.
    clean_labels = []
    for label in host.split("."):
        try:
            encoded = idna.encode(
                label,
                strict=True,
                std3_rules=True,  # Follows RFC 1123 for the hostname
            )
        except idna.IDNAError as exc:
            raise InvalidHostname("Invalid characters found in hostname") from exc
        clean_labels.append(encoded.decode("utf-8"))
    return ".".join(clean_labels)
def clean_tel(dirty_url: str) -> str:
    """Sanitize a 'tel' URL such as 'tel:+3312345678' (RFC 3966)."""
    scheme, _, phone_part = dirty_url.partition(":")

    # Defensive guard: reaching here with another scheme is a code bug.
    if scheme != "tel":
        raise InvalidUsage(f"Expected url scheme to be 'tel', found {scheme} instead")

    # RFC 3966 visual separators and sub-address characters stay untouched;
    # everything else (e.g. quotes and angle brackets) is percent-encoded.
    return "tel:" + urllib.parse.quote(phone_part, safe="+-.()*#;=%")
def clean_mailto(
    dirty_url: str,
    *,
    max_address_count: int = 10,
    max_header_count: int = 10,
) -> str:
    """Clean a mailto URL based on RFC 6068 (leniently).

    :param dirty_url: The raw URL, expected to start with the 'mailto:' scheme.
    :param max_address_count: Maximum number of addresses allowed before '?'.
    :param max_header_count: Maximum number of header fields allowed after '?'.
    :raises InvalidUsage: If the scheme isn't 'mailto' (indicates a code bug).
    :raises InvalidURL: If an address is malformed or there are too many.
    :raises InvalidHostname: If an address's domain fails normalization.
    """
    scheme, _, dirty_url = dirty_url.partition(":")
    if scheme != "mailto":
        raise InvalidUsage(
            f"Expected url scheme to be 'mailto', found {scheme} instead"
        )
    # raw_dirty_addr_list is the address list before the '?' character, e.g., from:
    # `foo@example.com?body=text`
    raw_dirty_addr_list, _, raw_dirty_qs = dirty_url.partition("?")
    # Retrieves the list of email addresses before the '?' delimiter (if any)
    #
    # Note: an empty list of addresses is allowed (per RFC 6068), i.e.,
    # the user can do `mailto:?<headers>` instead of
    # `mailto:foo@example.com?<headers>` - this is used for things like:
    # `mailto:?To=foo@example.com,bar@example.com,...`
    #
    # Thus we shouldn't raise if we see an empty list.
    if raw_dirty_addr_list:
        dirty_addr_list = raw_dirty_addr_list.split(",", maxsplit=max_address_count + 1)
        if len(dirty_addr_list) > max_address_count:
            raise InvalidURL("Too many addresses in mailto URL")
    else:
        dirty_addr_list = []
    cleaned_addresses = []
    for dirty_addr in dirty_addr_list:
        # Example: user@domain
        dirty_local_part, _, dirty_domain = dirty_addr.partition("@")
        if not all((dirty_local_part, dirty_domain)):
            raise InvalidURL("Invalid email address")
        # Unquote then re-quote so already percent-encoded input is
        # normalized exactly once (no double-encoding).
        dirty_local_part = urllib.parse.unquote(dirty_local_part)
        clean_local_part = urllib.parse.quote(dirty_local_part, safe=MAILTO_SAFE_CHARS)
        dirty_domain = urllib.parse.unquote(dirty_domain)
        clean_domain = normalize_host(dirty_domain)
        cleaned_addresses.append(f"{clean_local_part}@{clean_domain}")
    cleaned_url = "mailto:"
    if cleaned_addresses:
        cleaned_url += ",".join(cleaned_addresses)
    # Clean the email headers (query), the RFC specs are similar to HTTP URLs
    # thus we can use urllib
    if raw_dirty_qs:
        # NOTE(review): parse_qs raises a plain ValueError (not InvalidURL)
        # when max_num_fields is exceeded — confirm callers expect that.
        headers_dict = urllib.parse.parse_qs(
            raw_dirty_qs, max_num_fields=max_header_count
        )
        cleaned_raw_headers = urllib.parse.urlencode(
            headers_dict,
            doseq=True,
            safe=f"{MAILTO_SAFE_CHARS}@",
            quote_via=urllib.parse.quote,
        )
        cleaned_url += f"?{cleaned_raw_headers}"
    return cleaned_url
# Dispatch table mapping a URL scheme to its cleaning function.
URL_SCHEME_CLEANERS: dict[str, UrlCleaner] = {"mailto": clean_mailto, "tel": clean_tel}
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/core/cleaners/urls.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 108,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/core/management/commands/clean_editorjs_fields.py | import difflib
import json
import traceback
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand, CommandError
from django.db import models
from ....attribute.models import AttributeValue, AttributeValueTranslation
from ....discount.models import (
Promotion,
PromotionRule,
PromotionRuleTranslation,
PromotionTranslation,
)
from ....page.models import Page, PageTranslation
from ....product.models import (
Category,
CategoryTranslation,
Collection,
CollectionTranslation,
Product,
ProductTranslation,
)
from ....shipping.models import ShippingMethod, ShippingMethodTranslation
from ...utils.editorjs import clean_editor_js
# Registry of every model that stores EditorJS content, paired with the name
# of the JSON field to clean on that model.
# ((<model class>, <field to clean>), ...)
MODELS: tuple[tuple[type[models.Model], str], ...] = (
    # Product module
    (Product, "description"),
    (ProductTranslation, "description"),
    (Collection, "description"),
    (CollectionTranslation, "description"),
    (Category, "description"),
    (CategoryTranslation, "description"),
    # Page module
    (Page, "content"),
    (PageTranslation, "content"),
    # Shipping module
    (ShippingMethod, "description"),
    (ShippingMethodTranslation, "description"),
    # Discount module
    (Promotion, "description"),
    (PromotionTranslation, "description"),
    (PromotionRule, "description"),
    (PromotionRuleTranslation, "description"),
    # Attribute module
    (AttributeValue, "rich_text"),
    (AttributeValueTranslation, "rich_text"),
)
class Command(BaseCommand):
    """Run the EditorJS cleaner against all rows of all configured models.

    By default the command is a dry run: it prints a line diff of every row
    that would change. Pass ``--apply`` to persist the cleaned values.
    """

    help = "Runs the Editorjs cleaner against all rows and all models."

    def add_arguments(self, parser):
        """Register CLI options, including mutually exclusive model filters."""
        parser.add_argument(
            "--apply",
            dest="is_dry_run",
            action="store_false",
            help=(
                "When provided, it applies the changes instead of only printing what "
                "it would do (dry run)"
            ),
        )
        parser.add_argument(
            "--stop-on-error",
            action="store_true",
            help=(
                "When provided, the script doesn't continue migrating the data "
                "when invalid data is found (e.g., invalid EditorJS syntax)"
            ),
        )
        parser.add_argument(
            "--progress",
            dest="report_frequency",
            type=int,
            default=1000,
            metavar="N",
            help="Reports the progress (in percents) every N rows processed",
        )
        parser.add_argument(
            "--verbose",
            action="store_true",
            help="Enable debug logs and send more details",
        )
        model_names = [model.__name__ for [model, _field] in MODELS]
        model_filter = parser.add_mutually_exclusive_group()
        model_filter.add_argument(
            "--only",
            type=str,
            choices=model_names,
            action="extend",
            nargs="+",
            metavar="MODEL",
            help=(
                "Only clean given models. "
                "See Available Models for the list of allowed values."
            ),
        )
        model_filter.add_argument(
            "--exclude",
            type=str,
            choices=model_names,
            action="extend",
            nargs="+",
            metavar="MODEL",
            help=(
                "Exclude given models from cleaning. "
                "See Available Models for the list of allowed values."
            ),
        )
        parser.epilog = "Available Models: " + ", ".join(model_names)

    def handle(
        self,
        is_dry_run: bool,
        verbose: bool,
        stop_on_error: bool,
        report_frequency: int,
        # argparse leaves these as None when the flags aren't provided
        only: list[str] | None,
        exclude: list[str] | None,
        **_options,
    ):
        """Clean each configured model, honoring the --only/--exclude filters."""
        self.is_dry_run = is_dry_run
        self.verbose = verbose
        self.stop_on_error = stop_on_error
        # Reports the % progress every N rows (where N is `report_frequency`)
        self.report_frequency = report_frequency
        # Makes django use the default color for stderr, otherwise everything is red
        # despite we don't log an error
        self.stderr.style_func = None
        self.differ = difflib.Differ()
        for [model_cls, field] in MODELS:
            # Skip model if --only was provided but this model's name wasn't provided in
            # --only
            if only and model_cls.__name__ not in only:
                if self.verbose:
                    self.stderr.write(f"Skipped: {model_cls.__name__} (not in --only)")
                continue
            # Skip model if it was listed in --exclude
            if exclude and model_cls.__name__ in exclude:
                if self.verbose:
                    self.stderr.write(
                        f"Skipped: {model_cls.__name__} (in --exclude)"
                    )
                continue
            self.clean_model(model_cls, field)
        # Only suggest --apply during dry runs; with --apply the changes have
        # already been saved, so the hint would be misleading.
        if self.is_dry_run:
            self.stderr.write(
                "To apply the changes, rerun this command with --apply",
                self.style.NOTICE,
            )

    def clean_model(self, cls, field: str):
        """Clean ``field`` on every non-null row of ``cls``.

        In dry-run mode a colorized line diff of each row that would change is
        printed to stdout; otherwise the cleaned value is saved in place.
        """
        table_name = cls.__name__
        if self.is_dry_run is False:
            self.stderr.write(f"Cleaning {table_name} table...")
        elif self.verbose:
            self.stderr.write(f"Checking {table_name}...")
        # Excludes 'null'::jsonb (see https://code.djangoproject.com/ticket/35381)
        qs = (
            cls.objects.all()
            .only("pk", field)
            .filter(~models.Q(**{field: models.Value(None, models.JSONField())}))
        )
        obj_count = qs.count()
        for processed_count, row in enumerate(qs.iterator(chunk_size=100), start=1):
            # Sends a progress update every N rows to prevent the command from looking
            # stuck if there are lot of objects to process
            if (
                processed_count == obj_count
                or (processed_count % self.report_frequency) == 0
            ):
                progress_perc = round((processed_count / obj_count) * 100)
                self.stderr.write(f"Progress for {table_name}: {progress_perc}%")
            contents = getattr(row, field)
            # Skip row if type is invalid
            if isinstance(contents, dict) is False:
                self.stderr.write(
                    (
                        f"Expected a dict object but got instead {type(contents)} "
                        f"for table {table_name} with pk={row.pk!r}"
                    ),
                    self.style.NOTICE,
                )
                continue
            # Dump to string before cleaning as the object will be mutated
            before = json.dumps(contents, indent=2)
            # Perform the cleaning
            try:
                cleaned = clean_editor_js(contents)
            except (KeyError, ValidationError, ValueError) as exc:
                msg = f"Found invalid data for row #{row.pk} ({table_name})"
                if self.stop_on_error is True:
                    raise CommandError(msg) from exc
                self.stderr.write(f"ERROR: {msg}", self.style.ERROR)
                self.stderr.write(
                    "\n".join(traceback.format_exception_only(exc)).strip(),
                    self.style.ERROR,
                )
                continue
            after = json.dumps(cleaned, indent=2)
            if after == before:
                continue
            if self.is_dry_run is False:
                row.save(update_fields=[field])
            else:
                self.stdout.write(
                    f"Row #{row.pk} would be changed ({table_name}):",
                    self.style.WARNING,
                )
                for line in self.differ.compare(
                    before.splitlines(),
                    after.splitlines(),
                ):
                    style = None
                    if line:
                        if line[0] == "-":
                            style = self.style.ERROR  # red color for deleted lines
                        elif line[0] == "+":
                            style = self.style.SUCCESS  # green color for added lines
                    self.stdout.write(f"\t{line}", style, ending="\n")
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/core/management/commands/clean_editorjs_fields.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 213,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/core/management/commands/remove_invalid_files.py | """Remove files with invalid/dangerous MIME types from storage.
This command scans specific directories in storage for files with invalid
MIME types. By default, it runs in dry-run mode showing what would be deleted
without actually removing files.
"""
import os
from django.conf import settings
from django.core.files.storage import default_storage
from django.core.management.base import BaseCommand
from ....graphql.core.validators.file import detect_mime_type
class Command(BaseCommand):
    """Scan upload directories in storage and remove files failing MIME checks."""

    help = "Removes files with invalid/dangerous MIME types from storage."

    # Directories to scan for invalid files
    DIRECTORIES_TO_SCAN = [
        "file_upload/",  # Files uploaded via FileUpload mutation
        "digital_contents/",  # Files uploaded via DigitalContentCreate mutation
    ]

    def add_arguments(self, parser):
        parser.add_argument(
            "--apply",
            action="store_true",
            help=(
                "Apply the changes and delete invalid files. "
                "Without this flag, the command runs in dry-run mode."
            ),
        )

    def handle(self, **options):
        # Entry point: scan each configured directory and print a summary.
        is_dry_run = not options.get("apply", False)
        if is_dry_run:
            self.stdout.write(
                self.style.WARNING(
                    "Running in DRY-RUN mode. No files will be deleted. "
                    "Use --apply to actually delete files."
                )
            )
        total_checked = 0
        total_invalid = 0
        for directory in self.DIRECTORIES_TO_SCAN:
            self.stdout.write(f"\nScanning directory: {directory}")
            checked, invalid = self._scan_directory(directory, is_dry_run)
            total_checked += checked
            total_invalid += invalid
        # Summary
        self.stdout.write("\n" + "=" * 60)
        self.stdout.write(f"Total files checked: {total_checked}")
        self.stdout.write(
            self.style.ERROR(f"Total invalid files found: {total_invalid}")
        )
        if is_dry_run and total_invalid > 0:
            self.stdout.write(
                self.style.WARNING(
                    "\nReview the list of files above. "
                    "If it looks correct, rerun with --apply to delete them."
                )
            )
        elif total_invalid > 0:
            self.stdout.write(self.style.SUCCESS("\nInvalid files have been deleted."))
        else:
            self.stdout.write(self.style.SUCCESS("\nNo invalid files found."))

    def _scan_directory(self, directory, is_dry_run):
        """Scan a directory for files with invalid mime types.

        Returns a (checked, invalid) tuple of counters covering this
        directory and, recursively, all of its subdirectories.
        """
        checked = 0
        invalid = 0
        try:
            # List all files in the directory
            directories, files = default_storage.listdir(directory)
        except FileNotFoundError:
            self.stdout.write(self.style.WARNING(f" Directory not found: {directory}"))
            return 0, 0
        # Process files in current directory
        for filename in files:
            file_path = os.path.join(directory, filename)
            checked += 1
            if self._is_invalid_file(file_path):
                invalid += 1
                self._handle_invalid_file(file_path, is_dry_run)
        # Recursively scan subdirectories
        for subdir in directories:
            subdir_path = os.path.join(directory, subdir)
            sub_checked, sub_invalid = self._scan_directory(subdir_path, is_dry_run)
            checked += sub_checked
            invalid += sub_invalid
        return checked, invalid

    def _is_invalid_file(self, file_path):
        """Check if file has invalid mime type by reading actual file content.

        A file is considered invalid when it has no extension, an extension
        outside the allowed list, or content that fails MIME verification.
        """
        if not file_path:
            return False
        if not default_storage.exists(file_path):
            return False
        if self._has_no_extension(file_path):
            return True
        # verify file extension to prevent unnecessary file reads
        if not self._has_allowed_extension(file_path):
            return True
        return self._verify_file_content(file_path)

    def _has_no_extension(self, file_path):
        """Check if file has no extension."""
        _, ext = os.path.splitext(file_path)
        return not ext.lower()

    def _has_allowed_extension(self, file_path):
        """Check if file extension is in allowed list."""
        _, ext = os.path.splitext(file_path)
        ext = ext.lower()
        # An extension is allowed when any configured MIME type maps to it.
        for _mime_type, extensions in settings.ALLOWED_MIME_TYPES.items():
            if ext in extensions:
                return True
        return False

    def _verify_file_content(self, file_path):
        """Verify file content matches allowed MIME types."""
        try:
            with default_storage.open(file_path, "rb") as file:
                try:
                    actual_mime_type = detect_mime_type(file)
                    if not self._is_mime_type_allowed(actual_mime_type):
                        self.stdout.write(
                            self.style.WARNING(
                                f" File {file_path} has mime type {actual_mime_type} "
                                f"not in allowed list"
                            )
                        )
                        return True
                    if not self._extension_matches_mime_type(
                        file_path, actual_mime_type
                    ):
                        _, ext = os.path.splitext(file_path)
                        self.stdout.write(
                            self.style.WARNING(
                                f" File {file_path} has extension {ext.lower()} but actual "
                                f"mime type is {actual_mime_type}"
                            )
                        )
                        return True
                    return False
                except Exception:
                    # Fail closed: if MIME detection itself blows up, treat the
                    # file as invalid rather than keep undetectable content.
                    return True
        except Exception as e:
            # Fail open: a storage read error keeps the file in place.
            # NOTE(review): asymmetric with the inner handler above — confirm
            # this asymmetry is intentional.
            self.stdout.write(
                self.style.WARNING(f" Failed to read file {file_path}: {str(e)}")
            )
            return False

    def _is_mime_type_allowed(self, mime_type):
        """Check if MIME type is in allowed list."""
        return mime_type in settings.ALLOWED_MIME_TYPES

    def _extension_matches_mime_type(self, file_path, mime_type):
        """Check if file extension matches the detected MIME type."""
        _, ext = os.path.splitext(file_path)
        ext = ext.lower()
        expected_extensions = settings.ALLOWED_MIME_TYPES.get(mime_type, [])
        return ext in expected_extensions

    def _handle_invalid_file(self, file_path, is_dry_run):
        """Handle an invalid file - log or delete based on mode."""
        if is_dry_run:
            self.stdout.write(self.style.WARNING(f" Would delete: {file_path}"))
        else:
            self.stdout.write(f" Deleting: {file_path}")
            try:
                # Re-check existence: the file may have vanished since the scan.
                if default_storage.exists(file_path):
                    default_storage.delete(file_path)
                    self.stdout.write(self.style.SUCCESS(" ✓ Deleted successfully"))
                else:
                    self.stdout.write(self.style.WARNING(" ⚠ File does not exist"))
            except Exception as e:
                self.stdout.write(self.style.ERROR(f" ✗ Failed to delete: {str(e)}"))
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/core/management/commands/remove_invalid_files.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 163,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/core/tests/commands/test_clean_editorjs_fields.py | # ruff: noqa: UP031 # printf-style string formatting are more readable for JSON
import random
import re
from copy import deepcopy
from io import StringIO
from unittest import mock
import pytest
from django.core.management import call_command
from django.core.management.base import CommandError
from ....attribute.models.base import AttributeValue
from ....page.models import Page
from ....product.models import Category, Collection
from ...management.commands.clean_editorjs_fields import MODELS
# Model class names accepted by the command's --only/--exclude filters.
MODELS_NAMES = [model_cls.__name__ for [model_cls, _field] in MODELS]
def dirty():
    """Return a dirty EditorJS object that needs cleaning."""
    # A fresh object per call — the cleaner mutates its input in place.
    unsafe_block = {"type": "paragraph", "data": {"text": "<img src=x onerror=y>"}}
    return {"blocks": [unsafe_block]}
def cleaned():
    """Return a cleaned EditorJS object."""
    safe_text = '<img src="x">'
    return {"blocks": [{"type": "paragraph", "data": {"text": safe_text}}]}
def create_dirty_category(data: None | dict = None) -> Category:
    """Create and save a Category whose description bypasses sanitization.

    The field's sanitizer is mocked to a pass-through so the dirty payload
    reaches the database unmodified.
    """
    with mock.patch.object(
        Category.description.field, "_sanitizer_method", side_effect=lambda x: x
    ):
        # Keep an independent copy for the post-save sanity check below.
        original_data = deepcopy(data) if data else dirty()
        category = Category(
            name="my-category",
            slug=f"my-category-{random.random()}",
            # Note: we call dirty() again as it will mutate,
            # original_data thus needs to have a different pointer address
            description=data or dirty(),
        )
        category.save()
        assert category.description == original_data, (
            "The description shouldn't have changed. Was the cleaner mocked properly?"
        )
        return category
def test_handles_errors():
    """Ensure that error caused by invalid data are handled."""
    # Creates an invalid category in order to cause the cleaner to return an error
    data = {"blocks": [{"type": "invalid", "data": {"foo": "bar"}}]}
    category = create_dirty_category(data)
    assert category.description == data
    out_fp = StringIO()
    # Without --stop-on-error, the command reports the failure and continues.
    call_command("clean_editorjs_fields", "--only=Category", stderr=out_fp)
    expected_error = f"Found invalid data for row #{category.pk} (Category)"
    assert expected_error in out_fp.getvalue()
    # Should fail if '--stop-on-error' was provided
    with pytest.raises(CommandError, match=re.escape(expected_error)):
        call_command(
            "clean_editorjs_fields", "--stop-on-error", "--only=Category", stderr=out_fp
        )
def test_detects_dirty_rows():
    """Ensure rows that would be modified are shown in the command's output."""
    category = create_dirty_category()
    out_fp = StringIO()
    call_command("clean_editorjs_fields", "--only=Category", stdout=out_fp)
    # Should have detected a difference
    actual_output = out_fp.getvalue()
    # Expected dry-run output: a difflib.Differ-style diff of the JSON payload.
    expected_output = (
        """Row #%d would be changed (Category):
\t {
\t "blocks": [
\t {
\t "data": {
\t- "text": "<img src=x onerror=y>"
\t? ^^^^^^^^^^
\t+ "text": "<img src=\\"x\\">"
\t? ++ ^^
\t },
\t "type": "paragraph"
\t }
\t ]
\t }"""
        % category.pk
    )
    assert expected_output in actual_output
    # Then, should successfully clean it
    assert "onerror" in category.description["blocks"][0]["data"]["text"]
    call_command("clean_editorjs_fields", "--only=Category", "--apply")
    category.refresh_from_db(fields=["description"])
    assert "onerror" not in category.description["blocks"][0]["data"]["text"]
def test_track_progress():
    """Ensures progress is only reported every N rows and at 100% done."""
    # We create clean categories as we are not trying to check the cleaning behavior
    # it saves a bit of compute time
    Category.objects.bulk_create(
        [
            Category(
                slug=f"category-{i}",
                name=f"category-{i}",
                description={},
                lft=4,
                rght=5,
                tree_id=0,
                level=0,
            )
            for i in range(10)
        ]
    )
    out_fp = StringIO()
    call_command(
        "clean_editorjs_fields",
        "--progress=4",
        "--only=Category",
        stderr=out_fp,
    )
    lines = [
        line
        for line in out_fp.getvalue().splitlines()
        if line.startswith("Progress for")  # Excludes unrelated logs
    ]
    # Should report three times: 2 times because of --progress=4 (every 4 rows)
    # and a 3rd time when it reaches the last row (100%)
    assert lines == [
        "Progress for Category: 40%",
        "Progress for Category: 80%",
        "Progress for Category: 100%",
    ]
@pytest.mark.parametrize(
    ("cmd_args", "expected_models"),
    [
        (
            # Given
            ("--only", "Category", "Collection"),
            # Then, only the following should be scanned
            ("Category", "Collection"),
        ),
        (
            # Given
            ("--exclude", "Category", "Collection"),
            # Then, only the following should be scanned
            list({*MODELS_NAMES} - {"Category", "Collection"}),
        ),
    ],
)
def test_filter_models(cmd_args: tuple[str, ...], expected_models: list[str]):
    """Ensure --only and --exclude restrict which models get scanned."""
    out_fp = StringIO()
    call_command(
        "clean_editorjs_fields",
        "--verbose",
        *cmd_args,
        stderr=out_fp,
    )
    # Sorted so the comparison is order-independent.
    lines = sorted(
        [
            line
            for line in out_fp.getvalue().splitlines()
            if line.startswith("Checking ")  # Excludes unrelated logs
        ]
    )
    expected_lines = sorted([f"Checking {model}..." for model in expected_models])
    assert lines == expected_lines
@pytest.mark.parametrize(
    ("model_cls", "editorjs_field", "create_entry"),
    # This test takes a sample of 3 models:
    # - Page - uses '.content'
    # - Collection - uses '.description'
    # - AttributeValue - uses '.rich_text'
    [
        (
            Page,
            "content",  # => page.content - the EditorJS DB field
            lambda request: Page(
                slug="x",
                title="x",
                page_type=request.getfixturevalue("page_type"),
                content=dirty(),
            ),
        ),
        (
            Collection,
            "description",  # => collection.description - the EditorJS DB field
            lambda _request: Collection(slug="x", name="x", description=dirty()),
        ),
        (
            AttributeValue,
            "rich_text",  # => AttributeValue.description - the EditorJS DB field
            lambda request: AttributeValue(
                slug="x",
                name="x",
                rich_text=dirty(),
                attribute=request.getfixturevalue("rich_text_attribute"),
            ),
        ),
    ],
)
def test_can_clean_all_models(
    request,
    model_cls,
    editorjs_field: str,
    create_entry,
):
    """Ensure the cleaner works on a sample of models with different EditorJS fields."""
    with mock.patch.object(
        # Temporarily disables the cleaner so it doesn't clean the dirty input
        getattr(model_cls, editorjs_field).field,
        "_sanitizer_method",
        side_effect=lambda x: x,
    ):
        entry = create_entry(request)
        entry.save()  # insert into the DB
    # Ensures the field is indeed dirty before we try to clean it
    assert getattr(entry, editorjs_field) == dirty()
    # Start the cleaning
    call_command("clean_editorjs_fields", "--apply", f"--only={model_cls.__name__}")
    # Should have cleaned the model properly
    entry.refresh_from_db(fields=[editorjs_field])
    assert getattr(entry, editorjs_field) == cleaned()
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/core/tests/commands/test_clean_editorjs_fields.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 212,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/checkout/tests/test_migrations_tasks.py | from unittest import mock
from ..migrations.tasks.saleor3_21 import (
BILLING_FIELD,
SHIPPING_FIELD,
fix_shared_address_instances_task,
)
from ..models import Checkout
def test_fix_shared_billing_addresses(checkouts_list, order, address, address_usa):
    """Checkouts sharing a billing address with an order get their own copy."""
    # given
    checkout1 = checkouts_list[0]
    checkout2 = checkouts_list[1]
    checkout3 = checkouts_list[2]
    # these two share address with order
    checkout1.billing_address = address
    checkout2.billing_address = address
    new_checkout_address = address.get_copy()
    checkout1.shipping_address = new_checkout_address
    checkout2.shipping_address = new_checkout_address
    # this one has unique address
    checkout3.billing_address = address_usa
    Checkout.objects.bulk_update(
        [checkout1, checkout2, checkout3], ["billing_address", "shipping_address"]
    )
    order.billing_address = address
    new_order_address = address.get_copy()
    order.shipping_address = new_order_address
    order.save(update_fields=["billing_address", "shipping_address"])
    # when
    fix_shared_address_instances_task()
    # then
    for checkout in [checkout1, checkout2]:
        checkout.refresh_from_db()
        # new instance, same address data
        assert checkout.billing_address_id != address.id
        assert checkout.billing_address.as_data() == address.as_data()
        assert checkout.shipping_address_id == new_checkout_address.id
    checkout3.refresh_from_db()
    assert checkout3.billing_address_id == address_usa.id
def test_fix_shared_shipping_addresses(checkouts_list, order, address, address_usa):
    """Checkouts sharing a shipping address with an order get their own copy."""
    # given
    checkout1 = checkouts_list[0]
    checkout2 = checkouts_list[1]
    checkout3 = checkouts_list[2]
    # these two share address with order
    checkout1.shipping_address = address
    checkout2.shipping_address = address
    new_checkout_address = address.get_copy()
    checkout1.billing_address = new_checkout_address
    checkout2.billing_address = new_checkout_address
    # this one has unique address
    checkout3.shipping_address = address_usa
    Checkout.objects.bulk_update(
        [checkout1, checkout2, checkout3], ["shipping_address", "billing_address"]
    )
    new_order_address = address.get_copy()
    order.billing_address = new_order_address
    order.shipping_address = address
    order.save(update_fields=["shipping_address", "billing_address"])
    # when
    fix_shared_address_instances_task(field=SHIPPING_FIELD)
    # then
    for checkout in [checkout1, checkout2]:
        checkout.refresh_from_db()
        # new instance, same address data
        assert checkout.shipping_address_id != address.id
        assert checkout.shipping_address.as_data() == address.as_data()
        assert checkout.billing_address_id == new_checkout_address.id
    checkout3.refresh_from_db()
    assert checkout3.shipping_address_id == address_usa.id
def test_no_checkouts_with_shared_addresses(checkout, order, address):
    """The task leaves checkouts alone when no address instance is shared."""
    # given
    checkout.billing_address = address
    checkout.save(update_fields=["billing_address"])
    # address linked to order is different instance, but same data
    order_address = address.get_copy()
    order.billing_address = order_address
    order.save(update_fields=["billing_address"])
    assert checkout.billing_address_id != order.billing_address_id
    # when
    fix_shared_address_instances_task()
    # then
    checkout.refresh_from_db()
    assert checkout.billing_address_id == address.id
@mock.patch(
    "saleor.checkout.migrations.tasks.saleor3_21.fix_shared_address_instances_task.delay"
)
def test_task_switches_fields(mock_delay, checkout, order, address):
    """After finding no shared billing addresses, the task re-queues for shipping."""
    # given
    # only shared shipping address exists
    checkout.shipping_address = address
    checkout.save(update_fields=["shipping_address"])
    order.shipping_address = address
    order.save(update_fields=["shipping_address"])
    # when
    # Trigger with default billing field
    fix_shared_address_instances_task()
    # then
    # Should call itself with shipping field because no billing addresses were found to fix
    mock_delay.assert_called_once_with(field=SHIPPING_FIELD)
@mock.patch("saleor.checkout.migrations.tasks.saleor3_21.BATCH_SIZE", 1)
@mock.patch(
    "saleor.checkout.migrations.tasks.saleor3_21.fix_shared_address_instances_task.delay"
)
def test_task_recursion(mock_delay, checkouts_list, order, address):
    """With batch size 1, the task fixes one checkout and queues the next batch."""
    # given
    # we have 2 checkouts sharing same address with order
    # batch size is 1, so it should process first checkout and then call delay
    checkout1 = checkouts_list[0]
    checkout2 = checkouts_list[1]
    order.billing_address = address
    order.save(update_fields=["billing_address"])
    checkout1.billing_address = address
    checkout2.billing_address = address
    Checkout.objects.bulk_update([checkout1, checkout2], ["billing_address"])
    # when
    fix_shared_address_instances_task()
    # then
    checkout1.refresh_from_db()
    checkout2.refresh_from_db()
    # only one should be fixed in first batch
    fixed_checkouts = [
        c for c in [checkout1, checkout2] if c.billing_address_id != address.id
    ]
    assert len(fixed_checkouts) == 1
    # ensure it triggered next batch
    mock_delay.assert_called_once_with(field=BILLING_FIELD)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/checkout/tests/test_migrations_tasks.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 124,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/core/tests/test_sqs.py | from datetime import timedelta
from unittest.mock import MagicMock, Mock, patch
import pytest
from django.utils import timezone
from freezegun import freeze_time
from ..sqs import Channel
@pytest.mark.parametrize(
    ("eta_offset", "expected_delay"),
    [
        (timedelta(seconds=359, microseconds=84649), 359),
        # max delay for SQS
        (timedelta(minutes=16), 900),
        # no negative delays
        (-timedelta(minutes=10), 0),
    ],
)
# The timestamp previously contained stray literal quotes and only worked
# thanks to lenient date parsing; pass a plain ISO-8601 string instead.
@freeze_time("2025-12-22T09:34:40.915351+00:00")
def test_sqs_channel_put_delay_seconds(eta_offset, expected_delay):
    """Ensure `_put` turns the message `eta` header into an SQS DelaySeconds.

    The delay must be clamped to SQS limits: never negative, at most 900s.
    """
    # given
    mock_sqs_client = MagicMock()
    mock_session = Mock()
    mock_session.client.return_value = mock_sqs_client
    # Create a mock connection object with all required attributes
    mock_connection = Mock()
    mock_connection._used_channel_ids = []
    mock_connection.channel_max = 5
    mock_connection.client.transport_options = {}
    queue_name = "test-queue"
    message = {
        # eta is offset from the frozen time above by `eta_offset`
        "headers": {"eta": (timezone.now() + eta_offset).isoformat()},
        "properties": {},
    }
    with patch("boto3.session.Session", return_value=mock_session):
        channel = Channel(mock_connection)
        # when
        channel._put(queue_name, message)
    # then
    mock_sqs_client.send_message.assert_called_once()
    call_kwargs = mock_sqs_client.send_message.call_args.kwargs
    assert "DelaySeconds" in call_kwargs
    assert call_kwargs["DelaySeconds"] == expected_delay
# The timestamp previously contained stray literal quotes and only worked
# thanks to lenient date parsing; pass a plain ISO-8601 string instead.
@freeze_time("2025-12-22T09:34:40.915351+00:00")
def test_sqs_channel_skips_delay_seconds_for_fifo():
    """Ensure `_put` omits DelaySeconds for FIFO queues, which don't support it."""
    # given
    mock_sqs_client = MagicMock()
    mock_session = Mock()
    mock_session.client.return_value = mock_sqs_client
    # Create a mock connection object with all required attributes
    mock_connection = Mock()
    mock_connection._used_channel_ids = []
    mock_connection.channel_max = 5
    mock_connection.client.transport_options = {}
    queue_name = "test-queue.fifo"
    message = {
        # eta is set to 10 seconds later than the frozen time above
        "headers": {"eta": (timezone.now() + timedelta(seconds=10)).isoformat()},
        "properties": {},
    }
    with patch("boto3.session.Session", return_value=mock_session):
        channel = Channel(mock_connection)
        # when
        channel._put(queue_name, message)
    # then
    mock_sqs_client.send_message.assert_called_once()
    call_kwargs = mock_sqs_client.send_message.call_args.kwargs
    # DelaySeconds is not supported for FIFO queues
    assert "DelaySeconds" not in call_kwargs
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/core/tests/test_sqs.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 68,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/app/tests/tasks/test_saleor3_23.py | from saleor.app.migrations.tasks.saleor3_23 import (
fill_app_extension_settings_task,
)
from saleor.app.models import App, AppExtension
def test_skip_settings_if_filled():
    """The migration task must not overwrite already-populated settings."""
    # given
    app = App.objects.create(name="Test App", is_active=True)
    extension = AppExtension.objects.create(
        app=app,
        label="Widget Extension",
        url="http://example.com/widget",
        mount="product_details_widgets",
        target="widget",
        http_target_method="POST",
        settings={"some": "data"},
    )
    # when
    # Run migration
    fill_app_extension_settings_task()
    # then
    extension.refresh_from_db()
    # Non empty should be ignored
    assert extension.settings == {"some": "data"}
def test_fill_settings_json_for_new_tab_target():
    """For new_tab extensions, the task copies http_target_method into settings."""
    # given
    app = App.objects.create(name="Test App", is_active=True)
    # Create new_tab extension with http_target_method
    extension = AppExtension.objects.create(
        app=app,
        label="New Tab Extension",
        url="http://example.com/newtab",
        mount="product_overview_create",
        target="new_tab",
        http_target_method="GET",
        settings={},
    )
    # when
    fill_app_extension_settings_task()
    # then
    extension.refresh_from_db()
    assert extension.settings == {"newTabTarget": {"method": "GET"}}
def test_fill_settings_json_skips_non_widget_non_new_tab_targets():
    """Extensions with other targets (e.g. popup) keep their settings untouched."""
    # given
    app = App.objects.create(name="Test App", is_active=True)
    # Create popup extension
    extension = AppExtension.objects.create(
        app=app,
        label="Popup Extension",
        url="http://example.com/popup",
        mount="product_overview_create",
        target="popup",
        http_target_method="GET",
        settings={"original": "settings"},
    )
    # when
    # Run migration
    fill_app_extension_settings_task()
    # then
    extension.refresh_from_db()
    # Settings should remain unchanged for non-widget/non-new_tab targets
    assert extension.settings == {"original": "settings"}
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/app/tests/tasks/test_saleor3_23.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 61,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/account/mutations/staff/utils.py | CUSTOMER_UPDATE_FIELDS = {
"first_name",
"last_name",
"email",
"is_active",
"note",
"external_reference",
"is_confirmed",
"metadata",
"private_metadata",
}
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/account/mutations/staff/utils.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/giftcard/gateway.py | from decimal import Decimal
from typing import Annotated
from uuid import uuid4
import pydantic
from django.db import transaction
from django.db.models import Exists, F, OuterRef, Q
from django.utils import timezone
from ..checkout.models import Checkout
from ..core.prices import quantize_price
from ..graphql.payment.mutations.transaction.utils import (
create_transaction_event_requested,
)
from ..order.models import Order
from ..payment import TransactionAction, TransactionEventType
from ..payment.interface import (
TransactionSessionData,
TransactionSessionResult,
)
from ..payment.models import TransactionEvent, TransactionItem
from ..payment.utils import (
create_transaction_event_from_request_and_webhook_response,
)
from .const import GIFT_CARD_PAYMENT_GATEWAY_ID
from .models import GiftCard
class GiftCardPaymentGatewayDataSchema(pydantic.BaseModel):
    """Schema for the ``data`` payload sent to the gift card payment gateway."""

    # Gift card codes are constrained to 8-16 characters.
    code: Annotated[
        str,
        pydantic.Field(
            description="Gift card code.",
            min_length=8,
            max_length=16,
        ),
    ]
class GiftCardPaymentGatewayException(Exception):
    """Raised when a gift card payment gateway operation cannot be completed."""

    def __init__(self, msg=None):
        # Fall back to a generic message when none is supplied.
        super().__init__(msg if msg is not None else "Gift card payment gateway error")
def transaction_initialize_session_with_gift_card_payment_method(
    transaction_session_data: "TransactionSessionData",
    source_object: Checkout | Order,
) -> "TransactionSessionResult":
    """Initialize session for gift card payment method.

    Attach payment method app identifier to the transaction, validate
    transaction data and gift card. Since gift card funds can be authorized to
    only one checkout at the time this function also detaches gift card from
    any checkouts it has been previously attached to.

    Returns a synthetic AUTHORIZATION_SUCCESS or AUTHORIZATION_FAILURE
    gateway response; no external service is called.
    """
    attach_app_identifier_to_transaction(transaction_session_data)
    gift_card = None
    try:
        validate_transaction_session_data(transaction_session_data, source_object)
        gift_card = validate_and_get_gift_card(transaction_session_data)
    except GiftCardPaymentGatewayException as exc:
        return TransactionSessionResult(
            app_identifier=GIFT_CARD_PAYMENT_GATEWAY_ID,
            response={
                "result": TransactionEventType.AUTHORIZATION_FAILURE.upper(),
                "pspReference": str(uuid4()),
                "amount": transaction_session_data.action.amount,
                # Fix: serialize the exception explicitly — every other branch
                # puts a plain string under "message" and the response payload
                # must stay JSON-serializable.
                "message": str(exc),
            },
        )
    else:
        return TransactionSessionResult(
            app_identifier=GIFT_CARD_PAYMENT_GATEWAY_ID,
            response={
                "result": TransactionEventType.AUTHORIZATION_SUCCESS.upper(),
                "pspReference": str(uuid4()),
                "amount": transaction_session_data.action.amount,
                "message": f"Gift card (ending: {gift_card.display_code}).",
                "actions": [TransactionAction.CANCEL.upper()],
            },
        )
    finally:
        # Runs before either return is delivered: release the card from older
        # checkout transactions and tie it to the current one (both helpers
        # no-op when gift_card is None, i.e. on validation failure).
        detach_gift_card_from_previous_checkout_transactions(gift_card)
        attach_gift_card_to_transaction(transaction_session_data, gift_card)
def attach_app_identifier_to_transaction(
    transaction_session_data: "TransactionSessionData",
):
    """Mark the session's transaction as handled by the gift card payment gateway."""
    transaction_item = transaction_session_data.transaction
    transaction_item.app_identifier = GIFT_CARD_PAYMENT_GATEWAY_ID
    transaction_item.save(update_fields=["app_identifier"])
def validate_transaction_session_data(
    transaction_session_data: "TransactionSessionData",
    source_object: Checkout | Order,
):
    """Check the source object is a Checkout and the gateway payload matches the schema.

    Raises:
        GiftCardPaymentGatewayException: when the source object is not a
            checkout, or when the payment gateway data fails schema validation.
    """
    if not isinstance(source_object, Checkout):
        raise GiftCardPaymentGatewayException(
            msg=f"Cannot initialize transaction for payment gateway: {GIFT_CARD_PAYMENT_GATEWAY_ID} and object type other than Checkout."
        )
    gateway_payload = transaction_session_data.payment_gateway_data.data
    try:
        GiftCardPaymentGatewayDataSchema.model_validate(gateway_payload)
    except pydantic.ValidationError as exc:
        raise GiftCardPaymentGatewayException(
            msg="Incorrect payment gateway data."
        ) from exc
def validate_and_get_gift_card(
    transaction_session_data: "TransactionSessionData",
):
    """Fetch and row-lock the gift card, ensuring it can cover the requested amount.

    Locks the card with ``select_for_update`` so concurrent sessions cannot
    spend the same balance.

    Raises:
        GiftCardPaymentGatewayException: when no matching active gift card
            exists, or its balance is lower than the requested amount.
    """
    action = transaction_session_data.action
    locked_card_qs = (
        GiftCard.objects.active(date=timezone.now().date())
        .filter(
            code=transaction_session_data.payment_gateway_data.data["code"],  # type: ignore[call-overload, index]
            currency=action.currency,
        )
        .select_for_update()
    )
    try:
        gift_card = locked_card_qs.get()
    except GiftCard.DoesNotExist as exc:
        raise GiftCardPaymentGatewayException(
            msg="Gift card code is not valid."
        ) from exc
    if action.amount > gift_card.current_balance_amount:
        raise GiftCardPaymentGatewayException(
            msg=f"Gift card has insufficient amount ({quantize_price(gift_card.current_balance_amount, gift_card.currency)}) "
            f"to cover requested amount ({quantize_price(action.amount, action.currency)})."
        )
    return gift_card
def attach_gift_card_to_transaction(
    transaction_session_data: "TransactionSessionData",
    gift_card: GiftCard | None,
):
    """Link the (optional) gift card to the session's transaction item; no-op when None."""
    if gift_card is None:
        return
    transaction_item = transaction_session_data.transaction
    transaction_item.gift_card = gift_card
    transaction_item.save(update_fields=["gift_card"])
def detach_gift_card_from_previous_checkout_transactions(
    gift_card: GiftCard | None,
):
    """Find all gift card payment gateway transactions tied to a checkout and perform authorization cancellation for the same amount as authorization was granted.

    The function is used to ensure a single gift card does not authorize funds to more than a single checkout at the time.
    No-op when ``gift_card`` is None.
    """
    if not gift_card:
        return
    # Detach gift card from the previously attached transaction items and create authorization cancellation events.
    transactions_to_cancel_qs = TransactionItem.objects.filter(
        # must cancel transactions for gift card payment gateway
        Q(app_identifier=GIFT_CARD_PAYMENT_GATEWAY_ID),
        # must cancel transactions for this gift card
        Q(gift_card=gift_card),
        # must cancel transactions where checkout identifier is not empty
        Q(checkout_id__isnull=False),
        # must cancel transactions where order identifier is empty (transaction is not
        # tied to an order yet)
        Q(order_id__isnull=True),
    )
    for transaction_item in transactions_to_cancel_qs:
        # Record a CANCEL request event, then immediately emit a synthetic
        # success response for the full authorized amount.
        request_event = create_transaction_event_requested(
            transaction_item,
            transaction_item.amount_authorized.amount,
            TransactionAction.CANCEL,
            app_identifier=GIFT_CARD_PAYMENT_GATEWAY_ID,
        )
        response: dict[str, str | Decimal | list | None] = {
            "result": TransactionEventType.CANCEL_SUCCESS.upper(),
            "pspReference": str(uuid4()),
            "amount": transaction_item.amount_authorized.amount,
            "message": f"Gift card (code ending with: {gift_card.display_code}) has been authorized as payment method in a different checkout or has been authorized in the same checkout again.",
            # No follow-up actions remain once the authorization is cancelled.
            "actions": [],
        }
        create_transaction_event_from_request_and_webhook_response(
            request_event,
            None,
            transaction_webhook_response=response,
        )
    # Finally drop the gift card reference from every cancelled transaction.
    transactions_to_cancel_qs.update(gift_card=None)
def charge_gift_card_transactions(
    order: "Order",
):
    """Find all gift card payment gateway transactions tied to an order and attempt to charge funds from gift cards.

    If gift card cannot be found or has insufficient funds the charge request fails.
    """
    # Order object may already have prefetched related objects.
    # Prefetched payment transactions cause logic called a few layers beneath to operate on outdated
    # order transactions therefore here the cache is dropped.
    if hasattr(order, "_prefetched_objects_cache"):
        order._prefetched_objects_cache.pop("payment_transactions", None)
    # Ensure that gift card transaction is not attempted to be charged more than once:
    # skip transactions that already have a CHARGE_REQUEST event.
    gift_card_transactions = order.payment_transactions.filter(
        ~Exists(
            TransactionEvent.objects.filter(
                transaction=OuterRef("pk"), type=TransactionEventType.CHARGE_REQUEST
            )
        ),
        app_identifier=GIFT_CARD_PAYMENT_GATEWAY_ID,
        gift_card__isnull=False,
        authorized_value__gt=Decimal(0),
        charged_value=Decimal(0),
    )
    for gift_card_transaction in gift_card_transactions:
        request_event = create_transaction_event_requested(
            gift_card_transaction,
            gift_card_transaction.amount_authorized.amount,
            TransactionAction.CHARGE,
            app_identifier=GIFT_CARD_PAYMENT_GATEWAY_ID,
        )
        # Failure response by default; mutated to a success response below
        # once the balance update goes through.
        response = {
            "result": TransactionEventType.CHARGE_FAILURE.upper(),
            "pspReference": str(uuid4()),
            "amount": gift_card_transaction.amount_authorized.amount,
            "message": "Gift card could not be found.",
        }
        if not gift_card_transaction.gift_card_id:
            create_transaction_event_from_request_and_webhook_response(
                request_event,
                None,
                transaction_webhook_response=response,
            )
            continue
        try:
            with transaction.atomic():
                # Row-lock the gift card so concurrent charges cannot spend
                # the same balance.
                gift_card = (
                    GiftCard.objects.filter(id=gift_card_transaction.gift_card_id)
                    .select_for_update()
                    .get()
                )
                if (
                    gift_card_transaction.authorized_value
                    > gift_card.current_balance_amount
                ):
                    response["message"] = (
                        f"Gift card has insufficient amount ({quantize_price(gift_card.current_balance_amount, gift_card.currency)}) "
                        f"to cover requested amount ({quantize_price(gift_card_transaction.authorized_value, gift_card_transaction.currency)})."
                    )
                else:
                    gift_card.current_balance_amount -= (
                        gift_card_transaction.authorized_value
                    )
                    gift_card.save(update_fields=["current_balance_amount"])
                    response["result"] = TransactionEventType.CHARGE_SUCCESS.upper()
                    response["message"] = (
                        f"Gift card (ending: {gift_card.display_code})."
                    )
                    response["actions"] = [TransactionAction.REFUND.upper()]
        except GiftCard.DoesNotExist:
            # Gift card must have been just deleted.
            # Eat the exception, failure response dict is already prepared.
            pass
        create_transaction_event_from_request_and_webhook_response(
            request_event,
            None,
            transaction_webhook_response=response,
        )
def cancel_gift_card_transaction(
    transaction_item: "TransactionItem", request_event: "TransactionEvent"
):
    """Perform authorization cancellation for the same amount as authorization was granted.

    If checkout no longer exists or CANCEL action is not available for the transaction authorization cancellation fails.
    """
    amount = request_event.amount_value
    cancel_allowed = (
        transaction_item.checkout is not None
        and TransactionAction.CANCEL in transaction_item.available_actions
    )
    response: dict[str, str | Decimal | list | None]
    if cancel_allowed:
        response = {
            "result": TransactionEventType.CANCEL_SUCCESS.upper(),
            "pspReference": str(uuid4()),
            "amount": amount,
        }
        # Cancelling the whole authorization leaves no further actions.
        if amount >= transaction_item.authorized_value:
            response["actions"] = []
    else:
        response = {
            "result": TransactionEventType.CANCEL_FAILURE.upper(),
            "pspReference": str(uuid4()),
            "amount": amount,
        }
    create_transaction_event_from_request_and_webhook_response(
        request_event,
        None,
        transaction_webhook_response=response,
    )
def refund_gift_card_transaction(
    transaction_item: "TransactionItem", request_event: "TransactionEvent"
):
    """Refund funds to a gift card which previously were charged from the same gift card.

    If gift card no longer exists refund fails.
    """
    amount = request_event.amount_value
    # Assume failure until the balance update succeeds.
    response: dict[str, str | Decimal | list | None] = {
        "result": TransactionEventType.REFUND_FAILURE.upper(),
        "pspReference": str(uuid4()),
        "amount": amount,
        "message": "Gift card could not be found.",
    }
    if transaction_item.gift_card_id:
        try:
            with transaction.atomic():
                locked_card = (
                    GiftCard.objects.filter(id=transaction_item.gift_card_id)
                    .select_for_update()
                    .get()
                )
                # DB-side increment via F() avoids a read-modify-write race.
                locked_card.current_balance_amount = (
                    F("current_balance_amount") + amount
                )
                locked_card.save(update_fields=["current_balance_amount"])
        except GiftCard.DoesNotExist:
            # Gift card must have been just deleted.
            # Eat the exception, failure response dict is already prepared.
            pass
        else:
            response = {
                "result": TransactionEventType.REFUND_SUCCESS.upper(),
                "pspReference": str(uuid4()),
                "amount": amount,
            }
            # Refunding everything that was charged leaves no further actions.
            if amount >= transaction_item.charged_value:
                response["actions"] = []
    create_transaction_event_from_request_and_webhook_response(
        request_event,
        None,
        transaction_webhook_response=response,
    )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/giftcard/gateway.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 325,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/tests/e2e/gift_cards/test_gift_cards.py | import pytest
from ....graphql.order.enums import OrderChargeStatusEnum
from ....tests import race_condition
from ...e2e.utils import assign_permissions
from ..checkout.utils.checkout_add_promo_code import checkout_add_promo_code
from ..checkout.utils.checkout_complete import checkout_complete
from ..checkout.utils.checkout_create import checkout_create
from ..orders.utils.order_query import order_query
from ..product.utils.preparing_product import prepare_product
from ..shop.utils import prepare_shop
from ..transactions.utils.transaction_initialize import (
transaction_initialize_for_gift_card_payment_gateway,
)
from .utils.gift_card_create import create_gift_card
from .utils.gift_card_query import get_gift_card
def _prepare_shop(staff_api_client):
    """Create one channel with a single shipping zone/method and fixed order/checkout settings."""
    channel_config = {
        "shipping_zones": [
            {
                "shipping_methods": [{}],
            },
        ],
        "order_settings": {
            "allowUnpaidOrders": False,
            "automaticallyConfirmAllNewOrders": True,
        },
        "checkout_settings": {
            "automaticallyCompleteFullyPaidCheckouts": False,
        },
    }
    shop_data, _ = prepare_shop(staff_api_client, channels=[channel_config])
    return shop_data
def _prepare_product(staff_api_client, shop_data):
    """Create a non-shippable, price-10 product in the prepared channel.

    Returns a ``(product_variant_id, variant_price)`` tuple.
    """
    variant_price = 10
    product_result = prepare_product(
        staff_api_client,
        shop_data[0]["warehouse_id"],
        shop_data[0]["id"],
        variant_price,
        is_shipping_required=False,
    )
    # prepare_product returns a 3-tuple; only the variant id is needed here.
    product_variant_id = product_result[1]
    return product_variant_id, variant_price
def _prepare_checkout(api_client, lines, channel_slug):
    """Create a checkout for the given lines with a fixed customer email."""
    checkout_email = "jon.doe@saleor.io"
    return checkout_create(
        api_client,
        lines,
        channel_slug,
        email=checkout_email,
    )
@pytest.mark.e2e
def test_gift_card_added_via_add_promo_code_and_transaction_does_not_use_the_same_funds_twice(
    api_client,
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_gift_card,
    permission_manage_orders,
    shop_permissions,
):
    """Attach one gift card to a checkout both via addPromoCode and a transaction.

    After completion the card balance is reduced by twice the variant price
    (once per attachment mechanism) and the order ends up OVERCHARGED.
    """
    permissions = [
        permission_manage_product_types_and_attributes,
        permission_manage_gift_card,
        permission_manage_orders,
        *shop_permissions,
    ]
    assign_permissions(staff_api_client, permissions)
    # Step 1 - create gift card
    gift_card_initial_balance = 100
    gift_card_data = create_gift_card(staff_api_client, gift_card_initial_balance)
    # Step 2 - prepare channel and product
    shop_data = _prepare_shop(staff_api_client)
    channel_slug = shop_data[0]["slug"]
    product_variant_id, variant_price = _prepare_product(staff_api_client, shop_data)
    # Step 3 - create checkout
    checkout_data = _prepare_checkout(
        api_client,
        [
            {
                "variantId": product_variant_id,
                "quantity": 1,
            },
        ],
        channel_slug,
    )
    # Step 4 - add gift card to checkout with addPromoCode
    checkout_add_promo_code(api_client, checkout_data["id"], gift_card_data["code"])
    # Step 5 - add gift card to checkout via transaction
    transaction_initialize_for_gift_card_payment_gateway(
        api_client,
        checkout_data["id"],
        gift_card_data["code"],
        variant_price,
    )
    # Step 6 - complete checkout
    order_data = checkout_complete(api_client, checkout_data["id"])
    # Step 7 - check gift card funds
    gift_card_data = get_gift_card(staff_api_client, gift_card_data["id"])
    # Two times variant price because:
    # - gift card attached to checkout with checkoutAddPromoCode
    # - gift card attached to checkout with transaction
    assert gift_card_data["currentBalance"]["amount"] == gift_card_initial_balance - (
        2 * variant_price
    )
    # Step 8 - check order payment status and gift card transaction
    order_data = order_query(staff_api_client, order_data["id"])
    assert order_data["chargeStatus"] == OrderChargeStatusEnum.OVERCHARGED.name
    assert len(order_data["transactions"]) == 1
    assert order_data["transactions"][0]["authorizedAmount"]["amount"] == 0
    assert order_data["transactions"][0]["chargedAmount"]["amount"] == 10
@pytest.mark.e2e
def test_gift_card_detach_gift_card_from_checkout_as_gift_card_transactions_are_about_to_get_charged(
    api_client,
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_gift_card,
    permission_manage_orders,
    shop_permissions,
):
    """Re-attaching the card to another checkout right before charging does not affect the order.

    By the time ``charge_gift_card_transactions`` runs, the transaction is tied
    to an order rather than a checkout, so the competing checkout's detach does
    not apply: the original order is fully charged and the balance drops by
    exactly the variant price.
    """
    permissions = [
        permission_manage_product_types_and_attributes,
        permission_manage_gift_card,
        permission_manage_orders,
        *shop_permissions,
    ]
    assign_permissions(staff_api_client, permissions)
    # Step 1 - create gift card
    gift_card_initial_balance = 100
    gift_card_data = create_gift_card(staff_api_client, gift_card_initial_balance)
    # Step 2 - prepare channel and product
    shop_data = _prepare_shop(staff_api_client)
    channel_slug = shop_data[0]["slug"]
    product_variant_id, variant_price = _prepare_product(staff_api_client, shop_data)
    # Step 3 - create checkout
    checkout_data = _prepare_checkout(
        api_client,
        [
            {
                "variantId": product_variant_id,
                "quantity": 1,
            },
        ],
        channel_slug,
    )
    # Step 4 - add gift card to checkout via transaction
    transaction_initialize_for_gift_card_payment_gateway(
        api_client,
        checkout_data["id"],
        gift_card_data["code"],
        variant_price,
    )
    # Step 5 - complete checkout
    def attach_gift_card_to_another_checkout(*args, **kwargs):
        # Competing checkout that grabs the same gift card mid-completion.
        another_checkout_data = _prepare_checkout(
            api_client,
            [],
            channel_slug,
        )
        transaction_initialize_for_gift_card_payment_gateway(
            api_client,
            another_checkout_data["id"],
            gift_card_data["code"],
            1,
        )
    with race_condition.RunBefore(
        # At this point gift card will not get detached because when
        # charge_gift_card_transactions is executed the transaction is no longer
        # attached to a checkout (but to an order)
        "saleor.giftcard.gateway.charge_gift_card_transactions",
        attach_gift_card_to_another_checkout,
    ):
        order_data = checkout_complete(api_client, checkout_data["id"])
    # Step 6 - check gift card funds
    gift_card_data = get_gift_card(staff_api_client, gift_card_data["id"])
    assert (
        gift_card_data["currentBalance"]["amount"]
        == gift_card_initial_balance - variant_price
    )
    # Step 7 - check order payment status and gift card transaction
    order_data = order_query(staff_api_client, order_data["id"])
    assert order_data["chargeStatus"] == OrderChargeStatusEnum.FULL.name
    assert len(order_data["transactions"]) == 1
    assert order_data["transactions"][0]["authorizedAmount"]["amount"] == 0
    assert order_data["transactions"][0]["chargedAmount"]["amount"] == variant_price
@pytest.mark.e2e
def test_gift_card_detach_gift_card_from_checkout_as_checkout_gets_completed(
    api_client,
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_gift_card,
    permission_manage_orders,
    shop_permissions,
):
    """Losing the gift card to another checkout just before order creation leaves the order unpaid.

    The competing checkout authorizes the same card after ``checkout_complete``
    passed its payment checks but before the order is created; the resulting
    order has chargeStatus NONE, nothing charged, and the card balance is
    untouched.
    """
    permissions = [
        permission_manage_product_types_and_attributes,
        permission_manage_gift_card,
        permission_manage_orders,
        *shop_permissions,
    ]
    assign_permissions(staff_api_client, permissions)
    # Step 1 - create gift card
    gift_card_initial_balance = 100
    gift_card_data = create_gift_card(staff_api_client, gift_card_initial_balance)
    # Step 2 - prepare channel and product
    shop_data = _prepare_shop(staff_api_client)
    channel_slug = shop_data[0]["slug"]
    product_variant_id, variant_price = _prepare_product(staff_api_client, shop_data)
    # Step 3 - create checkout
    checkout_data = _prepare_checkout(
        api_client,
        [
            {
                "variantId": product_variant_id,
                "quantity": 1,
            },
        ],
        channel_slug,
    )
    # Step 4 - add gift card to checkout via transaction
    transaction_initialize_for_gift_card_payment_gateway(
        api_client,
        checkout_data["id"],
        gift_card_data["code"],
        variant_price,
    )
    # Step 5 - complete checkout
    def attach_gift_card_to_another_checkout(*args, **kwargs):
        # Competing checkout that grabs the same gift card mid-completion.
        another_checkout_data = _prepare_checkout(
            api_client,
            [],
            channel_slug,
        )
        transaction_initialize_for_gift_card_payment_gateway(
            api_client,
            another_checkout_data["id"],
            gift_card_data["code"],
            1,
        )
    with race_condition.RunBefore(
        # Complete checkout logic can get started because checkout's authorization
        # status is set to full at that time. Just before order gets created and transactions are charged the
        # gift card is detached from the checkout.
        "saleor.checkout.complete_checkout.create_order_from_checkout",
        attach_gift_card_to_another_checkout,
    ):
        order_data = checkout_complete(api_client, checkout_data["id"])
    # Step 6 - check gift card funds
    gift_card_data = get_gift_card(staff_api_client, gift_card_data["id"])
    assert gift_card_data["currentBalance"]["amount"] == gift_card_initial_balance
    # Step 7 - check order payment status and gift card transaction
    order_data = order_query(staff_api_client, order_data["id"])
    assert order_data["chargeStatus"] == OrderChargeStatusEnum.NONE.name
    assert len(order_data["transactions"]) == 1
    assert order_data["transactions"][0]["authorizedAmount"]["amount"] == 0
    assert order_data["transactions"][0]["chargedAmount"]["amount"] == 0
@pytest.mark.e2e
def test_gift_card_simultaneous_complete_checkout_of_two_checkouts_using_gift_card_differently(
    api_client,
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_gift_card,
    permission_manage_orders,
    shop_permissions,
):
    """Two checkouts spend the same gift card via different mechanisms.

    Checkout A uses the card as a promo code; checkout B uses a gift card
    transaction. A is completed while B is about to charge its transactions;
    both orders end fully paid and the card balance drops by twice the
    variant price.
    """
    permissions = [
        permission_manage_product_types_and_attributes,
        permission_manage_gift_card,
        permission_manage_orders,
        *shop_permissions,
    ]
    assign_permissions(staff_api_client, permissions)
    # Step 1 - create gift card
    gift_card_initial_balance = 100
    gift_card_data = create_gift_card(staff_api_client, gift_card_initial_balance)
    # Step 2 - prepare channel and product
    shop_data = _prepare_shop(staff_api_client)
    channel_slug = shop_data[0]["slug"]
    product_variant_id, variant_price = _prepare_product(staff_api_client, shop_data)
    # Step 3 - create checkout
    checkout_data = _prepare_checkout(
        api_client,
        [
            {
                "variantId": product_variant_id,
                "quantity": 1,
            },
        ],
        channel_slug,
    )
    # Step 4 - add gift card to checkout with addPromoCode
    checkout_add_promo_code(api_client, checkout_data["id"], gift_card_data["code"])
    # Step 5 - create another checkout
    another_checkout_data = _prepare_checkout(
        api_client,
        [
            {
                "variantId": product_variant_id,
                "quantity": 1,
            },
        ],
        channel_slug,
    )
    # Step 6 - add gift card to another checkout via transaction
    transaction_initialize_for_gift_card_payment_gateway(
        api_client,
        another_checkout_data["id"],
        gift_card_data["code"],
        variant_price,
    )
    # Step 7 - complete both checkouts (the first one mid-way through the second)
    order_data = None
    def do_complete_checkout(*args, **kwargs):
        nonlocal order_data
        order_data = checkout_complete(api_client, checkout_data["id"])
    with race_condition.RunBefore(
        "saleor.giftcard.gateway.charge_gift_card_transactions",
        do_complete_checkout,
    ):
        another_order_data = checkout_complete(api_client, another_checkout_data["id"])
    # Step 8 - check gift card funds
    gift_card_data = get_gift_card(staff_api_client, gift_card_data["id"])
    assert gift_card_data["currentBalance"]["amount"] == gift_card_initial_balance - (
        2 * variant_price
    )
    # Step 9 - check order payment status and gift card transaction
    order_data = order_query(staff_api_client, order_data["id"])
    assert order_data["chargeStatus"] == OrderChargeStatusEnum.FULL.name
    assert len(order_data["transactions"]) == 0
    another_order_data = order_query(staff_api_client, another_order_data["id"])
    assert another_order_data["chargeStatus"] == OrderChargeStatusEnum.FULL.name
    assert len(another_order_data["transactions"]) == 1
    assert another_order_data["transactions"][0]["authorizedAmount"]["amount"] == 0
    assert (
        another_order_data["transactions"][0]["chargedAmount"]["amount"]
        == variant_price
    )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/tests/e2e/gift_cards/test_gift_cards.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 344,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/tests/e2e/gift_cards/utils/gift_card_query.py | from ...utils import get_graphql_content
GIFT_CARD_QUERY = """
query GiftCard($id: ID!) {
giftCard(id: $id) {
id
code
initialBalance {
amount
currency
}
currentBalance {
amount
currency
}
}
}
"""
def get_gift_card(
    staff_api_client,
    gift_card_id,
):
    """Fetch a single gift card by ID and return the ``giftCard`` payload."""
    response = staff_api_client.post_graphql(
        GIFT_CARD_QUERY,
        {"id": gift_card_id},
    )
    content = get_graphql_content(response)
    return content["data"]["giftCard"]
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/tests/e2e/gift_cards/utils/gift_card_query.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/checkout/dataloaders/checkout_delivery.py | from itertools import chain
from uuid import UUID
from promise import Promise
from ....checkout.delivery_context import get_or_fetch_checkout_deliveries
from ....checkout.models import CheckoutDelivery
from ....core.db.connection import allow_writer_in_context
from ...core.dataloaders import DataLoader
from ...utils import get_user_or_app_from_context
from .checkout_infos import CheckoutInfoByCheckoutTokenLoader
class CheckoutDeliveryByIdLoader(DataLoader[UUID, CheckoutDelivery]):
    """Batch-load ``CheckoutDelivery`` rows by primary key."""

    context_key = "checkout_delivery_by_id"

    def batch_load(self, keys):
        """Return deliveries in the same order as ``keys`` (``None`` for misses)."""
        # Renamed from ``shipping_methods``: the mapping holds
        # CheckoutDelivery instances, not shipping methods.
        deliveries = CheckoutDelivery.objects.using(
            self.database_connection_name
        ).in_bulk(keys)
        return [deliveries.get(key) for key in keys]
class CheckoutDeliveriesOnlyValidByCheckoutIdAndWebhookSyncLoader(
    DataLoader[tuple[UUID, bool], list[CheckoutDelivery]]
):
    """Load valid checkout deliveries keyed by ``(checkout token, allow_sync_webhooks)``."""

    context_key = (
        "checkout_deliveries_only_valid_by_checkout_id_and_webhook_sync_loader"
    )

    def batch_load(self, keys):
        requestor = get_user_or_app_from_context(self.context)
        # Resolve CheckoutInfo for every requested checkout token first.
        checkout_infos = CheckoutInfoByCheckoutTokenLoader(self.context).load_many(
            [checkout_id for (checkout_id, _) in keys]
        )

        def refresh_delivery_dataloader(
            deliveries: list[list[CheckoutDelivery]],
        ) -> list[list[CheckoutDelivery]]:
            # Re-prime the by-id loader so subsequent by-id lookups return the
            # freshly fetched delivery instances instead of stale cache entries.
            for delivery in chain.from_iterable(deliveries):
                CheckoutDeliveryByIdLoader(self.context).clear(delivery.id)
                CheckoutDeliveryByIdLoader(self.context).prime(delivery.id, delivery)
            return deliveries

        def with_checkout_infos(checkout_infos):
            results = []
            # NOTE(review): the writer connection is explicitly allowed here,
            # presumably because fetching deliveries can persist them — confirm
            # against get_or_fetch_checkout_deliveries.
            with allow_writer_in_context(self.context):
                for checkout_info, (_, allow_sync_webhooks) in zip(
                    checkout_infos, keys, strict=True
                ):
                    results.append(
                        get_or_fetch_checkout_deliveries(
                            checkout_info=checkout_info,
                            requestor=requestor,
                            allow_sync_webhooks=allow_sync_webhooks,
                        )
                    )
            return Promise.all(results).then(refresh_delivery_dataloader)

        return checkout_infos.then(with_checkout_infos)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/checkout/dataloaders/checkout_delivery.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 49,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/graphql/core/tests/garbage_collection/test_errors.py | import gc
import json
from unittest import mock
import pytest
from django.core.serializers.json import DjangoJSONEncoder
from django.test import override_settings
from graphql.error import GraphQLError
from ....api import backend, schema
from ....tests.utils import get_graphql_content
from ....views import GraphQLView
from .utils import (
clean_up_after_garbage_collection_test,
disable_gc_for_garbage_collection_test,
)
def raise_graphql_error(*_args, **_kwargs):
    """Side-effect callable for mocks: always raises ``GraphQLError``."""
    raise GraphQLError("Exception in resolver")
PRODUCTS_QUERY_PERMISSION_REQUIRED = """
query FetchProducts($channel: String!){
products(first: 10, channel: $channel) {
edges {
node {
id
name
channelListings{
id
}
}
}
}
}
"""
# Group all tests that require garbage collection so that they do not run concurrently.
# This is necessary to ensure that tests don't interfere with each other.
# Without grouping we could receive false positive results.
@pytest.mark.xdist_group(name="garbage_collection")
def test_permission_error(rf, product, channel_USD):
    """A query failing with PermissionDenied must not leave uncollectable reference cycles."""
    try:
        # given
        # Disable automatic garbage collection and set debugging flag.
        disable_gc_for_garbage_collection_test()
        # Prepare request body with GraphQL query and variables.
        variables = {"channel": channel_USD.slug}
        data = {"query": PRODUCTS_QUERY_PERMISSION_REQUIRED, "variables": variables}
        data = json.dumps(data, cls=DjangoJSONEncoder)
        # when
        # Execute the query with permission error.
        content = get_graphql_content(
            GraphQLView(backend=backend, schema=schema).handle_query(
                rf.post(path="/graphql/", data=data, content_type="application/json")
            ),
            ignore_errors=True,
        )
        # Enforce garbage collection to populate the garbage list for inspection.
        gc.collect()
        # then
        # Ensure that the garbage list is empty. The garbage list is only valid
        # until the next collection cycle so we can only make assertions about it
        # before re-enabling automatic collection.
        assert gc.garbage == []
        # Ensure that the query returned the expected error.
        assert content["data"]["products"]["edges"][0]["node"]["name"] == product.name
        assert (
            content["errors"][0]["extensions"]["exception"]["code"]
            == "PermissionDenied"
        )
    # Restore garbage collection settings to their original state. This should always be run to avoid interfering
    # with other tests to ensure that code should be executed in the `finally' block.
    finally:
        clean_up_after_garbage_collection_test()
PRODUCTS_QUERY = """
query FetchProducts($first: Int, $channel: String!){
products(first: $first, channel: $channel) {
edges {
node {
id
name
variants {
id
}
}
}
}
}
"""
# Group all tests that require garbage collection so that they do not run concurrently.
# This is necessary to ensure that tests don't interfere with each other.
# Without grouping we could receive false positive results.
@pytest.mark.xdist_group(name="garbage_collection")
@override_settings(GRAPHQL_QUERY_MAX_COMPLEXITY=5)
def test_query_cost_error(rf, product, channel_USD):
    """A query rejected for exceeding max complexity must not leave reference cycles."""
    try:
        # given
        # Disable automatic garbage collection and set debugging flag.
        disable_gc_for_garbage_collection_test()
        # Prepare request body with GraphQL query and variables.
        variables = {"channel": channel_USD.slug, "first": 10}
        data = {"query": PRODUCTS_QUERY, "variables": variables}
        data = json.dumps(data, cls=DjangoJSONEncoder)
        # when
        # Execute the query with cost error.
        content = get_graphql_content(
            GraphQLView(backend=backend, schema=schema).handle_query(
                rf.post(path="/graphql/", data=data, content_type="application/json")
            ),
            ignore_errors=True,
        )
        # Enforce garbage collection to populate the garbage list for inspection.
        gc.collect()
        # then
        # Ensure that the garbage list is empty. The garbage list is only valid
        # until the next collection cycle so we can only make assertions about it
        # before re-enabling automatic collection.
        assert gc.garbage == []
        # Ensure that the query returned the expected error.
        assert (
            content["errors"][0]["extensions"]["exception"]["code"] == "QueryCostError"
        )
    # Restore garbage collection settings to their original state. This should always be run to avoid interfering
    # with other tests to ensure that code should be executed in the `finally' block.
    finally:
        clean_up_after_garbage_collection_test()
# Group all tests that require garbage collection so that they do not run concurrently.
# This is necessary to ensure that tests don't interfere with each other.
# Without grouping we could receive false positive results.
@pytest.mark.xdist_group(name="garbage_collection")
def test_exception_in_resolver(rf, product, channel_USD):
    """A GraphQLError raised inside a resolver must not leave reference cycles."""
    try:
        # given
        # Disable automatic garbage collection and set debugging flag.
        disable_gc_for_garbage_collection_test()
        # Prepare request body with GraphQL query and variables.
        variables = {"channel": channel_USD.slug, "first": 10}
        data = {"query": PRODUCTS_QUERY, "variables": variables}
        data = json.dumps(data, cls=DjangoJSONEncoder)
        # when
        # Execute the query with GraphQLError in the resolver.
        with mock.patch(
            "saleor.graphql.product.schema.resolve_products",
            side_effect=raise_graphql_error,
        ) as _mocked_resolver:
            content = get_graphql_content(
                GraphQLView(backend=backend, schema=schema).handle_query(
                    rf.post(
                        path="/graphql/", data=data, content_type="application/json"
                    )
                ),
                ignore_errors=True,
            )
        # Enforce garbage collection to populate the garbage list for inspection.
        gc.collect()
        # then
        # Ensure that the garbage list is empty. The garbage list is only valid
        # until the next collection cycle so we can only make assertions about it
        # before re-enabling automatic collection.
        assert gc.garbage == []
        # Ensure that the query returned the expected error.
        assert content["errors"][0]["extensions"]["exception"]["code"] == "GraphQLError"
    # Restore garbage collection settings to their original state. This should always be run to avoid interfering
    # with other tests to ensure that code should be executed in the `finally' block.
    finally:
        clean_up_after_garbage_collection_test()
# Group all tests that require garbage collection so that they do not run concurrently.
# This is necessary to ensure that tests don't interfere with each other.
# Without grouping we could receive false positive results.
@pytest.mark.xdist_group(name="garbage_collection")
def test_exception_in_dataloader(rf, product, channel_USD):
    """Ensure a GraphQLError raised in a dataloader leaves no reference cycles."""
    try:
        # given
        # Disable automatic garbage collection and set debugging flag.
        disable_gc_for_garbage_collection_test()
        # Prepare request body with GraphQL query and variables.
        variables = {"channel": channel_USD.slug, "first": 10}
        data = {"query": PRODUCTS_QUERY, "variables": variables}
        data = json.dumps(data, cls=DjangoJSONEncoder)
        # when
        # Execute the query with GraphQLError in the dataloader.
        with mock.patch(
            "saleor.graphql.product.schema.ChannelBySlugLoader.batch_load",
            side_effect=raise_graphql_error,
        ) as _mocked_dataloader:
            content = get_graphql_content(
                GraphQLView(backend=backend, schema=schema).handle_query(
                    rf.post(
                        path="/graphql/", data=data, content_type="application/json"
                    )
                ),
                ignore_errors=True,
            )
        # Enforce garbage collection to populate the garbage list for inspection.
        gc.collect()
        # then
        # Ensure that the garbage list is empty. The garbage list is only valid
        # until the next collection cycle so we can only make assertions about it
        # before re-enabling automatic collection.
        assert gc.garbage == []
        # Ensure that the query returned the expected error.
        assert content["errors"][0]["extensions"]["exception"]["code"] == "GraphQLError"
    # Restore garbage collection settings to their original state. This must
    # always run — even on assertion failure — hence the `finally` block.
    finally:
        clean_up_after_garbage_collection_test()
STOCK_UPDATE = """
mutation ProductVariantStocksUpdate($variantId: ID!, $stocks: [StockInput!]!){
productVariantStocksUpdate(variantId: $variantId, stocks: $stocks){
productVariant {
quantityAvailable
stocks {
id
}
}
errors {
code
}
}
}
"""
# Group all tests that require garbage collection so that they do not run concurrently.
# This is necessary to ensure that tests don't interfere with each other.
# Without grouping we could receive false positive results.
@pytest.mark.xdist_group(name="garbage_collection")
def test_input_validation_error(rf, product, channel_USD):
    """Ensure a mutation input-validation error leaves no reference cycles."""
    try:
        # given
        # Disable automatic garbage collection and set debugging flag.
        disable_gc_for_garbage_collection_test()
        # Prepare request body with GraphQL query and variables.
        # Empty IDs (and a huge quantity — presumably out of the allowed range;
        # confirm against the StockInput validation) make the mutation fail
        # input validation.
        variables = {
            "variantId": "",
            "stocks": [{"warehouse": "", "quantity": 99999999999}],
        }
        data = {"query": STOCK_UPDATE, "variables": variables}
        data = json.dumps(data, cls=DjangoJSONEncoder)
        # when
        # Execute the mutation with input validation error.
        content = get_graphql_content(
            GraphQLView(backend=backend, schema=schema).handle_query(
                rf.post(path="/graphql/", data=data, content_type="application/json")
            ),
            ignore_errors=True,
        )
        # Enforce garbage collection to populate the garbage list for inspection.
        gc.collect()
        # then
        # Ensure that the garbage list is empty. The garbage list is only valid
        # until the next collection cycle so we can only make assertions about it
        # before re-enabling automatic collection.
        assert gc.garbage == []
        # Ensure that the query returned the expected error.
        assert content["errors"][0]["extensions"]["exception"]["code"] == "GraphQLError"
    # Restore garbage collection settings to their original state. This must
    # always run — even on assertion failure — hence the `finally` block.
    finally:
        clean_up_after_garbage_collection_test()
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/core/tests/garbage_collection/test_errors.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 253,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/error.py | from django.core.exceptions import ValidationError
from graphql.error import GraphQLError
from pydantic import ValidationError as PydanticValidationError
def pydantic_to_validation_error(
    exc: PydanticValidationError, default_error_code: str = "invalid"
) -> ValidationError:
    """Convert Pydantic ValidationError to Django ValidationError.

    Each pydantic error can carry a per-error code by raising
    PydanticCustomError with ``{"error_code": ...}`` in its context dict.
    Falls back to ``"required"`` for missing-field errors, and to
    ``default_error_code`` for all other built-in constraint errors.
    """
    # Accumulate a *list* of errors per field so multiple pydantic errors
    # reported for the same field are all preserved. (Previously a plain
    # assignment meant only the last error per field survived.) Django's
    # ValidationError accepts a dict mapping field names to lists of errors.
    errors: dict[str, list[ValidationError]] = {}
    for error in exc.errors():
        # The first element of ``loc`` names the offending field; errors not
        # attached to a specific field are reported under "input".
        field = str(error["loc"][0]) if error["loc"] else "input"
        if error.get("type") == "missing":
            code = "required"
        else:
            code = error.get("ctx", {}).get("error_code", default_error_code)
        errors.setdefault(field, []).append(
            ValidationError(error["msg"], code=code)
        )
    return ValidationError(errors)
def clear_traceback_locals(tb):
    """Release the local variables held by frames reachable from *tb*.

    Starts at the traceback's frame and walks the caller chain (``f_back``),
    clearing each frame. A frame that is still executing refuses to be
    cleared and raises ``RuntimeError``; the walk stops at the first such
    frame. Similar to ``traceback.clear_frames()`` but follows connected
    frames rather than ``tb_next``.
    """
    current = tb.tb_frame
    while current is not None:
        try:
            current.clear()
        except RuntimeError:
            # The frame is still executing — nothing more can be cleared.
            return
        current = current.f_back
def clear_exception_locals(exception):
    """Recursively drop frame locals referenced by *exception*.

    Clears the exception's own traceback frames, then recurses into its
    ``__cause__`` chain and — for legacy ``GraphQLError`` instances — into
    the wrapped ``original_error``.
    """
    tb = exception.__traceback__
    if tb:
        clear_traceback_locals(tb)
    cause = exception.__cause__
    if cause:
        clear_exception_locals(cause)
    # `GraphQL-Core-Legacy` stores the user-raised exception on
    # `original_error` of its `GraphQLError` wrapper.
    if isinstance(exception, GraphQLError) and hasattr(exception, "original_error"):
        clear_exception_locals(exception.original_error)
def clear_errors(errors):
    """Strip frame locals from every exception in *errors*.

    ``traceback.clear_frames()`` is not usable here: errors produced by
    `GraphQL-Core-Legacy` do not have properly connected tracebacks, so each
    exception (and its causes) is walked frame-by-frame instead.
    """
    for exc in errors:
        clear_exception_locals(exc)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/error.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 55,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/graphql/graphql_core.py | from graphql.execution import executor
from graphql.execution.base import ExecutionResult
# Keep a reference to the unpatched function so the patched version below can
# delegate to it.
original_complete_value_catching_error = executor.complete_value_catching_error


def _patched_complete_value_catching_error(*args, **kwargs):
    # `complete_value_catching_error` receives the `ResolveInfo` object as its
    # fourth positional argument.
    info = args[3]
    # Imported lazily — presumably to avoid an import cycle at module load
    # time; confirm before moving to the top of the file.
    from saleor.core.db.connection import allow_writer_in_context

    with allow_writer_in_context(info.context):
        return original_complete_value_catching_error(*args, **kwargs)
def patch_executor():
    """Patch `complete_value_catching_error` function to allow writer DB in mutations.

    The `complete_value_catching_error` function is called when resolving a field in
    GraphQL. This patch wraps each call with the `allow_writer_in_context` context
    manager. This allows to use writer DB in resolvers, when they are called via
    mutation, while they will still raise or log error when a resolver is run in
    a query.
    """
    executor.complete_value_catching_error = _patched_complete_value_catching_error
def __del_execution_context__(self):
    """Drop the `errors` attribute when an `ExecutionContext` is finalized.

    When a `ValidationError` is raised, the execution context does not store
    the error, so the attribute may legitimately be absent — tolerate that.
    """
    try:
        del self.errors
    except AttributeError:
        pass
def patch_execution_context():
    """Install `__del_execution_context__` as `ExecutionContext.__del__`.

    The execution context stores errors raised while a query runs; removing
    the `errors` attribute at finalization breaks reference cycles that would
    otherwise keep no-longer-needed objects alive.
    """
    executor.ExecutionContext.__del__ = __del_execution_context__  # type: ignore[attr-defined]
def __del_execution_result__(self):
    """Drop the `errors` attribute when an `ExecutionResult` is finalized.

    Deleting the attribute breaks reference cycles through stored exceptions.
    Guarded with `hasattr` — consistent with `__del_execution_context__` —
    because an unguarded `del` raises AttributeError inside `__del__` (shown
    as "Exception ignored" noise) whenever the attribute was already removed
    or was never set (e.g. a partially initialized instance).
    """
    if hasattr(self, "errors"):
        del self.errors
def patch_execution_result():
    """Install `__del_execution_result__` as `ExecutionResult.__del__`.

    The execution result stores errors that occurred while executing the
    query; removing the `errors` attribute at finalization breaks reference
    cycles that would otherwise keep no-longer-needed objects alive.
    """
    ExecutionResult.__del__ = __del_execution_result__  # type: ignore[attr-defined]
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/graphql_core.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 38,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
saleor/saleor:saleor/channel/tasks/saleor3_22.py | from ....celeryconf import app
from ....core.db.connection import allow_writer
from ...models import Channel
@app.task
@allow_writer()
def set_automatic_completion_delay_task():
    """Backfill `automatic_completion_delay` for auto-completing channels.

    Channels that automatically complete fully paid checkouts but have no
    delay configured get a delay of 0 (complete immediately).
    """
    stale_channels = Channel.objects.filter(
        automatically_complete_fully_paid_checkouts=True,
        automatic_completion_delay__isnull=True,
    )
    stale_channels.update(automatic_completion_delay=0)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/channel/tasks/saleor3_22.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 10,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/asgi/asgi_handler.py | """Code copied from Django Software Foundation (https://djangoproject.com/) which is licensed under the BSD 3-Clause.
Original code: https://github.com/django/django/blob/001c2f546b4053acb04f16d6b704f7b4fbca1c45/django/core/handlers/asgi.py
Modifications: we added a fix for a memory leak
(https://code.djangoproject.com/ticket/36700).
Copyright (c) Django Software Foundation and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Django nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import asyncio
import django
from asgiref.sync import sync_to_async
from django.core import signals
from django.core.exceptions import RequestAborted
from django.core.handlers.asgi import ASGIHandler, get_script_prefix
from django.urls import set_script_prefix
def get_asgi_application():
    """Initialize Django and return the patched ASGI handler.

    Drop-in replacement for `django.core.asgi.get_asgi_application` that
    returns `PatchedASGIHandler` (which carries the memory-leak fix) instead
    of the stock `ASGIHandler`.
    """
    django.setup(set_prefix=False)
    return PatchedASGIHandler()
class PatchedASGIHandler(ASGIHandler):
    """Django's ASGIHandler with a fix for a task reference-cycle memory leak.

    `handle` is copied from Django verbatim except for the `tasks.clear()`
    call at the end (see https://code.djangoproject.com/ticket/36700).
    """

    async def handle(self, scope, receive, send):
        """
        Handles the ASGI request. Called via the __call__ method.
        """  # noqa: D200, D212, D401
        # Receive the HTTP request body as a stream object.
        try:
            body_file = await self.read_body(receive)
        except RequestAborted:
            return
        # Request is complete and can be served.
        set_script_prefix(get_script_prefix(scope))
        await signals.request_started.asend(sender=self.__class__, scope=scope)
        # Get the request and check for basic issues.
        request, error_response = self.create_request(scope, body_file)
        if request is None:
            body_file.close()
            await self.send_response(error_response, send)  # type: ignore[arg-type]
            await sync_to_async(error_response.close)()  # type: ignore[union-attr]
            return

        async def process_request(request, send):
            response = await self.run_get_response(request)
            try:
                await self.send_response(response, send)
            except asyncio.CancelledError:
                # Client disconnected during send_response (ignore exception).
                pass
            return response

        # Try to catch a disconnect while getting response.
        tasks = [
            # Check the status of these tasks and (optionally) terminate them
            # in this order. The listen_for_disconnect() task goes first
            # because it should not raise unexpected errors that would prevent
            # us from cancelling process_request().
            asyncio.create_task(self.listen_for_disconnect(receive)),
            asyncio.create_task(process_request(request, send)),
        ]
        await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
        # Now wait on both tasks (they may have both finished by now).
        for task in tasks:
            if task.done():
                try:
                    task.result()
                except RequestAborted:
                    # Ignore client disconnects.
                    pass
                except AssertionError:
                    body_file.close()
                    raise
            else:
                # Allow views to handle cancellation.
                task.cancel()
                try:
                    await task
                except asyncio.CancelledError:
                    # Task re-raised the CancelledError as expected.
                    pass
        try:
            response = tasks[1].result()
        except asyncio.CancelledError:
            await signals.request_finished.asend(sender=self.__class__)
        else:
            await sync_to_async(response.close)()
        # https://code.djangoproject.com/ticket/36700
        # Tasks need to be cleared to prevent reference cycles in memory. The
        # `self.listen_for_disconnect(receive)` task always ends with
        # `RequestAborted()` when the connection is closed. That exception
        # holds a reference to the frame with `tasks` as a local variable; if
        # the list is not cleared, a reference cycle is created:
        # Task -> RequestAbortedException -> Traceback -> Frame -> Locals -> Task.
        tasks.clear()
        body_file.close()
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/asgi/asgi_handler.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 107,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/graphql/checkout/tests/test_checkout_discount_expiration.py | from datetime import timedelta
from unittest import mock
import graphene
import pytest
from django.utils import timezone
from freezegun import freeze_time
from ....discount.utils.checkout import (
create_or_update_discount_objects_from_promotion_for_checkout,
)
from ...tests.utils import get_graphql_content
QUERY_CHECKOUT_BASIC_FIELDS = """
query getCheckout($id: ID) {
checkout(id: $id) {
id
token
email
}
}
"""
QUERY_CHECKOUT_LINES_ONLY_ID = """
query getCheckout($id: ID) {
checkout(id: $id) {
token
lines {
id
isGift
}
}
}
"""
QUERY_CHECKOUT_LINES_WITH_PRICES = """
query getCheckout($id: ID) {
checkout(id: $id) {
token
lines {
id
isGift
quantity
variant {
id
}
unitPrice {
gross {
amount
}
}
totalPrice {
currency
gross {
amount
}
}
undiscountedUnitPrice {
amount
currency
}
undiscountedTotalPrice {
amount
currency
}
}
}
}
"""
def test_checkout_basic_fields_no_recalculation(
    user_api_client, checkout_with_item_and_gift_promotion, gift_promotion_rule, product
):
    """Test that querying only basic checkout fields performs NO recalculation.

    This test ensures that when querying only basic fields (id, token, email)
    that don't involve lines or pricing, the system will:
    1. NOT recalculate discounts (keep discount_expiration unchanged)
    2. NOT recalculate taxes (keep price_expiration unchanged)
    3. Return data without triggering any expensive calculations
    """
    # given
    checkout = checkout_with_item_and_gift_promotion
    query = QUERY_CHECKOUT_BASIC_FIELDS
    # Force both expirations into the past so a recalculation WOULD happen if
    # the queried fields triggered one.
    initial_price_expiration = timezone.now() - timedelta(minutes=5)
    initial_discount_expiration = timezone.now() - timedelta(minutes=5)
    checkout.price_expiration = initial_price_expiration
    checkout.discount_expiration = initial_discount_expiration
    checkout.save(update_fields=["price_expiration", "discount_expiration"])
    variables = {"id": graphene.Node.to_global_id("Checkout", checkout.pk)}
    # Change gift promotion to simulate that data is stale
    new_gift_variant = product.variants.last()
    gift_promotion_rule.gifts.set([new_gift_variant])
    gift_promotion_rule.order_predicate = {"discountedObjectPredicate": {}}
    gift_promotion_rule.save(update_fields=["order_predicate"])

    # when
    response = user_api_client.post_graphql(query, variables)
    content = get_graphql_content(response)
    data = content["data"]["checkout"]

    # then
    assert data["id"] == graphene.Node.to_global_id("Checkout", checkout.pk)
    assert data["token"] == str(checkout.token)
    assert data["email"] == checkout.email
    checkout.refresh_from_db()
    # Verify that NO recalculation happened - both expirations should remain unchanged
    assert checkout.discount_expiration == initial_discount_expiration
    assert checkout.price_expiration == initial_price_expiration
def test_checkout_gift_promotion_changed_only_line_id(
    user_api_client, checkout_with_item_and_gift_promotion, gift_promotion_rule, product
):
    """Test that querying only line.id triggers discount recalculation but NOT tax recalculation.

    This test ensures that when a gift promotion changes and we only query line IDs,
    the system will:
    1. Recalculate discounts (update discount_expiration)
    2. Update gift lines to reflect the new promotion
    3. NOT recalculate taxes (keep price_expiration unchanged)

    This prevents unnecessary expensive tax calculations when only IDs are needed.
    """
    # given
    checkout = checkout_with_item_and_gift_promotion
    query = QUERY_CHECKOUT_LINES_ONLY_ID
    # Both expirations in the past -> cached discount data is stale.
    initial_price_expiration = timezone.now() - timedelta(minutes=5)
    checkout.price_expiration = initial_price_expiration
    checkout.discount_expiration = initial_price_expiration
    checkout.save(update_fields=["price_expiration", "discount_expiration"])
    variables = {"id": graphene.Node.to_global_id("Checkout", checkout.pk)}
    lines_count = checkout.lines.count()
    # Get initial gift line. Assert it exists BEFORE reading its attributes so
    # a missing gift line fails with a clear assertion error instead of an
    # AttributeError (previously the assert was unreachable).
    initial_gift_line = checkout.lines.filter(is_gift=True).first()
    assert initial_gift_line is not None
    initial_gift_line_variant = initial_gift_line.variant
    # Change gift promotion to a different variant
    new_gift_variant = product.variants.last()
    gift_promotion_rule.gifts.set([new_gift_variant])
    gift_promotion_rule.order_predicate = {"discountedObjectPredicate": {}}
    gift_promotion_rule.save(update_fields=["order_predicate"])
    # Set promotion end date
    end_date = timezone.now() + timedelta(minutes=10)
    gift_promotion_rule.promotion.end_date = end_date
    gift_promotion_rule.promotion.save(update_fields=["end_date"])

    # when
    response = user_api_client.post_graphql(query, variables)
    content = get_graphql_content(response)
    data = content["data"]["checkout"]

    # then
    assert data["token"] == str(checkout.token)
    assert len(data["lines"]) == lines_count
    # Verify the gift line was updated to the new variant
    gift_line = checkout.lines.filter(is_gift=True).first()
    assert gift_line.variant != initial_gift_line_variant
    assert gift_line.variant == new_gift_variant
    # Verify the response contains the updated gift line
    gift_lines = [line for line in data["lines"] if line["isGift"] is True]
    assert len(gift_lines) == 1
    gift_line_data = gift_lines[0]
    assert gift_line_data["id"] == graphene.Node.to_global_id(
        "CheckoutLine", gift_line.pk
    )
    # Verify discount_expiration was updated (discount recalculation happened)
    # Should be set to promotion end_date as it's sooner than now + checkout ttl
    checkout.refresh_from_db()
    assert checkout.discount_expiration == end_date
    # Verify price_expiration was NOT updated (tax recalculation did NOT happen)
    assert checkout.price_expiration == initial_price_expiration
@mock.patch(
    "saleor.checkout.calculations.create_or_update_discount_objects_from_promotion_for_checkout",
    wraps=create_or_update_discount_objects_from_promotion_for_checkout,
)
def test_checkout_gift_promotion_changed_with_line_prices(
    mocked_discount_creation,
    user_api_client,
    checkout_with_item_and_gift_promotion,
    gift_promotion_rule,
    product,
):
    """Test that querying line prices triggers both discount and tax recalculation.

    This test ensures that when a gift promotion changes and we query line prices,
    the system will:
    1. Recalculate discounts (update discount_expiration)
    2. Update gift lines to reflect the new promotion
    3. Recalculate taxes (update price_expiration)
    4. Return correct pricing for gift lines (unit price = 0, undiscounted price > 0)

    This ensures accurate pricing information when price fields are requested.
    """
    # given
    checkout = checkout_with_item_and_gift_promotion
    query = QUERY_CHECKOUT_LINES_WITH_PRICES
    # Both expirations in the past -> cached discount data is stale.
    initial_price_expiration = timezone.now() - timedelta(minutes=5)
    checkout.price_expiration = initial_price_expiration
    checkout.discount_expiration = initial_price_expiration
    checkout.save(update_fields=["price_expiration", "discount_expiration"])
    variables = {"id": graphene.Node.to_global_id("Checkout", checkout.pk)}
    lines_count = checkout.lines.count()
    # Get initial gift line. Assert it exists BEFORE reading its attributes so
    # a missing gift line fails with a clear assertion error instead of an
    # AttributeError (previously the assert was unreachable).
    initial_gift_line = checkout.lines.filter(is_gift=True).first()
    assert initial_gift_line is not None
    initial_gift_line_variant = initial_gift_line.variant
    # Change gift promotion to a different variant
    new_gift_variant = product.variants.last()
    gift_promotion_rule.gifts.set([new_gift_variant])
    gift_promotion_rule.order_predicate = {"discountedObjectPredicate": {}}
    gift_promotion_rule.save(update_fields=["order_predicate"])
    # Set promotion end date
    end_date = timezone.now() + timedelta(minutes=10)
    gift_promotion_rule.promotion.end_date = end_date
    gift_promotion_rule.promotion.save(update_fields=["end_date"])

    # when
    response = user_api_client.post_graphql(query, variables)
    content = get_graphql_content(response)
    data = content["data"]["checkout"]

    # then
    assert data["token"] == str(checkout.token)
    assert len(data["lines"]) == lines_count
    # Verify the gift line was updated to the new variant
    gift_line = checkout.lines.filter(is_gift=True).first()
    assert gift_line.variant != initial_gift_line_variant
    assert gift_line.variant == new_gift_variant
    # Verify the response contains the updated gift line
    gift_lines = [line for line in data["lines"] if line["isGift"] is True]
    assert len(gift_lines) == 1
    gift_line_data = gift_lines[0]
    assert gift_line_data["id"] == graphene.Node.to_global_id(
        "CheckoutLine", gift_line.pk
    )
    # Verify gift line has correct pricing
    assert gift_line_data["unitPrice"]["gross"]["amount"] == 0
    assert gift_line_data["totalPrice"]["gross"]["amount"] == 0
    # undiscountedUnitPrice should have the original price of the variant
    assert gift_line_data["undiscountedUnitPrice"]["amount"] > 0
    # Verify both expirations were updated (full recalculation happened)
    # Both price expiration should be the same
    checkout.refresh_from_db()
    assert checkout.price_expiration > timezone.now()
    assert checkout.discount_expiration == checkout.price_expiration
    # Ensure that discount recalculation called only once
    mocked_discount_creation.assert_called_once()
def test_checkout_gift_promotion_removed_only_line_id(
    user_api_client, checkout_with_item_and_gift_promotion, gift_promotion_rule
):
    """Test that querying only line.id triggers discount recalculation when gift promotion is removed.

    This test ensures that when a gift promotion is removed and we only query line IDs,
    the system will:
    1. Recalculate discounts (update discount_expiration)
    2. Remove gift lines that are no longer valid
    3. NOT recalculate taxes (keep price_expiration unchanged)

    This prevents unnecessary expensive tax calculations when only IDs are needed.
    """
    # given
    checkout = checkout_with_item_and_gift_promotion
    query = QUERY_CHECKOUT_LINES_ONLY_ID
    # Both expirations in the past -> cached discount data is stale.
    initial_price_expiration = timezone.now() - timedelta(minutes=5)
    checkout.price_expiration = initial_price_expiration
    checkout.discount_expiration = initial_price_expiration
    checkout.save(update_fields=["price_expiration", "discount_expiration"])
    variables = {"id": graphene.Node.to_global_id("Checkout", checkout.pk)}
    initial_lines_count = checkout.lines.count()
    initial_gift_line = checkout.lines.filter(is_gift=True).first()
    assert initial_gift_line is not None
    # Remove the gift promotion
    gift_promotion_rule.delete()

    # when
    response = user_api_client.post_graphql(query, variables)
    content = get_graphql_content(response)
    data = content["data"]["checkout"]

    # then
    assert data["token"] == str(checkout.token)
    # Gift line should be removed
    assert len(data["lines"]) == initial_lines_count - 1
    # Verify no gift lines remain
    gift_lines = [line for line in data["lines"] if line["isGift"] is True]
    assert len(gift_lines) == 0
    # Verify gift line was removed from database
    gift_line_exists = checkout.lines.filter(is_gift=True).exists()
    assert not gift_line_exists
    # Verify discount_expiration was updated (discount recalculation happened)
    checkout.refresh_from_db()
    assert checkout.discount_expiration > timezone.now()
    # Verify price_expiration was NOT updated (tax recalculation did NOT happen)
    assert checkout.price_expiration == initial_price_expiration
@mock.patch(
    "saleor.checkout.calculations.create_or_update_discount_objects_from_promotion_for_checkout",
    wraps=create_or_update_discount_objects_from_promotion_for_checkout,
)
def test_checkout_gift_promotion_removed_with_line_prices(
    mocked_discount_creation,
    user_api_client,
    checkout_with_item_and_gift_promotion,
    gift_promotion_rule,
):
    """Test that querying line prices triggers both discount and tax recalculation when gift promotion is removed.

    This test ensures that when a gift promotion is removed and we query line prices,
    the system will:
    1. Recalculate discounts (update discount_expiration)
    2. Remove gift lines that are no longer valid
    3. Recalculate taxes (update price_expiration)

    This ensures accurate pricing information when price fields are requested.
    """
    # given
    checkout = checkout_with_item_and_gift_promotion
    query = QUERY_CHECKOUT_LINES_WITH_PRICES
    # Both expirations in the past -> cached discount data is stale.
    initial_price_expiration = timezone.now() - timedelta(minutes=5)
    checkout.price_expiration = initial_price_expiration
    checkout.discount_expiration = initial_price_expiration
    checkout.save(update_fields=["price_expiration", "discount_expiration"])
    variables = {"id": graphene.Node.to_global_id("Checkout", checkout.pk)}
    initial_lines_count = checkout.lines.count()
    initial_gift_line = checkout.lines.filter(is_gift=True).first()
    assert initial_gift_line is not None
    # Remove the gift promotion
    gift_promotion_rule.delete()

    # when
    response = user_api_client.post_graphql(query, variables)
    content = get_graphql_content(response)
    data = content["data"]["checkout"]

    # then
    assert data["token"] == str(checkout.token)
    # Gift line should be removed
    assert len(data["lines"]) == initial_lines_count - 1
    # Verify no gift lines remain
    gift_lines = [line for line in data["lines"] if line["isGift"] is True]
    assert len(gift_lines) == 0
    # Verify gift line was removed from database
    gift_line_exists = checkout.lines.filter(is_gift=True).exists()
    assert not gift_line_exists
    # Verify both expirations were updated (full recalculation happened)
    checkout.refresh_from_db()
    assert checkout.discount_expiration > timezone.now()
    assert checkout.price_expiration > timezone.now()
    # Ensure that discount recalculation called only once
    mocked_discount_creation.assert_called_once()
def test_checkout_gift_promotion_added_only_line_id(
    user_api_client, checkout_with_item, gift_promotion_rule, product
):
    """Test that querying only line.id triggers discount recalculation when gift promotion is added.

    This test ensures that when a gift promotion is added to a checkout without gift lines
    and we only query line IDs, the system will:
    1. Recalculate discounts (update discount_expiration)
    2. Add new gift lines based on the promotion
    3. NOT recalculate taxes (keep price_expiration unchanged)

    This prevents unnecessary expensive tax calculations when only IDs are needed.
    """
    # given
    checkout = checkout_with_item
    query = QUERY_CHECKOUT_LINES_ONLY_ID
    # Both expirations in the past -> cached discount data is stale.
    initial_price_expiration = timezone.now() - timedelta(minutes=5)
    checkout.price_expiration = initial_price_expiration
    checkout.discount_expiration = initial_price_expiration
    checkout.save(update_fields=["price_expiration", "discount_expiration"])
    variables = {"id": graphene.Node.to_global_id("Checkout", checkout.pk)}
    initial_lines_count = checkout.lines.count()
    # Verify no gift lines initially exist
    initial_gift_line = checkout.lines.filter(is_gift=True).first()
    assert initial_gift_line is None
    # Add gift to the promotion rule
    gift_variant = product.variants.first()
    gift_promotion_rule.gifts.set([gift_variant])
    gift_promotion_rule.order_predicate = {"discountedObjectPredicate": {}}
    gift_promotion_rule.save(update_fields=["order_predicate"])
    # Set promotion end date
    end_date = timezone.now() + timedelta(days=20)
    gift_promotion_rule.promotion.end_date = end_date
    gift_promotion_rule.promotion.save(update_fields=["end_date"])

    # when
    response = user_api_client.post_graphql(query, variables)
    content = get_graphql_content(response)
    data = content["data"]["checkout"]

    # then
    assert data["token"] == str(checkout.token)
    # Gift line should be added
    assert len(data["lines"]) == initial_lines_count + 1
    # Verify gift line was added
    gift_lines = [line for line in data["lines"] if line["isGift"] is True]
    assert len(gift_lines) == 1
    # Verify gift line exists in database
    gift_line = checkout.lines.filter(is_gift=True).first()
    assert gift_line is not None
    assert gift_line.variant == gift_variant
    # Verify the response contains the added gift line
    gift_line_data = gift_lines[0]
    assert gift_line_data["id"] == graphene.Node.to_global_id(
        "CheckoutLine", gift_line.pk
    )
    # Verify discount_expiration was updated (discount recalculation happened)
    # and was NOT set to the promotion end date, as end_date is later than
    # now + checkout ttl. The previous chained comparison
    # (`discount_expiration > timezone.now() < end_date`) compared
    # `timezone.now() < end_date` — trivially true — instead of bounding
    # `discount_expiration` itself.
    checkout.refresh_from_db()
    assert timezone.now() < checkout.discount_expiration < end_date
    # Verify price_expiration was NOT updated (tax recalculation did NOT happen)
    assert checkout.price_expiration == initial_price_expiration
@mock.patch(
    "saleor.checkout.calculations.create_or_update_discount_objects_from_promotion_for_checkout",
    wraps=create_or_update_discount_objects_from_promotion_for_checkout,
)
def test_checkout_gift_promotion_added_with_line_prices(
    mocked_discount_creation,
    user_api_client,
    checkout_with_item,
    gift_promotion_rule,
    product,
):
    """Test that querying line prices triggers both discount and tax recalculation when gift promotion is added.

    This test ensures that when a gift promotion is added to a checkout without gift lines
    and we query line prices, the system will:
    1. Recalculate discounts (update discount_expiration)
    2. Add new gift lines based on the promotion
    3. Recalculate taxes (update price_expiration)
    4. Return correct pricing for gift lines (unit price = 0, undiscounted price > 0)

    This ensures accurate pricing information when price fields are requested.
    """
    # given
    checkout = checkout_with_item
    query = QUERY_CHECKOUT_LINES_WITH_PRICES
    # Both expirations in the past -> cached discount data is stale.
    initial_price_expiration = timezone.now() - timedelta(minutes=5)
    checkout.price_expiration = initial_price_expiration
    checkout.discount_expiration = initial_price_expiration
    checkout.save(update_fields=["price_expiration", "discount_expiration"])
    variables = {"id": graphene.Node.to_global_id("Checkout", checkout.pk)}
    initial_lines_count = checkout.lines.count()
    # Verify no gift lines initially exist
    initial_gift_line = checkout.lines.filter(is_gift=True).first()
    assert initial_gift_line is None
    # Add gift to the promotion rule
    gift_variant = product.variants.first()
    gift_promotion_rule.gifts.set([gift_variant])
    gift_promotion_rule.order_predicate = {"discountedObjectPredicate": {}}
    gift_promotion_rule.save(update_fields=["order_predicate"])

    # when
    response = user_api_client.post_graphql(query, variables)
    content = get_graphql_content(response)
    data = content["data"]["checkout"]

    # then
    assert data["token"] == str(checkout.token)
    # Gift line should be added
    assert len(data["lines"]) == initial_lines_count + 1
    # Verify gift line was added
    gift_lines = [line for line in data["lines"] if line["isGift"] is True]
    assert len(gift_lines) == 1
    # Verify gift line exists in database
    gift_line = checkout.lines.filter(is_gift=True).first()
    assert gift_line is not None
    assert gift_line.variant == gift_variant
    # Verify the response contains the added gift line
    gift_line_data = gift_lines[0]
    assert gift_line_data["id"] == graphene.Node.to_global_id(
        "CheckoutLine", gift_line.pk
    )
    # Verify gift line has correct pricing
    assert gift_line_data["unitPrice"]["gross"]["amount"] == 0
    assert gift_line_data["totalPrice"]["gross"]["amount"] == 0
    # undiscountedUnitPrice should have the original price of the variant
    assert gift_line_data["undiscountedUnitPrice"]["amount"] > 0
    # Verify both expirations were updated (full recalculation happened)
    checkout.refresh_from_db()
    assert checkout.discount_expiration > timezone.now()
    assert checkout.price_expiration > timezone.now()
    # Ensure that discount recalculation called only once
    mocked_discount_creation.assert_called_once()
@pytest.mark.parametrize(
    ("price_expiration_delta", "discount_expiration_delta"),
    [
        # Discounts stale but prices still valid - price_expiration governs.
        (timedelta(minutes=20), timedelta(minutes=-20)),
        # Both still valid.
        (timedelta(minutes=30), timedelta(minutes=30)),
    ],
)
@freeze_time("2025-11-12 12:00:00")
def test_checkout_lines_with_prices_price_expiration_in_future_no_recalculation(
    product,
    price_expiration_delta,
    discount_expiration_delta,
    user_api_client,
    checkout_with_item_and_gift_promotion,
    gift_promotion_rule,
):
    """Test that querying line prices does NOT trigger recalculation if price_expiration is in the future.

    This test ensures that when price_expiration and discount_expiration are set
    to a future time, querying line prices will:
    1. NOT recalculate discounts (keep discount_expiration unchanged)
    2. NOT recalculate taxes (keep price_expiration unchanged)
    3. Return correct pricing for gift lines
    """
    # given
    checkout = checkout_with_item_and_gift_promotion
    query = QUERY_CHECKOUT_LINES_WITH_PRICES
    # Compute the expirations inside the frozen-time context. Parametrize
    # arguments are evaluated at collection time against the real clock, so
    # ready-made datetimes from parametrize could end up in the past relative
    # to the frozen "now" and silently flip the scenario being tested.
    price_expiration = timezone.now() + price_expiration_delta
    discount_expiration = timezone.now() + discount_expiration_delta
    checkout.price_expiration = price_expiration
    checkout.discount_expiration = discount_expiration
    checkout.save(update_fields=["price_expiration", "discount_expiration"])

    variables = {"id": graphene.Node.to_global_id("Checkout", checkout.pk)}
    lines_count = checkout.lines.count()

    # Get initial gift line (assert before dereferencing it)
    initial_gift_line = checkout.lines.filter(is_gift=True).first()
    assert initial_gift_line is not None
    initial_gift_line_variant = initial_gift_line.variant

    # Change gift promotion to a different variant
    new_gift_variant = product.variants.last()
    gift_promotion_rule.gifts.set([new_gift_variant])
    gift_promotion_rule.order_predicate = {"discountedObjectPredicate": {}}
    gift_promotion_rule.save(update_fields=["order_predicate"])

    # Set promotion end date
    end_date = timezone.now() + timedelta(minutes=20)
    gift_promotion_rule.promotion.end_date = end_date
    gift_promotion_rule.promotion.save(update_fields=["end_date"])

    # when
    response = user_api_client.post_graphql(query, variables)
    content = get_graphql_content(response)
    data = content["data"]["checkout"]

    # then
    assert data["token"] == str(checkout.token)
    assert len(data["lines"]) == lines_count

    # Verify NO recalculation happened - both expirations should remain unchanged
    checkout.refresh_from_db()
    assert checkout.discount_expiration == discount_expiration
    assert checkout.price_expiration == price_expiration

    # Gift line should NOT be updated to the new variant
    assert checkout.lines.filter(is_gift=True).count() == 1
    gift_line = checkout.lines.filter(is_gift=True).first()
    assert gift_line.variant == initial_gift_line_variant

    # Verify the response contains the original gift line
    gift_lines = [line for line in data["lines"] if line["isGift"] is True]
    assert len(gift_lines) == 1
    gift_line_data = gift_lines[0]
    assert gift_line_data["id"] == graphene.Node.to_global_id(
        "CheckoutLine", gift_line.pk
    )

    # Verify gift line has correct pricing: free, but with a real
    # undiscounted price
    assert gift_line_data["unitPrice"]["gross"]["amount"] == 0
    assert gift_line_data["totalPrice"]["gross"]["amount"] == 0
    assert gift_line_data["undiscountedUnitPrice"]["amount"] > 0
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/checkout/tests/test_checkout_discount_expiration.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 525,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/tests/e2e/checkout/test_checkout_with_gift_promotion_updates.py | """E2E test for gift promotion with discount price expiration.
This test verifies the complete flow of gift promotions with dynamic updates:
1. Create checkout with a product line
2. Create a gift promotion (order promotion with gift reward)
3. Update shipping - gift line should be added
4. Update gift promotion - change the gift variant
5. Update shipping again - gift line should update with new variant and prices
6. Complete checkout successfully
"""
import pytest
from ..product.utils.preparing_product import prepare_product
from ..promotions.utils.promotion_create import create_promotion
from ..promotions.utils.promotion_rule_create import create_promotion_rule
from ..promotions.utils.promotion_rule_update import update_promotion_rule
from ..shop.utils.preparing_shop import prepare_default_shop
from ..utils import assign_permissions
from .utils import (
checkout_complete,
checkout_create,
checkout_delivery_method_update,
checkout_dummy_payment_create,
checkout_lines_add,
)
def get_checkout_with_lines(api_client, checkout_id):
    """Fetch a checkout with per-line details (incl. gift lines) via GraphQL."""
    query = """
        query Checkout($checkoutId: ID!){
            checkout(id: $checkoutId){
                id
                totalPrice{
                    gross{
                        amount
                    }
                }
                lines {
                    id
                    isGift
                    quantity
                    variant {
                        id
                        name
                    }
                    unitPrice {
                        gross {
                            amount
                        }
                    }
                    totalPrice {
                        gross {
                            amount
                        }
                    }
                    undiscountedUnitPrice {
                        amount
                    }
                }
            }
        }
    """
    response = api_client.post_graphql(query, {"checkoutId": checkout_id})
    payload = response.json()
    # Fail loudly on any GraphQL-level error before unwrapping the data.
    assert "errors" not in payload, f"GraphQL errors: {payload.get('errors')}"
    return payload["data"]["checkout"]
@pytest.mark.e2e
def test_checkout_gift_promotion_update_flow(
    e2e_staff_api_client,
    e2e_not_logged_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_orders,
    permission_manage_checkouts,
    permission_manage_discounts,
    shop_permissions,
):
    """Test gift promotion update flow in checkout.

    Checks:
    - Gift line is added when promotion is active
    - Gift line stays unchanged while checkout prices are still valid
    - Gift line updates when the promotion's gift variant changes and a
      recalculation is triggered
    - Checkout completes successfully with correct gift line in order
    """
    # Setup permissions
    permissions = [
        *shop_permissions,
        permission_manage_product_types_and_attributes,
        permission_manage_orders,
        permission_manage_checkouts,
        permission_manage_discounts,
    ]
    assign_permissions(e2e_staff_api_client, permissions)

    # Prepare shop data
    shop_data = prepare_default_shop(e2e_staff_api_client)
    channel_id = shop_data["channel"]["id"]
    channel_slug = shop_data["channel"]["slug"]
    warehouse_id = shop_data["warehouse"]["id"]
    shipping_method_id = shop_data["shipping_method"]["id"]

    # Prepare regular product (to be purchased)
    variant_price = 50
    (
        _product_id,
        product_variant_id,
        _product_variant_price,
    ) = prepare_product(
        e2e_staff_api_client,
        warehouse_id,
        channel_id,
        variant_price,
        product_type_slug="regular-product",
    )

    # Prepare another product (to be purchased)
    variant_price = 25
    (
        _product_id_2,
        product_variant_id_2,
        _product_variant_price_2,
    ) = prepare_product(
        e2e_staff_api_client,
        warehouse_id,
        channel_id,
        variant_price,
        product_type_slug="new-product",
    )

    # Prepare two gift products (different variants for testing updates)
    gift1_price = 20
    (
        _gift1_product_id,
        gift1_variant_id,
        _gift1_price,
    ) = prepare_product(
        e2e_staff_api_client,
        warehouse_id,
        channel_id,
        gift1_price,
        product_type_slug="gift-product-1",
    )
    gift2_price = 30
    (
        _gift2_product_id,
        gift2_variant_id,
        _gift2_price,
    ) = prepare_product(
        e2e_staff_api_client,
        warehouse_id,
        channel_id,
        gift2_price,
        product_type_slug="gift-product-2",
    )

    # Step 1 - Create checkout with regular product
    lines = [
        {"variantId": product_variant_id, "quantity": 1},
    ]
    checkout_data = checkout_create(
        e2e_not_logged_api_client,
        lines,
        channel_slug,
        email="testuser@example.com",
    )
    checkout_id = checkout_data["id"]
    assert checkout_data["isShippingRequired"] is True

    # Verify only one line (no gift yet)
    checkout_lines = get_checkout_with_lines(e2e_not_logged_api_client, checkout_id)
    assert len(checkout_lines["lines"]) == 1
    assert checkout_lines["lines"][0]["isGift"] is False

    # Step 2 - Create gift promotion (order promotion with gift reward)
    promotion_name = "Gift Promotion Test"
    promotion = create_promotion(
        e2e_staff_api_client,
        promotion_name,
        promotion_type="ORDER",
    )
    promotion_id = promotion["id"]

    # Create promotion rule with gift reward
    order_predicate = {
        "discountedObjectPredicate": {"baseSubtotalPrice": {"range": {"gte": 10}}}
    }
    promotion_rule_input = {
        "promotion": promotion_id,
        "channels": [channel_id],
        "name": "Gift Rule 1",
        "rewardType": "GIFT",
        "gifts": [gift1_variant_id],
        "orderPredicate": order_predicate,
    }
    promotion_rule = create_promotion_rule(
        e2e_staff_api_client,
        promotion_rule_input,
    )
    promotion_rule_id = promotion_rule["id"]

    # Step 3 - Update shipping method - this should trigger discount
    # recalculation and add the gift line
    checkout_data = checkout_delivery_method_update(
        e2e_not_logged_api_client,
        checkout_id,
        shipping_method_id,
    )
    assert checkout_data["deliveryMethod"]["id"] == shipping_method_id

    # Verify gift line was added
    checkout_lines = get_checkout_with_lines(e2e_not_logged_api_client, checkout_id)
    assert len(checkout_lines["lines"]) == 2

    # Find gift line
    gift_lines = [line for line in checkout_lines["lines"] if line["isGift"]]
    regular_lines = [line for line in checkout_lines["lines"] if not line["isGift"]]
    assert len(gift_lines) == 1
    assert len(regular_lines) == 1

    # Verify gift line details
    gift_line = gift_lines[0]
    assert gift_line["variant"]["id"] == gift1_variant_id
    assert gift_line["unitPrice"]["gross"]["amount"] == 0  # Gift should be free
    assert gift_line["totalPrice"]["gross"]["amount"] == 0
    assert gift_line["undiscountedUnitPrice"]["amount"] == gift1_price

    # Step 4 - Update gift promotion - change to different gift variant
    promotion_rule_update_input = {
        "addGifts": [gift2_variant_id],
        "removeGifts": [gift1_variant_id],
    }
    update_promotion_rule(
        e2e_staff_api_client,
        promotion_rule_id,
        promotion_rule_update_input,
    )

    # Step 5 - Fetch checkout after promotion update.
    # The gift line should still be the old variant (gift1), as the checkout
    # prices have not expired yet.
    checkout_lines = get_checkout_with_lines(e2e_not_logged_api_client, checkout_id)
    assert len(checkout_lines["lines"]) == 2
    gift_lines = [line for line in checkout_lines["lines"] if line["isGift"]]
    assert len(gift_lines) == 1

    # Gift line should still be the original variant
    gift_line = gift_lines[0]
    assert gift_line["variant"]["id"] == gift1_variant_id
    assert gift_line["unitPrice"]["gross"]["amount"] == 0
    assert gift_line["totalPrice"]["gross"]["amount"] == 0
    assert gift_line["undiscountedUnitPrice"]["amount"] == gift1_price

    # Step 6 - Add new checkout line - this should trigger discount
    # recalculation and update the gift line to the new variant
    checkout_data = checkout_lines_add(
        e2e_not_logged_api_client,
        checkout_id,
        [{"variantId": product_variant_id_2, "quantity": 1}],
    )

    # Verify gift line was updated (new variant, new line ID)
    checkout_lines = get_checkout_with_lines(e2e_not_logged_api_client, checkout_id)
    assert len(checkout_lines["lines"]) == 3
    gift_lines = [line for line in checkout_lines["lines"] if line["isGift"]]
    assert len(gift_lines) == 1

    # Verify the gift line changed to the new variant and is still free
    updated_gift_line = gift_lines[0]
    assert updated_gift_line["variant"]["id"] == gift2_variant_id
    assert updated_gift_line["unitPrice"]["gross"]["amount"] == 0
    assert updated_gift_line["totalPrice"]["gross"]["amount"] == 0
    assert updated_gift_line["undiscountedUnitPrice"]["amount"] == gift2_price

    # Find and validate regular line for product_variant_id
    regular_line_1 = next(
        line
        for line in checkout_lines["lines"]
        if not line["isGift"] and line["variant"]["id"] == product_variant_id
    )
    assert regular_line_1["unitPrice"]["gross"]["amount"] == 50

    # Find and validate regular line for product_variant_id_2
    regular_line_2 = next(
        line
        for line in checkout_lines["lines"]
        if not line["isGift"] and line["variant"]["id"] == product_variant_id_2
    )
    assert regular_line_2["unitPrice"]["gross"]["amount"] == 25

    # Step 7 - Complete checkout
    total_gross_amount = checkout_data["totalPrice"]["gross"]["amount"]

    # Create payment covering the checkout total (the gift line is free)
    checkout_dummy_payment_create(
        e2e_not_logged_api_client,
        checkout_id,
        total_gross_amount,
    )

    # Complete checkout
    order_data = checkout_complete(
        e2e_not_logged_api_client,
        checkout_id,
    )

    # Verify order was created successfully
    assert order_data["status"] == "UNFULFILLED"
    assert order_data["total"]["gross"]["amount"] == total_gross_amount

    # Verify order has all lines (2 regular + 1 gift)
    order_lines = order_data["lines"]
    assert len(order_lines) == 3

    # Verify gift line in order
    order_gift_lines = [line for line in order_lines if line["isGift"]]
    assert len(order_gift_lines) == 1
    order_gift_line = order_gift_lines[0]
    assert order_gift_line["variant"]["id"] == gift2_variant_id
    assert order_gift_line["unitPrice"]["gross"]["amount"] == 0
    assert order_gift_line["totalPrice"]["gross"]["amount"] == 0
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/tests/e2e/checkout/test_checkout_with_gift_promotion_updates.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 294,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/core/sqs.py | import uuid
from datetime import datetime
from typing import Any
from django.utils import timezone
from kombu.asynchronous.aws.sqs.message import AsyncMessage
from kombu.transport.SQS import Channel as SqsChannel
from kombu.transport.SQS import Transport as SqsTransport
from kombu.utils.json import dumps
class Channel(SqsChannel):
    """SQS channel with Saleor-specific message publishing behavior."""

    def _put(self, queue: str, message: dict[str, Any], **kwargs):
        """Put message onto queue."""
        q_url = self._new_queue(queue)
        # Incoming **kwargs are discarded; the SQS call arguments are rebuilt
        # from scratch for this message.
        kwargs = {"QueueUrl": q_url}
        if "properties" in message:
            if "message_attributes" in message["properties"]:
                # we don't want to have the attribute in the body
                kwargs["MessageAttributes"] = message["properties"].pop(
                    "message_attributes"
                )
            # Mitigation of https://github.com/celery/kombu/issues/2400
            # Allows passing MessageGroupId for non-FIFO queues
            if "MessageGroupId" in message["properties"]:
                kwargs["MessageGroupId"] = message["properties"]["MessageGroupId"]
            if queue.endswith(".fifo"):
                # FIFO queues require a MessageGroupId; fall back to a shared
                # default group when the publisher did not provide one.
                if "MessageGroupId" not in kwargs:
                    kwargs["MessageGroupId"] = "default"
                if "MessageDeduplicationId" in message["properties"]:
                    kwargs["MessageDeduplicationId"] = message["properties"][
                        "MessageDeduplicationId"
                    ]
                else:
                    # Random id per publish, so identical bodies are not
                    # deduplicated by SQS.
                    kwargs["MessageDeduplicationId"] = str(uuid.uuid4())
        elif headers := message.get("headers"):
            # NOTE(review): the ETA -> DelaySeconds translation only runs when
            # the message has no "properties" key - confirm this is intended.
            if eta := headers.get("eta"):
                datetime_eta = datetime.fromisoformat(eta)
                # Never send a negative delay for an ETA already in the past.
                delay_in_seconds = max(
                    0, int((datetime_eta - timezone.now()).total_seconds())
                )
                # 900 is max delay for SQS
                kwargs["DelaySeconds"] = min(delay_in_seconds, 900)
        if self.sqs_base64_encoding:
            body = AsyncMessage().encode(dumps(message))
        else:
            body = dumps(message)
        kwargs["MessageBody"] = body
        c = self.sqs(queue=self.canonical_queue_name(queue))
        if message.get("redelivered"):
            # Redelivered messages are not re-sent; their visibility timeout
            # is bumped instead so they reappear on the queue later.
            # NOTE(review): reuses wait_time_seconds as the visibility
            # timeout - confirm that is the intended value.
            c.change_message_visibility(
                QueueUrl=q_url,
                ReceiptHandle=message["properties"]["delivery_tag"],
                VisibilityTimeout=self.wait_time_seconds,
            )
        else:
            c.send_message(**kwargs)
class Transport(SqsTransport):
    # Use Saleor's Channel subclass for all connections of this transport.
    Channel = Channel
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/core/sqs.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 56,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/core/context.py | from promise import Promise
def with_promise_context(func):
    """Execute function within Promise context.

    Allow to use dataloaders inside the function: the call is wrapped in a
    resolved Promise chain and the final value is returned synchronously via
    ``promise.get()``.
    """
    import functools

    @functools.wraps(func)  # preserve the wrapped function's metadata
    def wrapper(*args, **kwargs):
        def promise_executor(_):
            return func(*args, **kwargs)

        # Create promise chain and resolve it synchronously.
        promise = Promise.resolve(None).then(promise_executor)
        return promise.get()

    return wrapper
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/core/context.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 12,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/graphql/attribute/mutations/utils.py | from django.db.models import Exists, OuterRef
from ....attribute import models
from ....page import models as page_models
from ....product import models as product_models
def get_product_ids_to_search_index_update_for_attribute_values(
    values: list[models.AttributeValue],
) -> list[int]:
    """Get product IDs that need search index updates when attribute values are changed.

    Finds all products that have the given attribute values assigned directly
    or through one of their variants. Returns an empty list for empty input.
    """
    if not values:
        return []
    # Compute the id list once; the original evaluated it twice.
    value_ids = [value.id for value in values]
    assigned_variant_values = models.AssignedVariantAttributeValue.objects.filter(
        value_id__in=value_ids
    )
    assigned_attributes = models.AssignedVariantAttribute.objects.filter(
        Exists(assigned_variant_values.filter(assignment_id=OuterRef("id")))
    )
    variants = product_models.ProductVariant.objects.filter(
        Exists(assigned_attributes.filter(variant_id=OuterRef("id")))
    )
    assigned_product_values = models.AssignedProductAttributeValue.objects.filter(
        value_id__in=value_ids
    )
    # Products matching either a direct assignment or a variant assignment.
    product_ids = product_models.Product.objects.filter(
        Exists(assigned_product_values.filter(product_id=OuterRef("id")))
        | Exists(variants.filter(product_id=OuterRef("id")))
    ).values_list("id", flat=True)
    return list(product_ids)
def get_page_ids_to_search_index_update_for_attribute_values(
    values: list[models.AttributeValue],
) -> list[int]:
    """Get page IDs that need search index updates when attribute values are changed.

    A page needs reindexing when any of the given attribute values is
    assigned to it.
    """
    if not values:
        return []
    assigned = models.AssignedPageAttributeValue.objects.filter(
        value_id__in=[value.id for value in values]
    )
    pages = page_models.Page.objects.filter(
        Exists(assigned.filter(page_id=OuterRef("id")))
    )
    return list(pages.values_list("id", flat=True))
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/attribute/mutations/utils.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 45,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/graphql/page/tests/queries/test_pages_search.py | import graphene
import pytest
from .....attribute.utils import associate_attribute_values_to_instance
from .....page.models import Page
from .....page.search import update_pages_search_vector
from .....tests.utils import dummy_editorjs
from ....tests.utils import get_graphql_content
QUERY_PAGES_WITH_SEARCH = """
query ($search: String) {
pages(first: 5, search:$search) {
totalCount
edges {
node {
id
}
}
}
}
"""
@pytest.mark.parametrize(
    ("search", "count"),
    [
        ("Author1", 1),
        ("about", 1),
        ("Author", 2),
    ],
)
def test_pages_query_with_search_by_title(
    search, count, staff_api_client, permission_manage_pages, page_type
):
    """Searching should match page titles (exact and prefix matches)."""
    # given
    page_data = [
        ("Author1", "slug_author_1", "Content for page 1"),
        ("Author2", "slug_author_2", "Content for page 2"),
        ("About", "slug_about", "About test content"),
    ]
    pages = Page.objects.bulk_create(
        [
            Page(
                title=title,
                slug=slug,
                content=dummy_editorjs(text),
                page_type=page_type,
            )
            for title, slug, text in page_data
        ]
    )
    update_pages_search_vector(pages)
    staff_api_client.user.user_permissions.add(permission_manage_pages)

    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_SEARCH, {"search": search}
    )

    # then
    content = get_graphql_content(response)
    assert content["data"]["pages"]["totalCount"] == count
@pytest.mark.parametrize(
    ("search", "count"),
    [
        ("slug_author_1", 1),
        ("1", 1),
        ("slug_author", 2),
    ],
)
def test_pages_query_with_search_by_slug(
    search, count, staff_api_client, permission_manage_pages, page_type
):
    """Searching should match page slugs (full slug and slug fragments)."""
    # given
    page_data = [
        ("Author1", "slug_author_1", "Content for page 1"),
        ("Author2", "slug_author_2", "Content for page 2"),
        ("About", "slug_about", "About test content"),
    ]
    pages = Page.objects.bulk_create(
        [
            Page(
                title=title,
                slug=slug,
                content=dummy_editorjs(text),
                page_type=page_type,
            )
            for title, slug, text in page_data
        ]
    )
    update_pages_search_vector(pages)
    staff_api_client.user.user_permissions.add(permission_manage_pages)

    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_SEARCH, {"search": search}
    )

    # then
    content = get_graphql_content(response)
    assert content["data"]["pages"]["totalCount"] == count
@pytest.mark.parametrize(
    ("search", "count"),
    [
        ("content", 3),
        ("Description", 1),
    ],
)
def test_pages_query_with_search_by_content(
    search, count, staff_api_client, permission_manage_pages, page_type
):
    """Searching should match words inside the editorjs page content."""
    # given
    page_data = [
        ("Author1", "slug_author_1", "Content for page 1. Description."),
        ("Author2", "slug_author_2", "Content for page 2"),
        ("About", "slug_about", "About test content"),
    ]
    pages = Page.objects.bulk_create(
        [
            Page(
                title=title,
                slug=slug,
                content=dummy_editorjs(text),
                page_type=page_type,
            )
            for title, slug, text in page_data
        ]
    )
    update_pages_search_vector(pages)
    staff_api_client.user.user_permissions.add(permission_manage_pages)

    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_SEARCH, {"search": search}
    )

    # then
    content = get_graphql_content(response)
    assert content["data"]["pages"]["totalCount"] == count
@pytest.mark.parametrize(
    ("search", "count"),
    [
        ("page-type", 2),
        ("page type", 3),
        ("test-page-type", 1),
        ("Example", 2),
    ],
)
def test_pages_query_with_search_by_page_type(
    search,
    count,
    staff_api_client,
    permission_manage_pages,
    page_type_list,
):
    """Searching should match page type names and slugs."""
    # given
    page_data = [
        ("Author1", "slug_author_1", "Content for page 1. Description."),
        ("Author2", "slug_author_2", "Content for page 2"),
        ("About", "slug_about", "About test content"),
    ]
    # Each page gets a different page type from the fixture list.
    pages = Page.objects.bulk_create(
        [
            Page(
                title=title,
                slug=slug,
                content=dummy_editorjs(text),
                page_type=page_type,
            )
            for (title, slug, text), page_type in zip(page_data, page_type_list)
        ]
    )
    update_pages_search_vector(pages)
    staff_api_client.user.user_permissions.add(permission_manage_pages)

    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_SEARCH, {"search": search}
    )

    # then
    content = get_graphql_content(response)
    assert content["data"]["pages"]["totalCount"] == count
@pytest.mark.parametrize(
    ("search", "count"),
    [
        ("10", 2),  # size value
        ("help", 1),  # tag value
    ],
)
def test_pages_query_with_search_by_attributes(
    search,
    count,
    staff_api_client,
    permission_manage_pages,
    page_type_list,
    size_page_attribute,
    tag_page_attribute,
):
    """Searching should match values of attributes assigned to pages."""
    # given
    page_data = [
        ("Author1", "slug_author_1", "Content for page 1. Description."),
        ("Author2", "slug_author_2", "Content for page 2"),
        ("About", "slug_about", "About test content"),
    ]
    pages = Page.objects.bulk_create(
        [
            Page(
                title=title,
                slug=slug,
                content=dummy_editorjs(text),
                page_type=page_type,
            )
            for (title, slug, text), page_type in zip(page_data, page_type_list)
        ]
    )
    # Make both attributes available on every page type involved.
    for page_type in page_type_list:
        page_type.page_attributes.add(size_page_attribute, tag_page_attribute)

    first_page, _second_page, third_page = pages
    size_value = size_page_attribute.values.first()
    tag_value = tag_page_attribute.values.last()
    # First page gets the size value only; third page gets size and tag.
    associate_attribute_values_to_instance(
        first_page, {size_page_attribute.id: [size_value]}
    )
    associate_attribute_values_to_instance(
        third_page,
        {size_page_attribute.id: [size_value], tag_page_attribute.id: [tag_value]},
    )
    update_pages_search_vector(pages)
    staff_api_client.user.user_permissions.add(permission_manage_pages)

    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_SEARCH, {"search": search}
    )

    # then
    content = get_graphql_content(response)
    assert content["data"]["pages"]["totalCount"] == count
QUERY_PAGES_WITH_FILTER = """
query ($filter: PageFilterInput) {
pages(first: 5, filter:$filter) {
totalCount
edges {
node {
id
}
}
}
}
"""
@pytest.mark.parametrize(
    ("page_filter", "count"),
    [
        ({"search": "Author1"}, 1),
        ({"search": "about"}, 1),
        ({"search": "test"}, 3),
        ({"search": "slug"}, 3),
        ({"search": "Author"}, 2),
    ],
)
def test_pages_query_with_filter(
    page_filter, count, staff_api_client, permission_manage_pages, page_type
):
    """The `filter.search` argument should match the pages' search vector."""
    # given
    page_data = [
        ("Author1", "slug_author_1", "Content for page 1"),
        ("Author2", "slug_author_2", "Content for page 2"),
        ("About", "slug_about", "About test content"),
    ]
    pages = Page.objects.bulk_create(
        [
            Page(
                title=title,
                slug=slug,
                content=dummy_editorjs(text),
                page_type=page_type,
            )
            for title, slug, text in page_data
        ]
    )
    update_pages_search_vector(pages)
    staff_api_client.user.user_permissions.add(permission_manage_pages)

    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_FILTER, {"filter": page_filter}
    )

    # then
    content = get_graphql_content(response)
    assert content["data"]["pages"]["totalCount"] == count
def test_pages_search_sorted_by_rank_exact_match_prioritized(
    staff_api_client, permission_manage_pages, page_type
):
    """An exact title match should outrank prefix matches; non-matches are excluded."""
    # given
    page_data = [
        ("Authoring Guide", "authoring-guide", "How to write content"),
        ("Author", "author-page", "About the author"),
        ("Contact", "contact", "Contact information"),
        ("Authority", "authority", "Authority overview"),
    ]
    pages = Page.objects.bulk_create(
        [
            Page(
                title=title,
                slug=slug,
                content=dummy_editorjs(text),
                page_type=page_type,
            )
            for title, slug, text in page_data
        ]
    )
    update_pages_search_vector(pages)
    staff_api_client.user.user_permissions.add(permission_manage_pages)

    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_FILTER, {"filter": {"search": "author"}}
    )

    # then
    content = get_graphql_content(response)
    data = content["data"]["pages"]
    assert data["totalCount"] == 3

    returned_ids = [edge["node"]["id"] for edge in data["edges"]]
    author_id, guide_id, authority_id = (
        graphene.Node.to_global_id("Page", page.pk)
        for page in (pages[1], pages[0], pages[3])
    )
    contact_id = graphene.Node.to_global_id("Page", pages[2].pk)

    assert set(returned_ids) == {author_id, guide_id, authority_id}
    # Exact title match "Author" should rank highest
    assert returned_ids[0] == author_id
    # Contact page should not appear
    assert contact_id not in returned_ids
    # Both prefix matches should appear
    assert guide_id in returned_ids
    assert authority_id in returned_ids
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/page/tests/queries/test_pages_search.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 368,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/page/search.py | from collections import defaultdict
from typing import NamedTuple
from django.conf import settings
from django.db import transaction
from django.db.models import Value
from ..attribute.models import Attribute, AttributeValue
from ..attribute.search import get_search_vectors_for_attribute_values
from ..core.context import with_promise_context
from ..core.db.connection import allow_writer
from ..core.postgres import FlatConcatSearchVector, NoValidationSearchVector
from ..core.utils.editorjs import clean_editor_js
from ..graphql.attribute.dataloaders.assigned_attributes import (
AttributesByPageIdAndLimitLoader,
AttributeValuesByPageIdAndAttributeIdAndLimitLoader,
)
from ..graphql.core.context import SaleorContext
from ..graphql.page.dataloaders import PageTypeByIdLoader
from .lock_objects import page_qs_select_for_update
from .models import Page, PageType
class AttributeValueData(NamedTuple):
    """Attribute values paired with the attribute they belong to."""

    # Values assigned to a single page for `attribute`.
    values: list[AttributeValue]
    # May be None when the attribute could not be resolved; consumers skip
    # such entries.
    attribute: Attribute | None
@allow_writer()
@with_promise_context
def update_pages_search_vector(
    pages: list[Page],
    page_id_to_title_map: dict[int, str] | None = None,
) -> None:
    """Update search vector for the given pages.

    :param pages: pages to reindex; persisted with ``bulk_update``.
    :param page_id_to_title_map: optional precomputed mapping of referenced
        page id -> title. When omitted it is loaded from the database.
        (Previously this argument was accepted but always overwritten by the
        freshly loaded map; it is now honored when provided.)
    """
    page_type_map, page_id_to_values_map, loaded_title_map = _load_page_data(pages)
    if page_id_to_title_map is None:
        page_id_to_title_map = loaded_title_map

    # Update search vector for each page in memory.
    for page in pages:
        page_type = page_type_map[page.page_type_id]
        values_data = page_id_to_values_map[page.id]
        page.search_vector = FlatConcatSearchVector(
            *prepare_page_search_vector_value(
                page, page_type, values_data, page_id_to_title_map
            )
        )
        page.search_index_dirty = False

    # Save updates under a row lock to avoid racing concurrent reindexing.
    with transaction.atomic():
        # Force evaluation with list() - querysets are lazy, so without it the
        # SELECT ... FOR UPDATE would never be executed and no lock taken.
        list(
            page_qs_select_for_update()
            .filter(id__in=[page.id for page in pages])
            .values_list("id", flat=True)
        )
        Page.objects.bulk_update(pages, ["search_vector", "search_index_dirty"])
def _load_page_data(
    pages: list[Page],
) -> tuple[dict[int, PageType], dict[int, list[AttributeValueData]], dict[int, str]]:
    """Load all required data for pages using dataloaders.

    Must run inside a promise context so the dataloaders can resolve.
    """
    context = SaleorContext()

    # Resolve page types through the dataloader and index them by id.
    type_ids = [page.page_type_id for page in pages]
    loaded_types = PageTypeByIdLoader(context).load_many(type_ids).get()
    page_type_map = {
        page_type.id: page_type for page_type in loaded_types if page_type
    }

    # Load attributes, attribute values and referenced-page titles.
    values_map, title_map = _load_attribute_data(
        context, [page.id for page in pages]
    )
    return page_type_map, values_map, title_map
def _load_attribute_data(
    context: SaleorContext, page_ids: list[int]
) -> tuple[dict[int, list[AttributeValueData]], dict[int, str]]:
    """Load attribute values for pages and titles of pages they reference.

    Returns a mapping of page id -> list of ``AttributeValueData`` and a
    mapping of referenced page id -> title.
    """

    @with_promise_context
    def load_all_data():
        # Load up to PAGE_MAX_INDEXED_ATTRIBUTES attributes per page.
        attributes_promise = AttributesByPageIdAndLimitLoader(context).load_many(
            [(page_id, settings.PAGE_MAX_INDEXED_ATTRIBUTES) for page_id in page_ids]
        )

        def with_attributes(attributes):
            # Build attribute map and queries
            attribute_map = {
                attr.id: attr for page_attrs in attributes for attr in page_attrs
            }
            # One loader key per (page, attribute) pair, capped at the
            # configured per-page limits.
            page_id_attribute_id_with_limit = [
                (page_id, attribute.id, settings.PAGE_MAX_INDEXED_ATTRIBUTE_VALUES)
                for page_id, attrs in zip(page_ids, attributes, strict=True)
                for attribute in attrs[: settings.PAGE_MAX_INDEXED_ATTRIBUTES]
            ]
            # Load attribute values
            attribute_values_promise = (
                AttributeValuesByPageIdAndAttributeIdAndLimitLoader(context).load_many(
                    page_id_attribute_id_with_limit
                )
            )
            return attribute_values_promise.then(
                lambda attribute_values: (
                    attribute_map,
                    page_id_attribute_id_with_limit,
                    attribute_values,
                )
            )

        return attributes_promise.then(with_attributes)

    # Execute within Promise context
    attribute_map, page_id_attribute_id_with_limit, attribute_values = load_all_data()

    # Build page to values mapping; loader results are positionally aligned
    # with the keys they were requested for.
    page_id_to_values_map = defaultdict(list)
    value_ids = []
    for values, (page_id, attr_id, _) in zip(
        attribute_values, page_id_attribute_id_with_limit, strict=True
    ):
        attribute = attribute_map.get(attr_id)
        page_id_to_values_map[page_id].append(AttributeValueData(values, attribute))
        value_ids.extend([value.id for value in values])

    page_id_to_title_map = _get_page_to_title_map(value_ids)
    return page_id_to_values_map, page_id_to_title_map
def _get_page_to_title_map(value_ids: list[int]):
    """Map referenced page ids to titles for the given attribute value ids."""
    replica = settings.DATABASE_CONNECTION_REPLICA_NAME
    # Only reference-type values that actually point at a page are relevant.
    references = (
        AttributeValue.objects.using(replica)
        .filter(id__in=value_ids, reference_page_id__isnull=False)
        .values_list("id", "reference_page_id")
    )
    referenced_page_ids = [page_id for _, page_id in references]
    return dict(
        Page.objects.using(replica)
        .filter(id__in=referenced_page_ids)
        .values_list("id", "title")
    )
def prepare_page_search_vector_value(
    page: Page,
    page_type: PageType,
    values_data: list[AttributeValueData],
    page_id_to_title_map: dict[int, str],
) -> list[NoValidationSearchVector]:
    """Build the list of search vectors describing a single page.

    Page title/slug/content are indexed with weight "A"; page type data and
    attribute values with weight "B".
    """
    vectors = [
        NoValidationSearchVector(Value(page.title), config="simple", weight="A"),
        NoValidationSearchVector(Value(page.slug), config="simple", weight="A"),
    ]
    content = page.content
    if content:
        # Editor.js content is flattened to plain text before indexing.
        vectors.append(
            NoValidationSearchVector(
                Value(clean_editor_js(content, to_string=True)),
                config="simple",
                weight="A",
            )
        )
    # Page type metadata gets a lower weight than the page's own fields.
    vectors.append(
        NoValidationSearchVector(Value(page_type.name), config="simple", weight="B")
    )
    vectors.append(
        NoValidationSearchVector(Value(page_type.slug), config="simple", weight="B")
    )
    # Assigned attribute values contribute additional weight-"B" vectors.
    vectors.extend(
        generate_attributes_search_vector_value(
            values_data, page_id_to_title_map=page_id_to_title_map
        )
    )
    return vectors
def generate_attributes_search_vector_value(
    values_data: list[AttributeValueData],
    *,
    page_id_to_title_map: dict[int, str] | None = None,
) -> list[NoValidationSearchVector]:
    """Prepare `search_vector` value for assigned attributes.

    Method should receive assigned attributes with prefetched `values`.
    Entries with no values or no resolved attribute are skipped.
    """
    vectors: list[NoValidationSearchVector] = []
    for data in values_data:
        # Skip incomplete entries - nothing meaningful to index.
        if not data.values or not data.attribute:
            continue
        vectors.extend(
            get_search_vectors_for_attribute_values(
                data.attribute,
                data.values,
                page_id_to_title_map=page_id_to_title_map,
                weight="B",
            )
        )
    return vectors
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/page/search.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 179,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/page/tasks.py | from django.conf import settings
from django.db import transaction
from ..celeryconf import app
from ..core.db.connection import allow_writer
from .lock_objects import page_qs_select_for_update
from .models import Page
from .search import update_pages_search_vector
# Results in update time ~1s, consumes ~25 MB
UPDATE_SEARCH_BATCH_SIZE = 200
@app.task
@allow_writer()
def mark_pages_search_vector_as_dirty(page_ids: list[int]):
    """Flag the given pages so a later task rebuilds their search index."""
    if not page_ids:
        # Nothing to mark - avoid opening a transaction.
        return
    with transaction.atomic():
        # Take row locks (select_for_update) before flipping the flag.
        locked_ids = (
            page_qs_select_for_update().filter(pk__in=page_ids).values("id")
        )
        Page.objects.filter(id__in=locked_ids).update(search_index_dirty=True)
@app.task(
    queue=settings.UPDATE_SEARCH_VECTOR_INDEX_QUEUE_NAME,
    expires=settings.BEAT_UPDATE_SEARCH_EXPIRE_AFTER_SEC,
)
def update_pages_search_vector_task():
    """Rebuild search vectors for one batch of pages flagged as dirty."""
    replica = settings.DATABASE_CONNECTION_REPLICA_NAME
    # Read one batch of dirty pages from the replica connection.
    batch = list(
        Page.objects.using(replica).filter(search_index_dirty=True)[
            :UPDATE_SEARCH_BATCH_SIZE
        ]
    )
    if batch:
        update_pages_search_vector(batch)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/page/tasks.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 31,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/page/tests/test_search.py | import pytest
from ...attribute.models import AttributeValue
from ...attribute.utils import associate_attribute_values_to_instance
from ..models import Page
from ..search import (
update_pages_search_vector,
)
def test_update_pages_search_vector_multiple_pages(page_list):
    """Test updating search vector for multiple pages."""
    # given
    # Sanity check: fixture pages start flagged as dirty.
    assert all(page.search_index_dirty for page in page_list)
    # when
    update_pages_search_vector(page_list)
    # then
    for page in page_list:
        page.refresh_from_db()
        # A vector was written and the dirty flag was cleared.
        assert page.search_vector is not None
        assert page.search_index_dirty is False
def test_update_pages_search_vector_empty_list(db):
    """Test updating search vector with an empty page list."""
    # given
    pages = []
    # when/then - should not raise any errors
    update_pages_search_vector(pages)
@pytest.fixture
def page_list_with_attributes(
    page_type_list,
    size_page_attribute,
    tag_page_attribute,
    page_type_product_reference_attribute,
    product_list,
):
    """Return pages with reference, size, and tag attribute values assigned.

    Every page type gets all three attributes. Each page receives a distinct
    product-reference value plus the shared size and tag values.
    """
    for page_type in page_type_list:
        page_type.page_attributes.add(
            page_type_product_reference_attribute,
            size_page_attribute,
            tag_page_attribute,
        )
    product_1, product_2, product_3 = product_list
    # One reference value per product, consumed one-per-page below.
    attribute_values = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=page_type_product_reference_attribute,
                name=f"Product {product_1.pk}",
                slug=f"product-{product_1.pk}",
                reference_product=product_1,
            ),
            AttributeValue(
                attribute=page_type_product_reference_attribute,
                name=f"Product {product_2.pk}",
                slug=f"product-{product_2.pk}",
                reference_product=product_2,
            ),
            AttributeValue(
                attribute=page_type_product_reference_attribute,
                name=f"Product {product_3.pk}",
                slug=f"product-{product_3.pk}",
                reference_product=product_3,
            ),
        ]
    )
    page_list = list(Page.objects.all())
    size_attribute_value = size_page_attribute.values.first()
    tag_attribute_value = tag_page_attribute.values.first()
    # NOTE(review): indexing attribute_values[i] assumes there are at most as
    # many pages as created reference values (3) - confirm fixture sizes.
    for i, page in enumerate(page_list):
        associate_attribute_values_to_instance(
            page,
            {
                page_type_product_reference_attribute.pk: [attribute_values[i]],
                size_page_attribute.pk: [size_attribute_value],
                tag_page_attribute.pk: [tag_attribute_value],
            },
        )
    return page_list
def test_update_pages_search_vector_constant_queries(
    page_list_with_attributes, django_assert_num_queries
):
    """Ensure that data loaders are working correctly and number of db queries is constant."""
    # given
    page_list = page_list_with_attributes
    # when & then
    # Expected query breakdown (10 total):
    # 1. Load page types (1 query)
    # 2. Load page data for select_for_update (1 query)
    # 3. Load page-attribute relationships (1 query)
    # 4. Load attributes (1 query)
    # 5. Load attribute value assignments - batched (1 query)
    # 6. Load attribute values - batched (1 query)
    # 7. Load reference page titles (1 query)
    # 8. Transaction savepoint (1 query)
    # 9. Bulk update (1 query)
    # 10. Release savepoint (1 query)
    expected_queries = 10
    # Query count must not scale with the number of pages - run with two
    # different input sizes and expect the same count both times.
    with django_assert_num_queries(expected_queries):  # Expected number of queries
        update_pages_search_vector(page_list[: len(page_list) - 1])
    with django_assert_num_queries(
        expected_queries
    ):  # Same number of queries for more pages
        update_pages_search_vector(page_list)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/page/tests/test_search.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 99,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/page/tests/test_tasks.py | from unittest.mock import patch
from ..models import Page
from ..tasks import mark_pages_search_vector_as_dirty, update_pages_search_vector_task
@patch("saleor.page.tasks.update_pages_search_vector")
def test_update_pages_search_vector(update_pages_search_vector_mock, page_list):
"""Test updating search vector with batch size of 1."""
# given
assert all(page.search_index_dirty for page in page_list)
# when
update_pages_search_vector_task()
# then
assert update_pages_search_vector_mock.called
@patch("saleor.page.tasks.update_pages_search_vector")
def test_update_pages_search_vector_nothing_to_update(
update_pages_search_vector_mock, page_list
):
"""Test updating search vector with batch size of 1."""
# given
Page.objects.all().update(search_index_dirty=False)
# when
update_pages_search_vector_task()
# then
assert not update_pages_search_vector_mock.called
def test_mark_pages_search_vector_as_dirty(page_list):
    """Test that the task flags all requested pages as dirty."""
    # given
    page_ids = [page.id for page in page_list]
    # Clear the flag so the assertion below proves the task set it.
    Page.objects.all().update(search_index_dirty=False)
    # when
    mark_pages_search_vector_as_dirty(page_ids)
    # then
    assert all(
        Page.objects.filter(id__in=page_ids).values_list(
            "search_index_dirty", flat=True
        )
    )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/page/tests/test_tasks.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 35,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/page/tests/test_utils.py | from unittest.mock import patch
from ..models import Page
from ..utils import mark_pages_search_vector_as_dirty_in_batches
@patch("saleor.page.utils.MARK_SEARCH_VECTOR_DIRTY_BATCH_SIZE", 1)
def test_mark_pages_search_vector_as_dirty_in_batches(page_list):
# given
page_ids = [page.id for page in page_list]
Page.objects.all().update(search_index_dirty=False)
# when
mark_pages_search_vector_as_dirty_in_batches(page_ids)
# then
assert all(
Page.objects.filter(id__in=page_ids).values_list(
"search_index_dirty", flat=True
)
)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/page/tests/test_utils.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 16,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/product/utils/search_helpers.py | from ..tasks import mark_products_search_vector_as_dirty
# Results in update time ~0.2s, consumes ~30 MB
MARK_SEARCH_VECTOR_DIRTY_BATCH_SIZE = 1000
def mark_products_search_vector_as_dirty_in_batches(product_ids: list[int]):
    """Mark products as needing search index updates, in fixed-size batches."""
    batch_size = MARK_SEARCH_VECTOR_DIRTY_BATCH_SIZE
    for start in range(0, len(product_ids), batch_size):
        # Each slice is dispatched as its own background task via .delay().
        mark_products_search_vector_as_dirty.delay(
            product_ids[start : start + batch_size]
        )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/product/utils/search_helpers.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 8,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/graphql/page/tests/queries/pages_with_where/test_with_where_references_collections.py | import graphene
import pytest
from ......attribute import AttributeEntityType, AttributeInputType, AttributeType
from ......attribute.models import Attribute, AttributeValue
from ......attribute.utils import associate_attribute_values_to_instance
from .....core.utils import to_global_id_or_none
from .....tests.utils import get_graphql_content
from .shared import QUERY_PAGES_WITH_WHERE
@pytest.mark.parametrize(
    ("filter_type", "expected_count"),
    [("containsAny", 2), ("containsAll", 1)],
)
def test_pages_query_with_attr_slug_and_attribute_value_reference_to_collections(
    filter_type,
    expected_count,
    staff_api_client,
    page_list,
    page_type,
    product_type_collection_reference_attribute,
    collection_list,
):
    """Filter pages by attribute slug plus referenced collection slugs.

    `containsAny` matches both pages (each references at least one of the two
    collections); `containsAll` matches only the page referencing both.
    """
    # given
    page_type.page_attributes.add(product_type_collection_reference_attribute)
    first_collection = collection_list[0]
    second_collection = collection_list[1]
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_collection_reference_attribute,
                name=f"Collection {first_collection.pk}",
                slug=f"collection-{first_collection.pk}",
                reference_collection=first_collection,
            ),
            AttributeValue(
                attribute=product_type_collection_reference_attribute,
                name=f"Collection {second_collection.pk}",
                slug=f"collection-{second_collection.pk}",
                reference_collection=second_collection,
            ),
        ]
    )
    page_with_both_references = page_list[0]
    associate_attribute_values_to_instance(
        page_with_both_references,
        {
            product_type_collection_reference_attribute.pk: [
                attribute_value_1,
                attribute_value_2,
            ]
        },
    )
    page_with_single_reference = page_list[1]
    associate_attribute_values_to_instance(
        page_with_single_reference,
        {product_type_collection_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    "slug": "collection-reference",
                    "value": {
                        "reference": {
                            "collectionSlugs": {
                                filter_type: [
                                    first_collection.slug,
                                    second_collection.slug,
                                ]
                            }
                        }
                    },
                }
            ]
        }
    }
    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_WHERE,
        variables,
    )
    # then
    content = get_graphql_content(response)
    pages_nodes = content["data"]["pages"]["edges"]
    assert len(pages_nodes) == expected_count
    # The page with both references is always part of the result.
    assert pages_nodes[0]["node"]["id"] == graphene.Node.to_global_id(
        "Page", page_list[0].pk
    )
@pytest.mark.parametrize(
    ("filter_type", "expected_count"),
    [("containsAny", 2), ("containsAll", 1)],
)
def test_pages_query_with_attribute_value_reference_to_collection(
    filter_type,
    expected_count,
    staff_api_client,
    page_list,
    page_type,
    product_type_collection_reference_attribute,
    collection_list,
):
    """Filter pages by referenced collection slugs without an attribute slug.

    The references are spread across two distinct reference attributes, so the
    filter must match values regardless of which attribute holds them.
    """
    # given
    second_collection_reference_attribute = Attribute.objects.create(
        slug="second-collection-reference",
        name="collection reference",
        type=AttributeType.PRODUCT_TYPE,
        input_type=AttributeInputType.REFERENCE,
        entity_type=AttributeEntityType.COLLECTION,
    )
    page_type.page_attributes.add(product_type_collection_reference_attribute)
    page_type.page_attributes.add(second_collection_reference_attribute)
    first_collection = collection_list[0]
    second_collection = collection_list[1]
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_collection_reference_attribute,
                name=f"Collection {first_collection.pk}",
                slug=f"collection-{first_collection.pk}",
                reference_collection=first_collection,
            ),
            AttributeValue(
                attribute=second_collection_reference_attribute,
                name=f"Collection {second_collection.pk}",
                slug=f"collection-{second_collection.pk}",
                reference_collection=second_collection,
            ),
        ]
    )
    page_with_both_references = page_list[0]
    associate_attribute_values_to_instance(
        page_with_both_references,
        {
            product_type_collection_reference_attribute.pk: [
                attribute_value_1,
            ],
            second_collection_reference_attribute.pk: [attribute_value_2],
        },
    )
    page_with_single_reference = page_list[1]
    associate_attribute_values_to_instance(
        page_with_single_reference,
        {second_collection_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    "value": {
                        "reference": {
                            "collectionSlugs": {
                                filter_type: [
                                    first_collection.slug,
                                    second_collection.slug,
                                ]
                            }
                        }
                    },
                }
            ]
        }
    }
    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_WHERE,
        variables,
    )
    # then
    content = get_graphql_content(response)
    pages_nodes = content["data"]["pages"]["edges"]
    assert len(pages_nodes) == expected_count
    # The page with both references is always part of the result.
    assert pages_nodes[0]["node"]["id"] == graphene.Node.to_global_id(
        "Page", page_list[0].pk
    )
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 3), ("containsAll", 2)]
)
def test_pages_query_with_attr_slug_and_attribute_value_referenced_collection_ids(
    filter_type,
    expected_count,
    staff_api_client,
    page_list,
    page_type,
    product_type_collection_reference_attribute,
    collection_list,
):
    """Filter pages by attribute slug plus referenced collection global IDs.

    Two pages reference all three collections, one references a single
    collection; `containsAny` matches all three pages, `containsAll` only the
    first two.
    """
    # given
    page_type.page_attributes.add(
        product_type_collection_reference_attribute,
    )
    first_collection = collection_list[0]
    second_collection = collection_list[1]
    third_collection = collection_list[2]
    first_attr_value, second_attr_value, third_attr_value = (
        AttributeValue.objects.bulk_create(
            [
                AttributeValue(
                    attribute=product_type_collection_reference_attribute,
                    name=f"collection {first_collection.pk}",
                    slug=f"collection-{first_collection.pk}",
                    reference_collection=first_collection,
                ),
                AttributeValue(
                    attribute=product_type_collection_reference_attribute,
                    name=f"collection {second_collection.pk}",
                    slug=f"collection-{second_collection.pk}",
                    reference_collection=second_collection,
                ),
                AttributeValue(
                    attribute=product_type_collection_reference_attribute,
                    name=f"collection {third_collection.pk}",
                    slug=f"collection-{third_collection.pk}",
                    reference_collection=third_collection,
                ),
            ]
        )
    )
    # Fixed typo: was "fist_page_with_all_ids".
    first_page_with_all_ids = page_list[0]
    second_page_with_all_ids = page_list[1]
    page_with_single_id = page_list[2]
    associate_attribute_values_to_instance(
        first_page_with_all_ids,
        {
            product_type_collection_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        second_page_with_all_ids,
        {
            product_type_collection_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        page_with_single_id,
        {
            product_type_collection_reference_attribute.pk: [
                first_attr_value,
            ],
        },
    )
    referenced_first_global_id = to_global_id_or_none(first_collection)
    referenced_second_global_id = to_global_id_or_none(second_collection)
    referenced_third_global_id = to_global_id_or_none(third_collection)
    variables = {
        "where": {
            "attributes": [
                {
                    "slug": product_type_collection_reference_attribute.slug,
                    "value": {
                        "reference": {
                            "referencedIds": {
                                filter_type: [
                                    referenced_first_global_id,
                                    referenced_second_global_id,
                                    referenced_third_global_id,
                                ]
                            }
                        }
                    },
                },
            ]
        }
    }
    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_WHERE,
        variables,
    )
    # then
    content = get_graphql_content(response)
    pages_nodes = content["data"]["pages"]["edges"]
    # Some fixture pages must be excluded, proving the filter narrows results.
    assert len(page_list) > len(pages_nodes)
    assert len(pages_nodes) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/page/tests/queries/pages_with_where/test_with_where_references_collections.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 280,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/product/tests/queries/products_filtrations/test_over_references_collections.py | import pytest
from ......attribute import AttributeEntityType, AttributeInputType, AttributeType
from ......attribute.models import Attribute, AttributeValue
from ......attribute.utils import associate_attribute_values_to_instance
from .....core.utils import to_global_id_or_none
from .....tests.utils import get_graphql_content
from .shared import PRODUCTS_FILTER_QUERY, PRODUCTS_WHERE_QUERY
@pytest.mark.parametrize("query", [PRODUCTS_WHERE_QUERY, PRODUCTS_FILTER_QUERY])
@pytest.mark.parametrize(
("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_products_query_with_attr_slug_and_attribute_value_reference_to_collections(
query,
filter_type,
expected_count,
staff_api_client,
product_type,
product_list,
collection_list,
product_type_collection_reference_attribute,
channel_USD,
):
# given
product_type.product_attributes.add(product_type_collection_reference_attribute)
first_collection = collection_list[0]
second_collection = collection_list[1]
attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
[
AttributeValue(
attribute=product_type_collection_reference_attribute,
name=f"Category {first_collection.pk}",
slug=f"collection-{first_collection.pk}",
reference_collection=first_collection,
),
AttributeValue(
attribute=product_type_collection_reference_attribute,
name=f"Category {second_collection.pk}",
slug=f"collection-{second_collection.pk}",
reference_collection=second_collection,
),
]
)
product_with_both_references = product_list[0]
associate_attribute_values_to_instance(
product_with_both_references,
{
product_type_collection_reference_attribute.pk: [
attribute_value_1,
attribute_value_2,
]
},
)
product_with_single_reference = product_list[1]
associate_attribute_values_to_instance(
product_with_single_reference,
{product_type_collection_reference_attribute.pk: [attribute_value_2]},
)
variables = {
"where": {
"attributes": [
{
"slug": "collection-reference",
"value": {
"reference": {
"collectionSlugs": {
filter_type: [
first_collection.slug,
second_collection.slug,
]
}
}
},
}
]
},
"channel": channel_USD.slug,
}
# when
response = staff_api_client.post_graphql(
query,
variables,
)
# then
content = get_graphql_content(response)
products_nodes = content["data"]["products"]["edges"]
assert len(products_nodes) == expected_count
@pytest.mark.parametrize("query", [PRODUCTS_WHERE_QUERY, PRODUCTS_FILTER_QUERY])
@pytest.mark.parametrize(
("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_products_query_with_attribute_value_reference_to_collections(
query,
filter_type,
expected_count,
staff_api_client,
product_list,
product_type,
collection_list,
product_type_collection_reference_attribute,
channel_USD,
):
# given
second_collection_reference_attribute = Attribute.objects.create(
slug="second-collection-reference",
name="Category reference",
type=AttributeType.PRODUCT_TYPE,
input_type=AttributeInputType.REFERENCE,
entity_type=AttributeEntityType.COLLECTION,
)
product_type.product_attributes.add(
product_type_collection_reference_attribute,
second_collection_reference_attribute,
)
first_collection = collection_list[0]
second_collection = collection_list[1]
attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
[
AttributeValue(
attribute=product_type_collection_reference_attribute,
name=f"Category {first_collection.pk}",
slug=f"collection-{first_collection.pk}",
reference_collection=first_collection,
),
AttributeValue(
attribute=second_collection_reference_attribute,
name=f"Category {second_collection.pk}",
slug=f"collection-{second_collection.pk}",
reference_collection=second_collection,
),
]
)
product_with_both_references = product_list[0]
associate_attribute_values_to_instance(
product_with_both_references,
{
product_type_collection_reference_attribute.pk: [attribute_value_1],
second_collection_reference_attribute.pk: [attribute_value_2],
},
)
product_with_single_reference = product_list[1]
associate_attribute_values_to_instance(
product_with_single_reference,
{second_collection_reference_attribute.pk: [attribute_value_2]},
)
variables = {
"where": {
"attributes": [
{
"value": {
"reference": {
"collectionSlugs": {
filter_type: [
first_collection.slug,
second_collection.slug,
]
}
}
},
}
]
},
"channel": channel_USD.slug,
}
# when
response = staff_api_client.post_graphql(
query,
variables,
)
# then
content = get_graphql_content(response)
products_nodes = content["data"]["products"]["edges"]
assert len(products_nodes) == expected_count
@pytest.mark.parametrize("query", [PRODUCTS_WHERE_QUERY, PRODUCTS_FILTER_QUERY])
@pytest.mark.parametrize(
("filter_type", "expected_count"), [("containsAny", 3), ("containsAll", 2)]
)
def test_products_query_with_attr_slug_and_attribute_value_referenced_collection_ids(
query,
filter_type,
expected_count,
staff_api_client,
product_list,
product_type,
collection_list,
product_type_collection_reference_attribute,
channel_USD,
):
# given
product_type.product_attributes.add(product_type_collection_reference_attribute)
first_collection = collection_list[0]
second_collection = collection_list[1]
third_collection = collection_list[2]
first_attr_value, second_attr_value, third_attr_value = (
AttributeValue.objects.bulk_create(
[
AttributeValue(
attribute=product_type_collection_reference_attribute,
name=f"Category {first_collection.pk}",
slug=f"collection-{first_collection.pk}",
reference_collection=first_collection,
),
AttributeValue(
attribute=product_type_collection_reference_attribute,
name=f"Category {second_collection.pk}",
slug=f"collection-{second_collection.pk}",
reference_collection=second_collection,
),
AttributeValue(
attribute=product_type_collection_reference_attribute,
name=f"Category {third_collection.pk}",
slug=f"collection-{third_collection.pk}",
reference_collection=third_collection,
),
]
)
)
first_product_with_all_ids = product_list[0]
second_product_with_all_ids = product_list[1]
product_with_single_id = product_list[2]
associate_attribute_values_to_instance(
first_product_with_all_ids,
{
product_type_collection_reference_attribute.pk: [
first_attr_value,
second_attr_value,
third_attr_value,
],
},
)
associate_attribute_values_to_instance(
second_product_with_all_ids,
{
product_type_collection_reference_attribute.pk: [
first_attr_value,
second_attr_value,
third_attr_value,
],
},
)
associate_attribute_values_to_instance(
product_with_single_id,
{product_type_collection_reference_attribute.pk: [first_attr_value]},
)
referenced_first_global_id = to_global_id_or_none(first_collection)
referenced_second_global_id = to_global_id_or_none(second_collection)
referenced_third_global_id = to_global_id_or_none(third_collection)
variables = {
"where": {
"attributes": [
{
"slug": product_type_collection_reference_attribute.slug,
"value": {
"reference": {
"referencedIds": {
filter_type: [
referenced_first_global_id,
referenced_second_global_id,
referenced_third_global_id,
]
}
}
},
}
]
},
"channel": channel_USD.slug,
}
# when
response = staff_api_client.post_graphql(
query,
variables,
)
# then
content = get_graphql_content(response)
products_nodes = content["data"]["products"]["edges"]
assert len(products_nodes) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/product/tests/queries/products_filtrations/test_over_references_collections.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 276,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/product/tests/queries/variants_where/test_over_references_collections.py | import pytest
from ......attribute import AttributeEntityType, AttributeInputType, AttributeType
from ......attribute.models import Attribute, AttributeValue
from ......attribute.utils import associate_attribute_values_to_instance
from .....core.utils import to_global_id_or_none
from .....tests.utils import get_graphql_content
from .shared import PRODUCT_VARIANTS_WHERE_QUERY
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_product_variants_query_with_attr_slug_and_attribute_value_reference_to_collections(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type_collection_reference_attribute,
    channel_USD,
    collection_list,
):
    """Filter variants by attribute slug plus referenced collection slugs."""
    # given
    # The reference attribute is attached to the variants' own product type.
    product_type = product_variant_list[0].product.product_type
    product_type.variant_attributes.add(product_type_collection_reference_attribute)
    first_collection = collection_list[0]
    second_collection = collection_list[1]
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_collection_reference_attribute,
                name=f"Category {first_collection.pk}",
                slug=f"collection-{first_collection.pk}",
                reference_collection=first_collection,
            ),
            AttributeValue(
                attribute=product_type_collection_reference_attribute,
                name=f"Category {second_collection.pk}",
                slug=f"collection-{second_collection.pk}",
                reference_collection=second_collection,
            ),
        ]
    )
    product_variant_with_both_references = product_variant_list[0]
    associate_attribute_values_to_instance(
        product_variant_with_both_references,
        {
            product_type_collection_reference_attribute.pk: [
                attribute_value_1,
                attribute_value_2,
            ]
        },
    )
    product_variant_with_single_reference = product_variant_list[1]
    associate_attribute_values_to_instance(
        product_variant_with_single_reference,
        {product_type_collection_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    "slug": "collection-reference",
                    "value": {
                        "reference": {
                            "collectionSlugs": {
                                filter_type: [
                                    first_collection.slug,
                                    second_collection.slug,
                                ]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_product_variants_query_with_attribute_value_reference_to_collections(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type,
    product_type_collection_reference_attribute,
    channel_USD,
    collection_list,
):
    """Filter variants by referenced collection slugs without an attribute slug.

    References are spread across two distinct reference attributes, so the
    filter must match values regardless of which attribute holds them.
    """
    # given
    second_collection_reference_attribute = Attribute.objects.create(
        slug="second-collection-reference",
        name="Category reference",
        type=AttributeType.PRODUCT_TYPE,
        input_type=AttributeInputType.REFERENCE,
        entity_type=AttributeEntityType.COLLECTION,
    )
    product_type.variant_attributes.add(
        product_type_collection_reference_attribute,
        second_collection_reference_attribute,
    )
    first_collection = collection_list[0]
    second_collection = collection_list[1]
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_collection_reference_attribute,
                name=f"Category {first_collection.pk}",
                slug=f"collection-{first_collection.pk}",
                reference_collection=first_collection,
            ),
            AttributeValue(
                attribute=second_collection_reference_attribute,
                name=f"Category {second_collection.pk}",
                slug=f"collection-{second_collection.pk}",
                reference_collection=second_collection,
            ),
        ]
    )
    product_variant_with_both_references = product_variant_list[0]
    associate_attribute_values_to_instance(
        product_variant_with_both_references,
        {
            product_type_collection_reference_attribute.pk: [attribute_value_1],
            second_collection_reference_attribute.pk: [attribute_value_2],
        },
    )
    product_variant_with_single_reference = product_variant_list[1]
    associate_attribute_values_to_instance(
        product_variant_with_single_reference,
        {second_collection_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    "value": {
                        "reference": {
                            "collectionSlugs": {
                                filter_type: [
                                    first_collection.slug,
                                    second_collection.slug,
                                ]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
@pytest.mark.parametrize(
    ("filter_type", "expected_count"),
    [("containsAny", 3), ("containsAll", 2)],
)
def test_product_variants_query_with_attr_slug_and_attribute_value_referenced_collection_ids(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type,
    product_type_collection_reference_attribute,
    channel_USD,
    collection_list,
):
    """Filter variants by attribute slug plus referenced collection global IDs.

    Two variants reference all three collections, one references a single
    collection; `containsAny` matches all three, `containsAll` only two.
    """
    # given
    product_type.variant_attributes.add(product_type_collection_reference_attribute)
    first_collection = collection_list[0]
    second_collection = collection_list[1]
    third_collection = collection_list[2]
    first_attr_value, second_attr_value, third_attr_value = (
        AttributeValue.objects.bulk_create(
            [
                AttributeValue(
                    attribute=product_type_collection_reference_attribute,
                    name=f"Category {first_collection.pk}",
                    slug=f"collection-{first_collection.pk}",
                    reference_collection=first_collection,
                ),
                AttributeValue(
                    attribute=product_type_collection_reference_attribute,
                    name=f"Category {second_collection.pk}",
                    slug=f"collection-{second_collection.pk}",
                    reference_collection=second_collection,
                ),
                AttributeValue(
                    attribute=product_type_collection_reference_attribute,
                    name=f"Category {third_collection.pk}",
                    slug=f"collection-{third_collection.pk}",
                    reference_collection=third_collection,
                ),
            ]
        )
    )
    first_product_variant_with_all_ids = product_variant_list[0]
    second_product_variant_with_all_ids = product_variant_list[1]
    # NOTE(review): index 3 here, unlike sibling tests that use index 2 -
    # confirm this is intentional given the fixture's variant layout.
    product_variant_with_single_id = product_variant_list[3]
    associate_attribute_values_to_instance(
        first_product_variant_with_all_ids,
        {
            product_type_collection_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        second_product_variant_with_all_ids,
        {
            product_type_collection_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        product_variant_with_single_id,
        {product_type_collection_reference_attribute.pk: [first_attr_value]},
    )
    referenced_first_global_id = to_global_id_or_none(first_collection)
    referenced_second_global_id = to_global_id_or_none(second_collection)
    referenced_third_global_id = to_global_id_or_none(third_collection)
    variables = {
        "where": {
            "attributes": [
                {
                    "slug": product_type_collection_reference_attribute.slug,
                    "value": {
                        "reference": {
                            "referencedIds": {
                                filter_type: [
                                    referenced_first_global_id,
                                    referenced_second_global_id,
                                    referenced_third_global_id,
                                ]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/product/tests/queries/variants_where/test_over_references_collections.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 271,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/page/tests/queries/pages_with_where/test_with_where_references_categories.py | import graphene
import pytest
from ......attribute import AttributeEntityType, AttributeInputType, AttributeType
from ......attribute.models import Attribute, AttributeValue
from ......attribute.utils import associate_attribute_values_to_instance
from .....core.utils import to_global_id_or_none
from .....tests.utils import get_graphql_content
from .shared import QUERY_PAGES_WITH_WHERE
@pytest.mark.parametrize(
    ("filter_type", "expected_count"),
    [("containsAny", 2), ("containsAll", 1)],
)
def test_pages_query_with_attr_slug_and_attribute_value_reference_to_categories(
    filter_type,
    expected_count,
    staff_api_client,
    page_list,
    page_type,
    page_type_category_reference_attribute,
    category_list,
):
    """Filter pages by attribute slug plus referenced category slugs.

    One page references both categories and another references only the
    second one, so ``containsAny`` matches 2 pages while ``containsAll``
    matches only the page holding both references.
    """
    # given
    page_type.page_attributes.add(page_type_category_reference_attribute)
    first_category = category_list[0]
    second_category = category_list[1]
    # One attribute value per referenced category, both on the same attribute.
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=page_type_category_reference_attribute,
                name=f"Category {first_category.pk}",
                slug=f"category-{first_category.pk}",
                reference_category=first_category,
            ),
            AttributeValue(
                attribute=page_type_category_reference_attribute,
                name=f"Category {second_category.pk}",
                slug=f"category-{second_category.pk}",
                reference_category=second_category,
            ),
        ]
    )
    page_with_both_references = page_list[0]
    associate_attribute_values_to_instance(
        page_with_both_references,
        {
            page_type_category_reference_attribute.pk: [
                attribute_value_1,
                attribute_value_2,
            ]
        },
    )
    page_with_single_reference = page_list[1]
    associate_attribute_values_to_instance(
        page_with_single_reference,
        {page_type_category_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    # NOTE(review): hard-coded slug — presumably matches the
                    # ``page_type_category_reference_attribute`` fixture; confirm.
                    "slug": "category-reference",
                    "value": {
                        "reference": {
                            "categorySlugs": {
                                filter_type: [first_category.slug, second_category.slug]
                            }
                        }
                    },
                }
            ]
        }
    }
    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_WHERE,
        variables,
    )
    # then
    content = get_graphql_content(response)
    pages_nodes = content["data"]["pages"]["edges"]
    assert len(pages_nodes) == expected_count
    # Relies on the page with both references being returned first in both
    # parametrized variants.
    assert pages_nodes[0]["node"]["id"] == graphene.Node.to_global_id(
        "Page", page_list[0].pk
    )
@pytest.mark.parametrize(
    ("filter_type", "expected_count"),
    [("containsAny", 2), ("containsAll", 1)],
)
def test_pages_query_with_attribute_value_reference_to_category(
    filter_type,
    expected_count,
    staff_api_client,
    page_list,
    page_type,
    page_type_category_reference_attribute,
    category_list,
):
    """Filter pages by referenced category slugs without an attribute slug.

    The two category references live on two *different* attributes, so the
    filter must aggregate references across attributes: ``containsAny``
    matches both pages, ``containsAll`` only the page that carries
    references to both categories.
    """
    # given
    second_category_reference_attribute = Attribute.objects.create(
        slug="second-category-reference",
        name="category reference",
        # Fix: this attribute is assigned to a page type below, so it must be
        # a PAGE_TYPE attribute (was PRODUCT_TYPE, copied from the product
        # test suite).
        type=AttributeType.PAGE_TYPE,
        input_type=AttributeInputType.REFERENCE,
        entity_type=AttributeEntityType.CATEGORY,
    )
    page_type.page_attributes.add(page_type_category_reference_attribute)
    page_type.page_attributes.add(second_category_reference_attribute)
    first_category = category_list[0]
    second_category = category_list[1]
    # One value per attribute: value 1 on the fixture attribute, value 2 on
    # the freshly created one.
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=page_type_category_reference_attribute,
                name=f"Category {first_category.pk}",
                slug=f"category-{first_category.pk}",
                reference_category=first_category,
            ),
            AttributeValue(
                attribute=second_category_reference_attribute,
                name=f"Category {second_category.pk}",
                slug=f"category-{second_category.pk}",
                reference_category=second_category,
            ),
        ]
    )
    page_with_both_references = page_list[0]
    associate_attribute_values_to_instance(
        page_with_both_references,
        {
            page_type_category_reference_attribute.pk: [
                attribute_value_1,
            ],
            second_category_reference_attribute.pk: [attribute_value_2],
        },
    )
    page_with_single_reference = page_list[1]
    associate_attribute_values_to_instance(
        page_with_single_reference,
        {second_category_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    # No "slug" key on purpose — the filter matches references
                    # regardless of which attribute holds them.
                    "value": {
                        "reference": {
                            "categorySlugs": {
                                filter_type: [first_category.slug, second_category.slug]
                            }
                        }
                    },
                }
            ]
        }
    }
    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_WHERE,
        variables,
    )
    # then
    content = get_graphql_content(response)
    pages_nodes = content["data"]["pages"]["edges"]
    assert len(pages_nodes) == expected_count
    # Relies on the page with both references being returned first.
    assert pages_nodes[0]["node"]["id"] == graphene.Node.to_global_id(
        "Page", page_list[0].pk
    )
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 3), ("containsAll", 2)]
)
def test_pages_query_with_attr_slug_and_attribute_value_referenced_category_ids(
    filter_type,
    expected_count,
    staff_api_client,
    page_list,
    page_type,
    page_type_category_reference_attribute,
    category_list,
):
    """Filter pages by attribute slug and referenced category global IDs.

    Two pages reference all three categories and a third references only the
    first one, so ``containsAny`` matches 3 pages and ``containsAll`` matches
    the 2 pages holding every reference.
    """
    # given
    page_type.page_attributes.add(
        page_type_category_reference_attribute,
    )
    first_category = category_list[0]
    second_category = category_list[1]
    third_category = category_list[2]
    first_attr_value, second_attr_value, third_attr_value = (
        AttributeValue.objects.bulk_create(
            [
                AttributeValue(
                    attribute=page_type_category_reference_attribute,
                    name=f"Category {first_category.pk}",
                    slug=f"category-{first_category.pk}",
                    reference_category=first_category,
                ),
                AttributeValue(
                    attribute=page_type_category_reference_attribute,
                    name=f"Category {second_category.pk}",
                    slug=f"category-{second_category.pk}",
                    reference_category=second_category,
                ),
                AttributeValue(
                    attribute=page_type_category_reference_attribute,
                    name=f"Category {third_category.pk}",
                    slug=f"category-{third_category.pk}",
                    reference_category=third_category,
                ),
            ]
        )
    )
    # Fix: renamed ``fist_page_with_all_ids`` -> ``first_page_with_all_ids``
    # (typo; sibling test modules use the ``first_...`` spelling).
    first_page_with_all_ids = page_list[0]
    second_page_with_all_ids = page_list[1]
    page_with_single_id = page_list[2]
    associate_attribute_values_to_instance(
        first_page_with_all_ids,
        {
            page_type_category_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        second_page_with_all_ids,
        {
            page_type_category_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        page_with_single_id,
        {
            page_type_category_reference_attribute.pk: [
                first_attr_value,
            ],
        },
    )
    referenced_first_global_id = to_global_id_or_none(first_category)
    referenced_second_global_id = to_global_id_or_none(second_category)
    referenced_third_global_id = to_global_id_or_none(third_category)
    variables = {
        "where": {
            "attributes": [
                {
                    "slug": page_type_category_reference_attribute.slug,
                    "value": {
                        "reference": {
                            "referencedIds": {
                                filter_type: [
                                    referenced_first_global_id,
                                    referenced_second_global_id,
                                    referenced_third_global_id,
                                ]
                            }
                        }
                    },
                },
            ]
        }
    }
    # when
    response = staff_api_client.post_graphql(
        QUERY_PAGES_WITH_WHERE,
        variables,
    )
    # then
    content = get_graphql_content(response)
    pages_nodes = content["data"]["pages"]["edges"]
    # Sanity check that the filter excluded at least one page — assumes the
    # ``page_list`` fixture provides more pages than the filter can match.
    assert len(page_list) > len(pages_nodes)
    assert len(pages_nodes) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/page/tests/queries/pages_with_where/test_with_where_references_categories.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 274,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/product/tests/queries/products_filtrations/test_over_references_categories.py | import pytest
from ......attribute import AttributeEntityType, AttributeInputType, AttributeType
from ......attribute.models import Attribute, AttributeValue
from ......attribute.utils import associate_attribute_values_to_instance
from .....core.utils import to_global_id_or_none
from .....tests.utils import get_graphql_content
from .shared import PRODUCTS_FILTER_QUERY, PRODUCTS_WHERE_QUERY
@pytest.mark.parametrize("query", [PRODUCTS_WHERE_QUERY, PRODUCTS_FILTER_QUERY])
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_products_query_with_attr_slug_and_attribute_value_reference_to_categories(
    query,
    filter_type,
    expected_count,
    staff_api_client,
    product_type,
    product_list,
    category_list,
    product_type_category_reference_attribute,
    channel_USD,
):
    """Filter products by attribute slug plus referenced category slugs.

    The first product references both categories and the second only one,
    so ``containsAny`` matches 2 products and ``containsAll`` matches 1.
    """
    # given
    ref_attribute = product_type_category_reference_attribute
    product_type.product_attributes.add(ref_attribute)
    category_a, category_b = category_list[0], category_list[1]
    value_a, value_b = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=ref_attribute,
                name=f"Category {category.pk}",
                slug=f"category-{category.pk}",
                reference_category=category,
            )
            for category in (category_a, category_b)
        ]
    )
    # First product references both categories, the second only category_b.
    associate_attribute_values_to_instance(
        product_list[0],
        {ref_attribute.pk: [value_a, value_b]},
    )
    associate_attribute_values_to_instance(
        product_list[1],
        {ref_attribute.pk: [value_b]},
    )
    attribute_filter = {
        "slug": "category-reference",
        "value": {
            "reference": {
                "categorySlugs": {filter_type: [category_a.slug, category_b.slug]}
            }
        },
    }
    variables = {
        "where": {"attributes": [attribute_filter]},
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(query, variables)
    # then
    content = get_graphql_content(response)
    matched = content["data"]["products"]["edges"]
    assert len(matched) == expected_count
@pytest.mark.parametrize("query", [PRODUCTS_WHERE_QUERY, PRODUCTS_FILTER_QUERY])
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_products_query_with_attribute_value_reference_to_categories(
    query,
    filter_type,
    expected_count,
    staff_api_client,
    product_list,
    product_type,
    category_list,
    product_type_category_reference_attribute,
    channel_USD,
):
    """Filter products by referenced category slugs without an attribute slug.

    The two references live on two *different* attributes, so the filter must
    aggregate references across attributes: ``containsAny`` matches both
    products, ``containsAll`` only the product referencing both categories.
    """
    # given
    second_category_reference_attribute = Attribute.objects.create(
        slug="second-category-reference",
        name="Category reference",
        type=AttributeType.PRODUCT_TYPE,
        input_type=AttributeInputType.REFERENCE,
        entity_type=AttributeEntityType.CATEGORY,
    )
    product_type.product_attributes.add(
        product_type_category_reference_attribute,
        second_category_reference_attribute,
    )
    first_category = category_list[0]
    second_category = category_list[1]
    # One value per attribute: value 1 on the fixture attribute, value 2 on
    # the freshly created one.
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_category_reference_attribute,
                name=f"Category {first_category.pk}",
                slug=f"category-{first_category.pk}",
                reference_category=first_category,
            ),
            AttributeValue(
                attribute=second_category_reference_attribute,
                name=f"Category {second_category.pk}",
                slug=f"category-{second_category.pk}",
                reference_category=second_category,
            ),
        ]
    )
    product_with_both_references = product_list[0]
    associate_attribute_values_to_instance(
        product_with_both_references,
        {
            product_type_category_reference_attribute.pk: [attribute_value_1],
            second_category_reference_attribute.pk: [attribute_value_2],
        },
    )
    product_with_single_reference = product_list[1]
    associate_attribute_values_to_instance(
        product_with_single_reference,
        {second_category_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    # No "slug" key on purpose — matching is attribute-agnostic.
                    "value": {
                        "reference": {
                            "categorySlugs": {
                                filter_type: [first_category.slug, second_category.slug]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        query,
        variables,
    )
    # then
    content = get_graphql_content(response)
    products_nodes = content["data"]["products"]["edges"]
    assert len(products_nodes) == expected_count
@pytest.mark.parametrize("query", [PRODUCTS_WHERE_QUERY, PRODUCTS_FILTER_QUERY])
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 3), ("containsAll", 2)]
)
def test_products_query_with_attr_slug_and_attribute_value_referenced_category_ids(
    query,
    filter_type,
    expected_count,
    staff_api_client,
    product_list,
    product_type,
    category_list,
    product_type_category_reference_attribute,
    channel_USD,
):
    """Filter products by attribute slug and referenced category global IDs.

    Two products reference all three categories and one references only the
    first, so ``containsAny`` matches 3 products and ``containsAll`` matches
    the 2 products holding every reference.
    """
    # given
    product_type.product_attributes.add(product_type_category_reference_attribute)
    first_category = category_list[0]
    second_category = category_list[1]
    third_category = category_list[2]
    first_attr_value, second_attr_value, third_attr_value = (
        AttributeValue.objects.bulk_create(
            [
                AttributeValue(
                    attribute=product_type_category_reference_attribute,
                    name=f"Category {first_category.pk}",
                    slug=f"category-{first_category.pk}",
                    reference_category=first_category,
                ),
                AttributeValue(
                    attribute=product_type_category_reference_attribute,
                    name=f"Category {second_category.pk}",
                    slug=f"category-{second_category.pk}",
                    reference_category=second_category,
                ),
                AttributeValue(
                    attribute=product_type_category_reference_attribute,
                    name=f"Category {third_category.pk}",
                    slug=f"category-{third_category.pk}",
                    reference_category=third_category,
                ),
            ]
        )
    )
    first_product_with_all_ids = product_list[0]
    second_product_with_all_ids = product_list[1]
    product_with_single_id = product_list[2]
    associate_attribute_values_to_instance(
        first_product_with_all_ids,
        {
            product_type_category_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        second_product_with_all_ids,
        {
            product_type_category_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        product_with_single_id,
        {product_type_category_reference_attribute.pk: [first_attr_value]},
    )
    # The filter input takes global IDs rather than slugs.
    referenced_first_global_id = to_global_id_or_none(first_category)
    referenced_second_global_id = to_global_id_or_none(second_category)
    referenced_third_global_id = to_global_id_or_none(third_category)
    variables = {
        "where": {
            "attributes": [
                {
                    "slug": product_type_category_reference_attribute.slug,
                    "value": {
                        "reference": {
                            "referencedIds": {
                                filter_type: [
                                    referenced_first_global_id,
                                    referenced_second_global_id,
                                    referenced_third_global_id,
                                ]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        query,
        variables,
    )
    # then
    content = get_graphql_content(response)
    products_nodes = content["data"]["products"]["edges"]
    assert len(products_nodes) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/product/tests/queries/products_filtrations/test_over_references_categories.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 270,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/product/tests/queries/variants_where/test_over_references_categories.py | import pytest
from ......attribute import AttributeEntityType, AttributeInputType, AttributeType
from ......attribute.models import Attribute, AttributeValue
from ......attribute.utils import associate_attribute_values_to_instance
from .....core.utils import to_global_id_or_none
from .....tests.utils import get_graphql_content
from .shared import PRODUCT_VARIANTS_WHERE_QUERY
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_product_variants_query_with_attr_slug_and_attribute_value_reference_to_categories(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type_category_reference_attribute,
    channel_USD,
    category_list,
):
    """Filter variants by attribute slug plus referenced category slugs.

    The first variant references both categories and the second only one,
    so ``containsAny`` matches 2 variants and ``containsAll`` matches 1.
    """
    # given
    ref_attribute = product_type_category_reference_attribute
    product_type = product_variant_list[0].product.product_type
    product_type.variant_attributes.add(ref_attribute)
    category_a, category_b = category_list[0], category_list[1]
    value_a, value_b = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=ref_attribute,
                name=f"Category {category.pk}",
                slug=f"category-{category.pk}",
                reference_category=category,
            )
            for category in (category_a, category_b)
        ]
    )
    # First variant references both categories, the second only category_b.
    associate_attribute_values_to_instance(
        product_variant_list[0],
        {ref_attribute.pk: [value_a, value_b]},
    )
    associate_attribute_values_to_instance(
        product_variant_list[1],
        {ref_attribute.pk: [value_b]},
    )
    attribute_filter = {
        "slug": "category-reference",
        "value": {
            "reference": {
                "categorySlugs": {filter_type: [category_a.slug, category_b.slug]}
            }
        },
    }
    variables = {
        "where": {"attributes": [attribute_filter]},
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(PRODUCT_VARIANTS_WHERE_QUERY, variables)
    # then
    content = get_graphql_content(response)
    matched = content["data"]["productVariants"]["edges"]
    assert len(matched) == expected_count
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_product_variants_query_with_attribute_value_reference_to_categories(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type,
    product_type_category_reference_attribute,
    channel_USD,
    category_list,
):
    """Filter variants by referenced category slugs without an attribute slug.

    The two references live on two *different* variant attributes, so the
    filter must aggregate references across attributes: ``containsAny``
    matches both variants, ``containsAll`` only the variant referencing both
    categories.
    """
    # given
    second_category_reference_attribute = Attribute.objects.create(
        slug="second-category-reference",
        name="Category reference",
        type=AttributeType.PRODUCT_TYPE,
        input_type=AttributeInputType.REFERENCE,
        entity_type=AttributeEntityType.CATEGORY,
    )
    product_type.variant_attributes.add(
        product_type_category_reference_attribute,
        second_category_reference_attribute,
    )
    first_category = category_list[0]
    second_category = category_list[1]
    # One value per attribute: value 1 on the fixture attribute, value 2 on
    # the freshly created one.
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_category_reference_attribute,
                name=f"Category {first_category.pk}",
                slug=f"category-{first_category.pk}",
                reference_category=first_category,
            ),
            AttributeValue(
                attribute=second_category_reference_attribute,
                name=f"Category {second_category.pk}",
                slug=f"category-{second_category.pk}",
                reference_category=second_category,
            ),
        ]
    )
    product_variant_with_both_references = product_variant_list[0]
    associate_attribute_values_to_instance(
        product_variant_with_both_references,
        {
            product_type_category_reference_attribute.pk: [attribute_value_1],
            second_category_reference_attribute.pk: [attribute_value_2],
        },
    )
    product_variant_with_single_reference = product_variant_list[1]
    associate_attribute_values_to_instance(
        product_variant_with_single_reference,
        {second_category_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    # No "slug" key on purpose — matching is attribute-agnostic.
                    "value": {
                        "reference": {
                            "categorySlugs": {
                                filter_type: [first_category.slug, second_category.slug]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 3), ("containsAll", 2)]
)
def test_product_variants_query_with_attr_slug_and_attribute_value_referenced_category_ids(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type,
    product_type_category_reference_attribute,
    channel_USD,
    category_list,
):
    """Filter variants by attribute slug and referenced category global IDs.

    Two variants reference all three categories and one references only the
    first, so ``containsAny`` matches 3 variants and ``containsAll`` matches
    the 2 variants holding every reference.
    """
    # given
    product_type.variant_attributes.add(product_type_category_reference_attribute)
    first_category = category_list[0]
    second_category = category_list[1]
    third_category = category_list[2]
    first_attr_value, second_attr_value, third_attr_value = (
        AttributeValue.objects.bulk_create(
            [
                AttributeValue(
                    attribute=product_type_category_reference_attribute,
                    name=f"Category {first_category.pk}",
                    slug=f"category-{first_category.pk}",
                    reference_category=first_category,
                ),
                AttributeValue(
                    attribute=product_type_category_reference_attribute,
                    name=f"Category {second_category.pk}",
                    slug=f"category-{second_category.pk}",
                    reference_category=second_category,
                ),
                AttributeValue(
                    attribute=product_type_category_reference_attribute,
                    name=f"Category {third_category.pk}",
                    slug=f"category-{third_category.pk}",
                    reference_category=third_category,
                ),
            ]
        )
    )
    first_product_variant_with_all_ids = product_variant_list[0]
    second_product_variant_with_all_ids = product_variant_list[1]
    # NOTE(review): index 3 is used on purpose here, leaving the variant at
    # index 2 without any reference values — confirm against the fixture.
    product_variant_with_single_id = product_variant_list[3]
    associate_attribute_values_to_instance(
        first_product_variant_with_all_ids,
        {
            product_type_category_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        second_product_variant_with_all_ids,
        {
            product_type_category_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        product_variant_with_single_id,
        {product_type_category_reference_attribute.pk: [first_attr_value]},
    )
    # The filter input takes global IDs rather than slugs.
    referenced_first_global_id = to_global_id_or_none(first_category)
    referenced_second_global_id = to_global_id_or_none(second_category)
    referenced_third_global_id = to_global_id_or_none(third_category)
    variables = {
        "where": {
            "attributes": [
                {
                    "slug": product_type_category_reference_attribute.slug,
                    "value": {
                        "reference": {
                            "referencedIds": {
                                filter_type: [
                                    referenced_first_global_id,
                                    referenced_second_global_id,
                                    referenced_third_global_id,
                                ]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/product/tests/queries/variants_where/test_over_references_categories.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 264,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/attribute/search.py | from typing import Union
from django.db.models import QuerySet, Value
from ..attribute import AttributeInputType
from ..core.postgres import NoValidationSearchVector
from ..core.utils.editorjs import clean_editor_js
from .models import Attribute, AttributeValue
def get_search_vectors_for_attribute_values(
    attribute: Attribute,
    values: list | QuerySet,
    page_id_to_title_map: dict[int, str] | None = None,
    weight: str = "B",
) -> list[NoValidationSearchVector]:
    """Build search vectors for ``values`` belonging to ``attribute``.

    The indexed text depends on the attribute's input type: value names for
    (multi)selects, cleaned rich text / plain text for text attributes, name
    plus unit for numeric attributes, a formatted timestamp for date(-time)
    attributes, and the referenced page title for reference attributes.
    ``page_id_to_title_map`` lets callers supply page titles up front to
    avoid per-value page lookups. Unknown input types produce no vectors.

    Typing note: signature modernized to PEP 604 unions (`list | QuerySet`)
    for consistency with the rest of this module; `QuerySet` is imported at
    module level, so the forward-reference quotes were unnecessary.
    """
    search_vectors = []
    input_type = attribute.input_type
    if input_type in [AttributeInputType.DROPDOWN, AttributeInputType.MULTISELECT]:
        search_vectors += [
            NoValidationSearchVector(Value(value.name), config="simple", weight=weight)
            for value in values
        ]
    elif input_type == AttributeInputType.RICH_TEXT:
        search_vectors += [
            NoValidationSearchVector(
                Value(clean_editor_js(value.rich_text, to_string=True)),
                config="simple",
                weight=weight,
            )
            for value in values
        ]
    elif input_type == AttributeInputType.PLAIN_TEXT:
        search_vectors += [
            NoValidationSearchVector(
                Value(value.plain_text), config="simple", weight=weight
            )
            for value in values
        ]
    elif input_type == AttributeInputType.NUMERIC:
        unit = attribute.unit
        search_vectors += [
            NoValidationSearchVector(
                # Parses as (name + " " + unit) if unit else name.
                Value(value.name + " " + unit if unit else value.name),
                config="simple",
                weight=weight,
            )
            for value in values
        ]
    elif input_type in [AttributeInputType.DATE, AttributeInputType.DATE_TIME]:
        search_vectors += [
            NoValidationSearchVector(
                Value(value.date_time.strftime("%Y-%m-%d %H:%M:%S")),
                config="simple",
                weight=weight,
            )
            for value in values
        ]
    elif input_type in [
        AttributeInputType.REFERENCE,
        AttributeInputType.SINGLE_REFERENCE,
    ]:
        # for now only AttributeEntityType.PAGE is supported;
        # values referencing other entity types are skipped.
        search_vectors += [
            NoValidationSearchVector(
                Value(
                    get_reference_attribute_search_value(
                        value, page_id_to_title_map=page_id_to_title_map
                    )
                ),
                config="simple",
                weight=weight,
            )
            for value in values
            if value.reference_page_id is not None
        ]
    return search_vectors
def get_reference_attribute_search_value(
    attribute_value: AttributeValue, page_id_to_title_map: dict[int, str] | None = None
) -> str:
    """Return the text to index for a reference attribute value.

    Prefers a title from ``page_id_to_title_map`` when provided; otherwise
    falls back to the related page object. Returns an empty string when the
    value references no page or the title cannot be resolved.
    """
    page_id = attribute_value.reference_page_id
    if not page_id:
        return ""
    if page_id_to_title_map:
        return page_id_to_title_map.get(page_id, "")
    page = attribute_value.reference_page
    return page.title if page else ""
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/attribute/search.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 86,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/core/utils/batches.py | from django.db.models import QuerySet
def queryset_in_batches(queryset: QuerySet, batch_size: int):
    """Yield lists of primary keys from ``queryset``, at most ``batch_size`` each.

    Uses keyset pagination (``pk__gt`` on the last pk seen) so every batch is
    a single indexed query; assumes numeric primary keys greater than 0.
    """
    last_pk = 0
    while batch := list(
        queryset.filter(pk__gt=last_pk)
        .order_by("pk")[:batch_size]
        .values_list("pk", flat=True)
    ):
        yield batch
        last_pk = batch[-1]
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/core/utils/batches.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/graphql/attribute/dataloaders/assigned_attributes.py | from collections import defaultdict
from collections.abc import Iterable
from django.db.models import F, Window
from django.db.models.functions import RowNumber
from django.db.models.query import QuerySet
from promise import Promise
from ....attribute.models import Attribute, AttributeValue
from ....attribute.models.page import AssignedPageAttributeValue, AttributePage
from ....attribute.models.product import AssignedProductAttributeValue, AttributeProduct
from ....attribute.models.product_variant import (
AssignedVariantAttribute,
AssignedVariantAttributeValue,
AttributeVariant,
)
from ....core.db.connection import allow_writer_in_context
from ....page import models as page_models
from ....product import models as product_models
from ...core.dataloaders import DataLoader
from ...page.dataloaders import PageByIdLoader
from ...product.dataloaders.products import ProductByIdLoader, ProductByVariantIdLoader
from .attributes import (
AttributesByAttributeId,
AttributesBySlugLoader,
AttributeValueByIdLoader,
)
type PRODUCT_ID = int
type PRODUCT_TYPE_ID = int
type PAGE_ID = int
type VARIANT_ID = int
type ATTRIBUTE_ID = int
type ATTRIBUTE_SLUG = str
type LIMIT = int | None
type VARIANT_SELECTION = bool | None
class AttributesByProductIdAndLimitLoader(
    DataLoader[tuple[PRODUCT_ID, LIMIT], list[Attribute]]
):
    """Load, per ``(product_id, limit)`` key, the attributes assigned to the
    product's product type, truncated to ``limit`` entries (``None`` = all)."""

    context_key = "attribute_ids_by_product_id_and_limit"
    def get_attribute_product_qs(
        self, product_type_ids: set[int]
    ) -> QuerySet[AttributeProduct]:
        """Return attribute↔product-type through rows for the given types.

        Overridden by subclasses to narrow the attribute set.
        """
        return AttributeProduct.objects.using(self.database_connection_name).filter(
            product_type__in=product_type_ids
        )
    def batch_load(self, keys: Iterable[tuple[PRODUCT_ID, LIMIT]]):
        # NOTE(review): ``keys`` is iterated several times below — assumes the
        # DataLoader machinery passes a re-iterable sequence, not a generator.
        @allow_writer_in_context(self.context)
        def with_products(products: list[product_models.Product]):
            # Products that failed to load are skipped here and later resolve
            # to an empty attribute list.
            product_id_to_product_type_id_map = {
                product.id: product.product_type_id for product in products if product
            }
            product_type_ids = set(product_id_to_product_type_id_map.values())
            attribute_products = (
                self.get_attribute_product_qs(product_type_ids)
                .using(self.database_connection_name)
                .values_list("attribute_id", "product_type_id")
            )
            product_type_id_to_attribute_ids = defaultdict(list)
            for attribute_id, attr_product_type_id in attribute_products:
                product_type_id_to_attribute_ids[attr_product_type_id].append(
                    attribute_id
                )
            # Collect only the attribute ids actually requested across all
            # keys; a ``limit`` of None slices to the full list.
            attribute_ids = set()
            for product_id, limit in keys:
                product_type_id = product_id_to_product_type_id_map.get(product_id)
                if not product_type_id:
                    continue
                attribute_ids.update(
                    product_type_id_to_attribute_ids.get(product_type_id, [])[:limit]
                )
            def get_attributes_for_products(attributes: list[Attribute]):
                # Re-assemble, per key, the (truncated) attribute list in the
                # order the through-table rows were returned.
                attribute_map = {attr.id: attr for attr in attributes}
                response = []
                for product_id, limit in keys:
                    single_response_entry: list[Attribute] = []
                    product_type_id = product_id_to_product_type_id_map.get(product_id)
                    if not product_type_id:
                        response.append(single_response_entry)
                        continue
                    product_attribute_ids = product_type_id_to_attribute_ids.get(
                        product_type_id, []
                    )[:limit]
                    for product_attribute_id in product_attribute_ids:
                        attribute = attribute_map.get(product_attribute_id)
                        if not attribute:
                            continue
                        single_response_entry.append(attribute)
                    response.append(single_response_entry)
                return response
            return (
                AttributesByAttributeId(self.context)
                .load_many(attribute_ids)
                .then(get_attributes_for_products)
            )
        product_ids = [product_id for product_id, _ in keys]
        return (
            ProductByIdLoader(self.context).load_many(product_ids).then(with_products)
        )
class AttributesVisibleToCustomerByProductIdAndLimitLoader(
    AttributesByProductIdAndLimitLoader
):
    """Variant of the parent loader restricted to storefront-visible attributes."""

    context_key = "attributes_visible_to_customer_by_product_id_and_limit"
    def get_attribute_product_qs(
        self, product_type_ids: set[int]
    ) -> QuerySet[AttributeProduct]:
        # Same as the parent query, but only attributes flagged as visible in
        # the storefront.
        return AttributeProduct.objects.using(self.database_connection_name).filter(
            attribute__visible_in_storefront=True,
            product_type__in=product_type_ids,
        )
class AttributeByProductIdAndAttributeSlugLoader(
    DataLoader[tuple[PRODUCT_ID, ATTRIBUTE_SLUG], Attribute | None]
):
    """Resolve an attribute by slug, but only if it is assigned to the
    product's product type; otherwise resolve to ``None``."""

    context_key = "attribute_by_product_id_and_attribute_slug"
    def batch_load(self, keys: Iterable[tuple[PRODUCT_ID, ATTRIBUTE_SLUG]]):
        product_ids = [product_id for product_id, _ in keys]
        attribute_slugs = [attribute_slug for _, attribute_slug in keys]
        def with_attributes_and_products(
            data: tuple[list[product_models.Product], list[Attribute]],
        ):
            products, attributes = data
            product_type_ids = {
                product.product_type_id for product in products if product is not None
            }
            product_map = {
                product.id: product for product in products if product is not None
            }
            attribute_map = {attr.slug: attr for attr in attributes if attr is not None}
            # One query for the attribute↔product-type assignments covering
            # every (attribute, product type) pair seen in the keys.
            attribute_products = (
                AttributeProduct.objects.using(self.database_connection_name)
                .filter(
                    attribute__in=[attr.id for attr in attribute_map.values()],
                    product_type__in=product_type_ids,
                )
                .values_list("attribute_id", "product_type_id")
            )
            product_type_id_to_attribute_ids = defaultdict(set)
            for attribute_id, product_type_id in attribute_products:
                product_type_id_to_attribute_ids[product_type_id].add(attribute_id)
            response: list[Attribute | None] = []
            for product_id, attribute_slug in keys:
                # None when the slug is unknown, the product failed to load,
                # or the attribute is not assigned to the product's type.
                attribute = attribute_map.get(attribute_slug)
                if not attribute:
                    response.append(None)
                    continue
                product = product_map.get(product_id)
                if not product:
                    response.append(None)
                    continue
                product_type_id = product.product_type_id
                attributes_assigned_to_product_type = (
                    product_type_id_to_attribute_ids.get(product_type_id, set())
                )
                if attribute.id in attributes_assigned_to_product_type:
                    response.append(attribute)
                else:
                    response.append(None)
            return response
        products_loader = ProductByIdLoader(self.context).load_many(product_ids)
        attributes_loader = AttributesBySlugLoader(self.context).load_many(
            attribute_slugs
        )
        return Promise.all([products_loader, attributes_loader]).then(
            with_attributes_and_products
        )
class AttributeByProductVariantIdAndAttributeSlugLoader(
    DataLoader[tuple[VARIANT_ID, ATTRIBUTE_SLUG], Attribute | None]
):
    """Resolve an attribute by slug, but only if it is assigned as a variant
    attribute on the variant's product type; otherwise resolve to ``None``."""

    context_key = "attribute_by_product_variant_id_and_attribute_slug"
    def batch_load(self, keys: Iterable[tuple[VARIANT_ID, ATTRIBUTE_SLUG]]):
        variant_ids = [variant_id for variant_id, _ in keys]
        attribute_slugs = [attribute_slug for _, attribute_slug in keys]
        def with_attributes_and_products(
            data: tuple[list[product_models.Product], list[Attribute]],
        ):
            products, attributes = data
            product_type_ids = {
                product.product_type_id for product in products if product is not None
            }
            # Pair each key's variant id with its loaded product — relies on
            # ``load_many`` returning results in the same order as the keys.
            variant_id_to_product_map = {
                variant_id: product
                for (variant_id, _), product in zip(keys, products, strict=False)
            }
            attribute_map = {attr.slug: attr for attr in attributes if attr is not None}
            attribute_variants = (
                AttributeVariant.objects.using(self.database_connection_name)
                .filter(
                    attribute__in=[attr.id for attr in attribute_map.values()],
                    product_type__in=product_type_ids,
                )
                .values_list("attribute_id", "product_type_id")
            )
            product_type_id_to_attribute_ids = defaultdict(set)
            for attribute_id, attr_product_type_id in attribute_variants:
                product_type_id_to_attribute_ids[attr_product_type_id].add(attribute_id)
            response: list[Attribute | None] = []
            for variant_id, attribute_slug in keys:
                # None when the slug is unknown, the product failed to load,
                # or the attribute is not assigned to the product's type.
                attribute = attribute_map.get(attribute_slug)
                if not attribute:
                    response.append(None)
                    continue
                product = variant_id_to_product_map.get(variant_id)
                if not product:
                    response.append(None)
                    continue
                product_type_id = product.product_type_id
                attributes_assigned_to_product_type = (
                    product_type_id_to_attribute_ids.get(product_type_id, set())
                )
                if attribute.id in attributes_assigned_to_product_type:
                    response.append(attribute)
                else:
                    response.append(None)
            return response
        products_loader = ProductByVariantIdLoader(self.context).load_many(variant_ids)
        attributes_loader = AttributesBySlugLoader(self.context).load_many(
            attribute_slugs
        )
        return Promise.all([products_loader, attributes_loader]).then(
            with_attributes_and_products
        )
class AttributesByProductVariantIdAndSelectionAndLimitLoader(
    DataLoader[tuple[VARIANT_ID, LIMIT, VARIANT_SELECTION], list[Attribute]]
):
    """Load up to `limit` variant attributes for each variant.

    Keys are (variant_id, limit, variant_selection) tuples. `limit` may be
    None ("no limit"); `variant_selection`, when not None, keeps only
    attributes whose `variant_selection` flag equals it.
    """

    context_key = "attribute_ids_by_product_variant_id_and_limit"

    def get_attribute_variant_qs(
        self, product_type_ids: set[int]
    ) -> QuerySet[AttributeVariant]:
        """Return attribute/product-type assignment rows; subclasses narrow this."""
        return AttributeVariant.objects.using(self.database_connection_name).filter(
            product_type__in=product_type_ids
        )

    def batch_load(self, keys: Iterable[tuple[VARIANT_ID, LIMIT, VARIANT_SELECTION]]):
        @allow_writer_in_context(self.context)
        def with_products(products: list[product_models.Product]):
            # get attribute variants assigned to product type of the variants
            variant_id_to_product_id_map = {
                variant_id: product.id
                for (variant_id, _, _), product in zip(keys, products, strict=False)
            }
            product_id_to_product_type_id_map = {
                product.id: product.product_type_id for product in products
            }
            product_type_ids = set(product_id_to_product_type_id_map.values())
            attribute_variants = self.get_attribute_variant_qs(
                product_type_ids
            ).values_list("attribute_id", "product_type_id", "variant_selection")
            product_type_id_to_attribute_id_and_variant_selection = defaultdict(list)
            for (
                attribute_id,
                attr_product_type_id,
                variant_selection,
            ) in attribute_variants:
                product_type_id_to_attribute_id_and_variant_selection[
                    attr_product_type_id
                ].append((attribute_id, variant_selection))

            def matching_attribute_ids(
                product_type_id: int,
                variant_selection: bool | None,
                limit: LIMIT,
            ) -> list[int]:
                """First `limit` attribute ids passing the selection filter."""
                entries: list[tuple[int, bool]] = (
                    product_type_id_to_attribute_id_and_variant_selection.get(
                        product_type_id, []
                    )
                )
                matching = [
                    attr_id
                    for attr_id, is_variant_selection in entries
                    if variant_selection is None
                    or variant_selection == is_variant_selection
                ]
                return matching[:limit]

            # Collect the union of attribute ids any key may resolve to, so a
            # single AttributesByAttributeId batch covers all keys.
            attribute_ids = set()
            for variant_id, limit, variant_selection in keys:
                product_id = variant_id_to_product_id_map.get(variant_id)
                if not product_id:
                    continue
                product_type_id = product_id_to_product_type_id_map.get(product_id)
                if not product_type_id:
                    continue
                attribute_ids.update(
                    matching_attribute_ids(product_type_id, variant_selection, limit)
                )

            def get_attributes_for_variants(attributes: list[Attribute]):
                attribute_map = {attr.id: attr for attr in attributes}
                response = []
                # Build the response per input key, in key order.
                for variant_id, limit, variant_selection in keys:
                    single_response_entry: list[Attribute] = []
                    product_id = variant_id_to_product_id_map.get(variant_id)
                    if not product_id:
                        response.append(single_response_entry)
                        continue
                    product_type_id = product_id_to_product_type_id_map.get(product_id)
                    if not product_type_id:
                        response.append(single_response_entry)
                        continue
                    # BUGFIX: filter by variant_selection *before* slicing by
                    # limit, mirroring the id-collection pass above. The old
                    # code sliced first and filtered after, which could drop
                    # matching attributes that had been fetched (e.g. entries
                    # [(A, False), (B, True)], selection=True, limit=1 would
                    # return [] instead of [B]).
                    for attr_id in matching_attribute_ids(
                        product_type_id, variant_selection, limit
                    ):
                        attribute = attribute_map.get(attr_id)
                        if not attribute:
                            continue
                        single_response_entry.append(attribute)
                    response.append(single_response_entry)
                return response

            return (
                AttributesByAttributeId(self.context)
                .load_many(attribute_ids)
                .then(get_attributes_for_variants)
            )

        return (
            ProductByVariantIdLoader(self.context)
            .load_many([variant_id for variant_id, _, _ in keys])
            .then(with_products)
        )
class AttributesVisibleToCustomerByProductVariantIdAndSelectionAndLimitLoader(
    AttributesByProductVariantIdAndSelectionAndLimitLoader
):
    """Variant of the parent loader that hides storefront-invisible attributes."""

    context_key = "attribute_ids_visible_to_customer_by_product_variant_id_and_limit"

    def get_attribute_variant_qs(
        self, product_type_ids: set[int]
    ) -> QuerySet[AttributeVariant]:
        # Same rows as the parent loader, minus storefront-hidden attributes.
        manager = AttributeVariant.objects.using(self.database_connection_name)
        return manager.filter(
            attribute__visible_in_storefront=True,
            product_type__in=product_type_ids,
        )
class AttributesByPageIdAndLimitLoader(
    DataLoader[tuple[PAGE_ID, LIMIT], list[Attribute]]
):
    """Load up to `limit` attributes assigned to each page's page type.

    Keys are (page_id, limit) tuples; `limit` may be None for "no limit".
    """

    context_key = "attribute_ids_by_page_id_and_limit"

    def get_attribute_page_qs(self, page_type_ids: set[int]) -> QuerySet[AttributePage]:
        """Return attribute/page-type assignment rows; subclasses narrow this."""
        return AttributePage.objects.using(self.database_connection_name).filter(
            page_type__in=page_type_ids
        )

    def batch_load(self, keys: Iterable[tuple[PAGE_ID, LIMIT]]):
        @allow_writer_in_context(self.context)
        def with_pages(pages: list[page_models.Page]):
            page_id_to_page_type_id_map = {
                page.id: page.page_type_id for page in pages if page
            }
            page_type_ids = set(page_id_to_page_type_id_map.values())
            # FIX: dropped the redundant .using(self.database_connection_name)
            # previously chained here — get_attribute_page_qs() (and its
            # subclass override) already routes through that connection.
            attribute_pages = self.get_attribute_page_qs(page_type_ids).values_list(
                "attribute_id", "page_type_id"
            )
            page_type_id_to_attribute_ids = defaultdict(list)
            for attribute_id, attr_page_type_id in attribute_pages:
                page_type_id_to_attribute_ids[attr_page_type_id].append(attribute_id)
            # Union of attribute ids any key may resolve to, so one
            # AttributesByAttributeId batch covers all keys.
            attribute_ids = set()
            for page_id, limit in keys:
                page_type_id = page_id_to_page_type_id_map.get(page_id)
                if not page_type_id:
                    continue
                attribute_ids.update(
                    page_type_id_to_attribute_ids.get(page_type_id, [])[:limit]
                )

            def get_attributes_for_pages(attributes: list[Attribute]):
                attribute_map = {attr.id: attr for attr in attributes}
                response = []
                # One list per input key, in key order.
                for page_id, limit in keys:
                    single_response_entry: list[Attribute] = []
                    page_type_id = page_id_to_page_type_id_map.get(page_id)
                    if not page_type_id:
                        response.append(single_response_entry)
                        continue
                    page_attribute_ids = page_type_id_to_attribute_ids.get(
                        page_type_id, []
                    )[:limit]
                    for page_attribute_id in page_attribute_ids:
                        attribute = attribute_map.get(page_attribute_id)
                        if not attribute:
                            continue
                        single_response_entry.append(attribute)
                    response.append(single_response_entry)
                return response

            return (
                AttributesByAttributeId(self.context)
                .load_many(attribute_ids)
                .then(get_attributes_for_pages)
            )

        page_ids = [page_id for page_id, _ in keys]
        return PageByIdLoader(self.context).load_many(page_ids).then(with_pages)
class AttributesVisibleToCustomerByPageIdAndLimitLoader(
    AttributesByPageIdAndLimitLoader
):
    """Variant of the parent loader that hides storefront-invisible attributes."""

    context_key = "attributes_visible_to_customer_by_page_id_and_limit"

    def get_attribute_page_qs(self, page_type_ids: set[int]) -> QuerySet[AttributePage]:
        # Same rows as the parent loader, minus storefront-hidden attributes.
        manager = AttributePage.objects.using(self.database_connection_name)
        return manager.filter(
            attribute__visible_in_storefront=True,
            page_type__in=page_type_ids,
        )
class AttributeByPageIdAndAttributeSlugLoader(
    DataLoader[tuple[PAGE_ID, ATTRIBUTE_SLUG], Attribute | None]
):
    """Load an attribute by (page id, attribute slug) key.

    The attribute is returned only when it is assigned to the page type of
    the given page; otherwise the entry resolves to None.
    """
    context_key = "attribute_by_page_id_and_attribute_slug"
    def batch_load(self, keys: Iterable[tuple[PAGE_ID, ATTRIBUTE_SLUG]]):
        page_ids = [page_id for page_id, _ in keys]
        attribute_slugs = [attribute_slug for _, attribute_slug in keys]
        def with_pages_and_attributes(
            data: tuple[list[page_models.Page], list[Attribute]],
        ):
            pages, attributes = data
            page_type_ids = {page.page_type_id for page in pages if page is not None}
            page_map = {page.id: page for page in pages if page is not None}
            attribute_map = {attr.slug: attr for attr in attributes if attr is not None}
            # Assignment rows linking the requested attributes to page types.
            attribute_pages = (
                AttributePage.objects.using(self.database_connection_name)
                .filter(
                    attribute__in=[attr.id for attr in attribute_map.values()],
                    page_type__in=page_type_ids,
                )
                .values_list("attribute_id", "page_type_id")
            )
            page_type_id_to_attribute_ids = defaultdict(set)
            for attribute_id, page_type_id in attribute_pages:
                page_type_id_to_attribute_ids[page_type_id].add(attribute_id)
            response: list[Attribute | None] = []
            for page_id, attribute_slug in keys:
                attribute = attribute_map.get(attribute_slug)
                if not attribute:
                    response.append(None)
                    continue
                page = page_map.get(page_id)
                if not page:
                    response.append(None)
                    continue
                page_type_id = page.page_type_id
                attributes_assigned_to_page_type = page_type_id_to_attribute_ids.get(
                    page_type_id, set()
                )
                # Only expose the attribute when it is assigned to this page's type.
                if attribute.id in attributes_assigned_to_page_type:
                    response.append(attribute)
                else:
                    response.append(None)
            return response
        pages_loader = PageByIdLoader(self.context).load_many(page_ids)
        attributes_loader = AttributesBySlugLoader(self.context).load_many(
            attribute_slugs
        )
        return Promise.all([pages_loader, attributes_loader]).then(
            with_pages_and_attributes
        )
class AttributeValuesByProductIdAndAttributeIdAndLimitLoader(
    DataLoader[tuple[PRODUCT_ID, ATTRIBUTE_ID, LIMIT], list[AttributeValue]]
):
    """Load assigned attribute values for (product id, attribute id, limit) keys."""
    context_key = "attribute_values_by_product_and_attribute"
    def batch_load(self, keys: Iterable[tuple[PRODUCT_ID, ATTRIBUTE_ID, LIMIT]]):
        # Group the requested (product, attribute) pairs by limit so a single
        # window query per distinct limit value can cap the rows.
        limit_map = defaultdict(list)
        for product_id, attribute_id, limit in keys:
            limit_map[limit].append((product_id, attribute_id))
        attribute_value_id_to_fetch_map: dict[
            tuple[PRODUCT_ID, ATTRIBUTE_ID, LIMIT], list[int]
        ] = defaultdict(list)
        for limit, values in limit_map.items():
            product_ids = [val[0] for val in values]
            attribute_ids = [val[1] for val in values]
            assigned_attribute_values = AssignedProductAttributeValue.objects.using(
                self.database_connection_name
            ).annotate(attribute_id=F("value__attribute_id"))
            if limit is not None:
                # Keep only the first `limit` values per (attribute, product)
                # pair, ordered by sort_order then pk, via a window function.
                assigned_attribute_values = assigned_attribute_values.annotate(
                    row_num=Window(
                        expression=RowNumber(),
                        partition_by=[F("attribute_id"), F("product_id")],
                        order_by=["sort_order", "pk"],
                    )
                ).filter(row_num__lte=limit)
            # NOTE(review): filtering uses the two id lists independently, so
            # cross (product, attribute) combinations not requested may also
            # match; they are only added under their own key below.
            assigned_attribute_values = assigned_attribute_values.filter(
                attribute_id__in=attribute_ids,
                product_id__in=product_ids,
            ).values_list("product_id", "value_id", "attribute_id")
            for product_id, value_id, attribute_id in assigned_attribute_values:
                attribute_value_id_to_fetch_map[
                    (product_id, attribute_id, limit)
                ].append(value_id)
        # Deduplicate value ids across all keys before loading them.
        attribute_value_ids_to_fetch = set()
        for id_list in attribute_value_id_to_fetch_map.values():
            attribute_value_ids_to_fetch.update(id_list)
        def with_attribute_values(attribute_values: list[AttributeValue]):
            attribute_value_map = {av.id: av for av in attribute_values}
            response_data: defaultdict[
                tuple[PRODUCT_ID, ATTRIBUTE_ID, LIMIT], list[AttributeValue]
            ] = defaultdict(list)
            for (
                product_id,
                attribute_id,
                limit,
            ), value_ids in attribute_value_id_to_fetch_map.items():
                for value_id in value_ids:
                    attr_val = attribute_value_map.get(value_id)
                    if not attr_val:
                        continue
                    response_data[(product_id, attribute_id, limit)].append(attr_val)
            # One list per input key, in key order.
            return [response_data.get(key, []) for key in keys]
        return (
            AttributeValueByIdLoader(self.context)
            .load_many(attribute_value_ids_to_fetch)
            .then(with_attribute_values)
        )
class AttributeValuesByPageIdAndAttributeIdAndLimitLoader(
    DataLoader[tuple[PAGE_ID, ATTRIBUTE_ID, LIMIT], list[AttributeValue]]
):
    """Load assigned attribute values for (page id, attribute id, limit) keys.

    Page counterpart of
    AttributeValuesByProductIdAndAttributeIdAndLimitLoader.
    """
    context_key = "attribute_values_by_page_and_attribute"
    def batch_load(self, keys: Iterable[tuple[PAGE_ID, ATTRIBUTE_ID, LIMIT]]):
        # Group the requested (page, attribute) pairs by limit so a single
        # window query per distinct limit value can cap the rows.
        limit_map = defaultdict(list)
        for page_id, attribute_id, limit in keys:
            limit_map[limit].append((page_id, attribute_id))
        attribute_value_id_to_fetch_map: dict[
            tuple[PAGE_ID, ATTRIBUTE_ID, LIMIT], list[int]
        ] = defaultdict(list)
        for limit, values in limit_map.items():
            page_ids = [val[0] for val in values]
            attribute_ids = [val[1] for val in values]
            assigned_attribute_values = AssignedPageAttributeValue.objects.using(
                self.database_connection_name
            ).annotate(attribute_id=F("value__attribute_id"))
            if limit is not None:
                # Keep only the first `limit` values per (attribute, page)
                # pair, ordered by sort_order then pk, via a window function.
                assigned_attribute_values = assigned_attribute_values.annotate(
                    row_num=Window(
                        expression=RowNumber(),
                        partition_by=[F("attribute_id"), F("page_id")],
                        order_by=["sort_order", "pk"],
                    )
                ).filter(row_num__lte=limit)
            assigned_attribute_values = assigned_attribute_values.filter(
                attribute_id__in=attribute_ids,
                page_id__in=page_ids,
            ).values_list("page_id", "value_id", "attribute_id")
            for page_id, value_id, attribute_id in assigned_attribute_values:
                attribute_value_id_to_fetch_map[(page_id, attribute_id, limit)].append(
                    value_id
                )
        # Deduplicate value ids across all keys before loading them.
        attribute_value_ids_to_fetch = set()
        for id_list in attribute_value_id_to_fetch_map.values():
            attribute_value_ids_to_fetch.update(id_list)
        def with_attribute_values(attribute_values: list[AttributeValue]):
            attribute_value_map = {av.id: av for av in attribute_values}
            response_data: defaultdict[
                tuple[PAGE_ID, ATTRIBUTE_ID, LIMIT], list[AttributeValue]
            ] = defaultdict(list)
            for (
                page_id,
                attribute_id,
                limit,
            ), value_ids in attribute_value_id_to_fetch_map.items():
                for value_id in value_ids:
                    attr_val = attribute_value_map.get(value_id)
                    if not attr_val:
                        continue
                    response_data[(page_id, attribute_id, limit)].append(attr_val)
            # One list per input key, in key order.
            return [response_data.get(key, []) for key in keys]
        return (
            AttributeValueByIdLoader(self.context)
            .load_many(attribute_value_ids_to_fetch)
            .then(with_attribute_values)
        )
class AttributeValuesByVariantIdAndAttributeIdAndLimitLoader(
    DataLoader[tuple[VARIANT_ID, ATTRIBUTE_ID, LIMIT], list[AttributeValue]]
):
    """Load assigned attribute values for (variant id, attribute id, limit) keys.

    Unlike the product/page loaders, variant values hang off an
    AssignedVariantAttribute row, so an extra assignment-id indirection is
    resolved first.
    """
    context_key = "attribute_values_by_variant_and_attribute"
    def batch_load(self, keys: Iterable[tuple[VARIANT_ID, ATTRIBUTE_ID, LIMIT]]):
        # Group the requested (variant, attribute) pairs by limit so a single
        # window query per distinct limit value can cap the rows.
        limit_map = defaultdict(list)
        for variant_id, attribute_id, limit in keys:
            limit_map[limit].append((variant_id, attribute_id))
        attribute_value_id_to_fetch_map: dict[
            tuple[VARIANT_ID, ATTRIBUTE_ID, LIMIT], list[int]
        ] = defaultdict(list)
        for limit, values in limit_map.items():
            variant_ids = [val[0] for val in values]
            attribute_ids = [val[1] for val in values]
            # Resolve assignment id -> (variant id, attribute id) first.
            assigned_variant_attributes = (
                AssignedVariantAttribute.objects.using(self.database_connection_name)
                .annotate(attribute_id=F("assignment__attribute_id"))
                .filter(
                    variant_id__in=variant_ids,
                    attribute_id__in=attribute_ids,
                )
                .values_list("variant_id", "id", "attribute_id")
            )
            assigned_variant_attributes_map = {
                id_: (variant_id, attribute_id)
                for variant_id, id_, attribute_id in assigned_variant_attributes
            }
            assigned_variant_values = AssignedVariantAttributeValue.objects.using(
                self.database_connection_name
            ).filter(assignment_id__in=assigned_variant_attributes_map.keys())
            if limit is not None:
                # Keep only the first `limit` values per assignment, ordered
                # by sort_order then pk, via a window function.
                assigned_variant_values = assigned_variant_values.annotate(
                    row_num=Window(
                        expression=RowNumber(),
                        partition_by=F("assignment_id"),
                        order_by=["sort_order", "pk"],
                    )
                ).filter(row_num__lte=limit)
            assigned_variant_values = assigned_variant_values.values_list(
                "value_id", "assignment_id"
            )
            for value_id, assignment_id in assigned_variant_values:
                if assignment_id not in assigned_variant_attributes_map:
                    continue
                variant_id, attribute_id = assigned_variant_attributes_map[
                    assignment_id
                ]
                attribute_value_id_to_fetch_map[
                    (variant_id, attribute_id, limit)
                ].append(value_id)
        # Deduplicate value ids across all keys before loading them.
        attribute_value_ids_to_fetch = set()
        for id_list in attribute_value_id_to_fetch_map.values():
            attribute_value_ids_to_fetch.update(id_list)
        def with_attribute_values(attribute_values: list[AttributeValue]):
            attribute_value_map = {av.id: av for av in attribute_values}
            response_data: defaultdict[
                tuple[VARIANT_ID, ATTRIBUTE_ID, LIMIT], list[AttributeValue]
            ] = defaultdict(list)
            for (
                variant_id,
                attribute_id,
                limit,
            ), value_ids in attribute_value_id_to_fetch_map.items():
                for value_id in value_ids:
                    attr_val = attribute_value_map.get(value_id)
                    if not attr_val:
                        continue
                    response_data[(variant_id, attribute_id, limit)].append(attr_val)
            # One list per input key, in key order.
            return [response_data.get(key, []) for key in keys]
        return (
            AttributeValueByIdLoader(self.context)
            .load_many(attribute_value_ids_to_fetch)
            .then(with_attribute_values)
        )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/attribute/dataloaders/assigned_attributes.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 636,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/graphql/attribute/dataloaders/reference_types.py | from collections import defaultdict
from collections.abc import Iterable
from ....attribute.models import Attribute
from ....page.models import PageType
from ....product.models import ProductType
from ...core.dataloaders import DataLoader
from ...page.dataloaders import PageTypeByIdLoader
from ...product.dataloaders import ProductTypeByIdLoader
# Type aliases describing the components of the DataLoader key tuples below.
type LIMIT = int | None  # None means "no limit" (slicing with [:None] keeps all)
type PAGE_TYPE_ID = int
type ATTRIBUTE_ID = int
class AttributeReferenceProductTypesByAttributeIdAndLimitLoader(
    DataLoader[tuple[ATTRIBUTE_ID, LIMIT], list[ProductType]]
):
    """Load the product types referenced by each attribute, up to `limit` per key."""

    context_key = "attributereferenceproducttypes_by_attributeid_and_limit"

    def batch_load(self, keys: Iterable[tuple[ATTRIBUTE_ID, LIMIT]]):
        through_model = Attribute.reference_product_types.through
        requested_attribute_ids = [attr_id for attr_id, _ in keys]
        link_rows = (
            through_model.objects.using(self.database_connection_name).filter(
                attribute_id__in=requested_attribute_ids
            )
        ).values_list("attribute_id", "producttype_id")
        links_by_attribute = defaultdict(list)
        for attr_id, linked_product_type_id in link_rows:
            links_by_attribute[attr_id].append(linked_product_type_id)
        # Union of product-type ids any key may resolve to; fetched in one batch.
        ids_to_fetch = set()
        for attr_id, limit in keys:
            ids_to_fetch.update(links_by_attribute.get(attr_id, [])[:limit])

        def build_response(product_types):
            by_id = {pt.id: pt for pt in product_types}
            # One list per input key, in key order; missing ids are skipped.
            return [
                [
                    by_id[pt_id]
                    for pt_id in links_by_attribute.get(attr_id, [])[:limit]
                    if pt_id in by_id
                ]
                for attr_id, limit in keys
            ]

        return (
            ProductTypeByIdLoader(self.context)
            .load_many(list(ids_to_fetch))
            .then(build_response)
        )
class AttributeReferencePageTypesByAttributeIdAndLimitLoader(
    DataLoader[tuple[ATTRIBUTE_ID, LIMIT], list[PageType]]
):
    """Load the page types referenced by each attribute, up to `limit` per key."""

    context_key = "attributereferencepagetypes_by_attributeid_and_limit"

    def batch_load(self, keys: Iterable[tuple[ATTRIBUTE_ID, LIMIT]]):
        through_model = Attribute.reference_page_types.through
        requested_attribute_ids = [attr_id for attr_id, _ in keys]
        link_rows = (
            through_model.objects.using(self.database_connection_name).filter(
                attribute_id__in=requested_attribute_ids
            )
        ).values_list("attribute_id", "pagetype_id")
        links_by_attribute = defaultdict(list)
        for attr_id, linked_page_type_id in link_rows:
            links_by_attribute[attr_id].append(linked_page_type_id)
        # Union of page-type ids any key may resolve to; fetched in one batch.
        ids_to_fetch = set()
        for attr_id, limit in keys:
            ids_to_fetch.update(links_by_attribute.get(attr_id, [])[:limit])

        def build_response(page_types):
            by_id = {pt.id: pt for pt in page_types}
            # One list per input key, in key order; missing ids are skipped.
            return [
                [
                    by_id[pt_id]
                    for pt_id in links_by_attribute.get(attr_id, [])[:limit]
                    if pt_id in by_id
                ]
                for attr_id, limit in keys
            ]

        return (
            PageTypeByIdLoader(self.context)
            .load_many(list(ids_to_fetch))
            .then(build_response)
        )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/attribute/dataloaders/reference_types.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 89,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
saleor/saleor:saleor/graphql/attribute/unions.py | import graphene
from ...page import models as page_models
from ...product import models as product_models
from ..core import ResolveInfo
from ..page.types import PageType
from ..product.types import ProductType
class ReferenceType(graphene.Union):
    class Meta:
        description = (
            "The reference types (product or page type) that are used to narrow down "
            "the choices of reference objects.\n"
            "ProductType applicable for reference attribute with `PRODUCT` or "
            "`PRODUCT_VARIANT` entity type.\n"
            "PageType applicable for reference attribute with `PAGE` entity type."
        )
        types = (ProductType, PageType)

    @classmethod
    def resolve_type(cls, instance, info: ResolveInfo):
        # Map each Django model class to its GraphQL counterpart; fall back to
        # graphene's default resolution for anything else.
        dispatch = (
            (product_models.ProductType, ProductType),
            (page_models.PageType, PageType),
        )
        for model_cls, graphql_type in dispatch:
            if isinstance(instance, model_cls):
                return graphql_type
        return super().resolve_type(instance, info)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/attribute/unions.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/tests/e2e/attributes/utils/update_attribute.py | from ...utils import get_graphql_content
ATTRIBUTE_UPDATE_MUTATION = """
mutation updateAttribute(
$id: ID!, $input: AttributeUpdateInput!
) {
attributeUpdate(
id: $id,
input: $input) {
errors {
field
message
code
}
attribute {
id
name
slug
unit
entityType
referenceTypes {
... on ProductType {
id
slug
}
... on PageType {
id
slug
}
}
externalReference
productTypes(first: 10) {
edges {
node {
id
}
}
}
}
}
}
"""
def attribute_update(client, attribute_id, input_data):
    """Send a GraphQL mutation to update an attribute."""
    response = client.post_graphql(
        ATTRIBUTE_UPDATE_MUTATION,
        {"id": attribute_id, "input": input_data},
    )
    content = get_graphql_content(response)
    mutation_payload = content["data"]["attributeUpdate"]
    # The helper is only used on the happy path, so fail fast on any error.
    assert mutation_payload["errors"] == []
    attribute_data = mutation_payload["attribute"]
    assert attribute_data["id"] == attribute_id
    return attribute_data
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/tests/e2e/attributes/utils/update_attribute.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 53,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/tests/e2e/product/test_create_product_with_restricted_reference_attributes.py | import pytest
from ..attributes.utils import attribute_create, attribute_update
from ..pages.utils import create_page, create_page_type
from ..utils import assign_permissions
from .utils import (
create_category,
create_product,
create_product_type,
create_product_variant,
raw_create_product,
update_product,
)
@pytest.mark.e2e
def test_product_with_restricted_reference_attribute(
    e2e_staff_api_client,
    permission_manage_page_types_and_attributes,
    permission_manage_pages,
    permission_manage_products,
    permission_manage_product_types_and_attributes,
):
    """Reference attributes restricted via `referenceTypes` reject references
    of other types on product create; clearing the restriction makes the
    previously rejected references acceptable again."""
    # Before
    permissions = [
        permission_manage_page_types_and_attributes,
        permission_manage_pages,
        permission_manage_products,
        permission_manage_product_types_and_attributes,
    ]
    assign_permissions(e2e_staff_api_client, permissions)
    category_data = create_category(e2e_staff_api_client)
    category_id = category_data["id"]
    page_type_data = create_page_type(e2e_staff_api_client)
    page_type_id = page_type_data["id"]
    product_type_data = create_product_type(
        e2e_staff_api_client,
        slug="product-type",
    )
    product_type_id = product_type_data["id"]
    # Step 1 - Create reference attributes with reference types
    page_reference_attribute_data = attribute_create(
        e2e_staff_api_client,
        input_type="REFERENCE",
        entity_type="PAGE",
        name="page ref",
        slug="page-ref",
        reference_types=[page_type_id],
    )
    page_ref_attribute_id = page_reference_attribute_data["id"]
    assert page_reference_attribute_data["entityType"] == "PAGE"
    assert len(page_reference_attribute_data["referenceTypes"]) == 1
    assert page_reference_attribute_data["referenceTypes"][0]["id"] == page_type_id
    assert (
        page_reference_attribute_data["referenceTypes"][0]["__typename"] == "PageType"
    )
    product_reference_attribute_data = attribute_create(
        e2e_staff_api_client,
        input_type="REFERENCE",
        entity_type="PRODUCT",
        name="product ref",
        slug="product-ref",
        reference_types=[product_type_id],
    )
    product_ref_attribute_id = product_reference_attribute_data["id"]
    assert product_reference_attribute_data["entityType"] == "PRODUCT"
    assert len(product_reference_attribute_data["referenceTypes"]) == 1
    assert (
        product_reference_attribute_data["referenceTypes"][0]["id"] == product_type_id
    )
    assert (
        product_reference_attribute_data["referenceTypes"][0]["__typename"]
        == "ProductType"
    )
    variant_reference_attribute_data = attribute_create(
        e2e_staff_api_client,
        input_type="SINGLE_REFERENCE",
        entity_type="PRODUCT_VARIANT",
        name="variant ref",
        slug="variant-ref",
        reference_types=[product_type_id],
    )
    variant_ref_attribute_id = variant_reference_attribute_data["id"]
    assert variant_reference_attribute_data["entityType"] == "PRODUCT_VARIANT"
    # Fixed copy-paste slip: this assertion previously re-checked the product
    # attribute instead of the variant attribute created just above.
    assert len(variant_reference_attribute_data["referenceTypes"]) == 1
    assert (
        variant_reference_attribute_data["referenceTypes"][0]["id"] == product_type_id
    )
    assert (
        variant_reference_attribute_data["referenceTypes"][0]["__typename"]
        == "ProductType"
    )
    # Step 2 - Create product type with the attributes
    product_type_with_references = create_product_type(
        e2e_staff_api_client,
        "referenced-product-type",
        product_attributes=[
            page_ref_attribute_id,
            product_ref_attribute_id,
            variant_ref_attribute_id,
        ],
    )
    product_type_with_references_id = product_type_with_references["id"]
    assert len(product_type_with_references["productAttributes"]) == 3
    # Step 3 - Prepare references not valid for specified attributes
    page_type_data = create_page_type(e2e_staff_api_client, name="another page type")
    page_type_id_not_in_choices = page_type_data["id"]
    page_not_in_choices = create_page(
        e2e_staff_api_client, page_type_id_not_in_choices, title="invalid page"
    )
    page_not_in_choices_id = page_not_in_choices["id"]
    product_type_data = create_product_type(
        e2e_staff_api_client, slug="another-product-type"
    )
    product_type_id_not_in_choices = product_type_data["id"]
    product_not_in_choices = create_product(
        e2e_staff_api_client,
        product_type_id_not_in_choices,
        category_id,
        product_name="invalid product",
    )
    product_not_in_choices_id = product_not_in_choices["id"]
    variant_not_in_choices = create_product_variant(
        e2e_staff_api_client, product_not_in_choices_id
    )
    variant_id_not_in_choices = variant_not_in_choices["id"]
    # Step 4 - Create product with a wrong attribute value and check for error
    invalid_attribute_data = [
        {"id": page_ref_attribute_id, "references": [page_not_in_choices_id]},
        {"id": product_ref_attribute_id, "references": [product_not_in_choices_id]},
        {"id": variant_ref_attribute_id, "reference": variant_id_not_in_choices},
    ]
    product_content = raw_create_product(
        e2e_staff_api_client,
        product_type_with_references_id,
        category_id,
        attributes=invalid_attribute_data,
    )
    product_data = product_content["data"]["productCreate"]
    errors = product_data["errors"]
    assert errors[0]["field"] == "attributes"
    assert errors[0]["code"] == "INVALID"
    assert len(errors[0]["attributes"]) == len(invalid_attribute_data)
    # Step 5 - Create product with a correct attribute values
    correct_page = create_page(e2e_staff_api_client, page_type_id)
    correct_page_id = correct_page["id"]
    correct_product_ref = create_product(
        e2e_staff_api_client,
        product_type_id,
        category_id,
        product_name="valid product",
    )
    correct_product_ref_id = correct_product_ref["id"]
    correct_variant_ref = create_product_variant(
        e2e_staff_api_client, correct_product_ref_id
    )
    correct_variant_ref_id = correct_variant_ref["id"]
    attributes = [
        {"id": page_ref_attribute_id, "references": [correct_page_id]},
        {"id": product_ref_attribute_id, "references": [correct_product_ref_id]},
        {"id": variant_ref_attribute_id, "reference": correct_variant_ref_id},
    ]
    product_data = create_product(
        e2e_staff_api_client,
        product_type_with_references_id,
        category_id,
        attributes=attributes,
        product_name="Tested product",
    )
    product_id = product_data["id"]
    assert len(product_data["attributes"]) == 3
    # Step 6 - Update attributes and clear reference types
    attribute_data = attribute_update(
        e2e_staff_api_client, page_ref_attribute_id, {"referenceTypes": []}
    )
    assert attribute_data["referenceTypes"] == []
    attribute_data = attribute_update(
        e2e_staff_api_client, product_ref_attribute_id, {"referenceTypes": []}
    )
    assert attribute_data["referenceTypes"] == []
    attribute_data = attribute_update(
        e2e_staff_api_client, variant_ref_attribute_id, {"referenceTypes": []}
    )
    assert attribute_data["referenceTypes"] == []
    # Step 7 - Update product with a previously wrong attribute value
    product_data = update_product(
        e2e_staff_api_client,
        product_id,
        {"attributes": invalid_attribute_data},
    )
    assert len(product_data["attributes"]) == len(invalid_attribute_data)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/tests/e2e/product/test_create_product_with_restricted_reference_attributes.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 188,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/payment/tests/test_utils.py | import pytest
from django.core.exceptions import ValidationError
from ....page.models import PageType
from ....payment.error_codes import (
TransactionRequestActionErrorCode,
TransactionRequestRefundForGrantedRefundErrorCode,
)
from ..utils import validate_and_resolve_refund_reason_context
def test_no_reference_type_configured_no_reference_id_provided(site_settings):
    """No configured reference type and no reference id: nothing to apply."""
    # Given
    site_settings.refund_reason_reference_type = None
    site_settings.save()
    # When
    result = validate_and_resolve_refund_reason_context(
        reason_reference_id=None,
        requestor_is_user=True,
        refund_reference_field_name="refundReasonReference",
        error_code_enum=TransactionRequestActionErrorCode,
        site_settings=site_settings,
    )
    # Then
    expected_context = {
        "is_passing_reason_reference_required": False,
        "refund_reason_reference_type": None,
        "should_apply": False,
    }
    assert result == expected_context
def test_no_reference_type_configured_reference_id_provided_raises_invalid(
    site_settings,
):
    """Passing a reference id while no reference type is configured is INVALID."""
    # Given
    site_settings.refund_reason_reference_type = None
    site_settings.save()
    # When / Then
    with pytest.raises(ValidationError) as exc_info:
        validate_and_resolve_refund_reason_context(
            reason_reference_id="some-id",
            requestor_is_user=True,
            refund_reference_field_name="refundReasonReference",
            error_code_enum=TransactionRequestActionErrorCode,
            site_settings=site_settings,
        )
    field_errors = exc_info.value.error_dict.get("refundReasonReference")
    assert field_errors is not None
    first_error = field_errors[0]
    assert first_error.code == TransactionRequestActionErrorCode.INVALID.value
    assert "Reason reference type is not configured" in str(first_error)
def test_reference_type_configured_no_reference_id_user_requestor_raises_required(
    site_settings,
):
    """User requestors must pass a reason reference once a type is configured."""
    # given
    reference_type = PageType(name="Refund Reasons", slug="refund-reasons")
    reference_type.save()
    site_settings.refund_reason_reference_type = reference_type
    site_settings.save()

    # when / then
    with pytest.raises(ValidationError) as err:
        validate_and_resolve_refund_reason_context(
            reason_reference_id=None,
            requestor_is_user=True,
            refund_reference_field_name="refundReasonReference",
            error_code_enum=TransactionRequestActionErrorCode,
            site_settings=site_settings,
        )

    errors = err.value.error_dict
    assert "refundReasonReference" in errors
    failure = errors["refundReasonReference"][0]
    assert failure.code == TransactionRequestActionErrorCode.REQUIRED.value
    assert "Reason reference is required" in str(failure)
def test_reference_type_configured_no_reference_id_app_requestor_success(
    site_settings,
):
    """App requestors may omit the reason reference even when a type is configured."""
    # given
    reference_type = PageType(name="Refund Reasons", slug="refund-reasons")
    reference_type.save()
    site_settings.refund_reason_reference_type = reference_type
    site_settings.save()

    # when
    context = validate_and_resolve_refund_reason_context(
        reason_reference_id=None,
        requestor_is_user=False,  # app requestor
        refund_reference_field_name="refundReasonReference",
        error_code_enum=TransactionRequestActionErrorCode,
        site_settings=site_settings,
    )

    # then
    assert context == {
        "is_passing_reason_reference_required": True,
        "refund_reason_reference_type": reference_type,
        "should_apply": False,
    }
def test_reference_type_configured_reference_id_provided_success(site_settings):
    """With a configured type and a provided reference, the reason applies."""
    # given
    reference_type = PageType(name="Refund Reasons", slug="refund-reasons")
    reference_type.save()
    site_settings.refund_reason_reference_type = reference_type
    site_settings.save()

    # when
    context = validate_and_resolve_refund_reason_context(
        reason_reference_id="some-reference-id",
        requestor_is_user=True,
        refund_reference_field_name="refundReasonReference",
        error_code_enum=TransactionRequestActionErrorCode,
        site_settings=site_settings,
    )

    # then
    assert context == {
        "is_passing_reason_reference_required": True,
        "refund_reason_reference_type": reference_type,
        "should_apply": True,
    }
def test_reference_type_configured_reference_id_provided_app_requestor_success(
    site_settings,
):
    """An app that passes a reason reference gets the same applied context."""
    # given
    reference_type = PageType(name="Refund Reasons", slug="refund-reasons")
    reference_type.save()
    site_settings.refund_reason_reference_type = reference_type
    site_settings.save()

    # when
    context = validate_and_resolve_refund_reason_context(
        reason_reference_id="some-reference-id",
        requestor_is_user=False,  # app requestor
        refund_reference_field_name="refundReasonReference",
        error_code_enum=TransactionRequestActionErrorCode,
        site_settings=site_settings,
    )

    # then
    assert context == {
        "is_passing_reason_reference_required": True,
        "refund_reason_reference_type": reference_type,
        "should_apply": True,
    }
def test_custom_field_name_in_error_message(site_settings):
    """INVALID errors are keyed by the caller-supplied field name."""
    # given
    site_settings.refund_reason_reference_type = None
    site_settings.save()
    field_name = "customReasonReference"

    # when / then
    with pytest.raises(ValidationError) as err:
        validate_and_resolve_refund_reason_context(
            reason_reference_id="some-id",
            requestor_is_user=True,
            refund_reference_field_name=field_name,
            error_code_enum=TransactionRequestActionErrorCode,
            site_settings=site_settings,
        )

    errors = err.value.error_dict
    assert field_name in errors
    assert errors[field_name][0].code == TransactionRequestActionErrorCode.INVALID.value
def test_custom_field_name_in_required_error_message(site_settings):
    """REQUIRED errors are keyed by the caller-supplied field name."""
    # given
    reference_type = PageType(name="Refund Reasons", slug="refund-reasons")
    reference_type.save()
    site_settings.refund_reason_reference_type = reference_type
    site_settings.save()
    field_name = "customReasonReference"

    # when / then
    with pytest.raises(ValidationError) as err:
        validate_and_resolve_refund_reason_context(
            reason_reference_id=None,
            requestor_is_user=True,
            refund_reference_field_name=field_name,
            error_code_enum=TransactionRequestActionErrorCode,
            site_settings=site_settings,
        )

    errors = err.value.error_dict
    assert field_name in errors
    assert (
        errors[field_name][0].code == TransactionRequestActionErrorCode.REQUIRED.value
    )
def test_different_error_code_enum(site_settings):
    """The error code is taken from whichever enum the caller passes in."""
    # given
    site_settings.refund_reason_reference_type = None
    site_settings.save()

    # when / then
    with pytest.raises(ValidationError) as err:
        validate_and_resolve_refund_reason_context(
            reason_reference_id="some-id",
            requestor_is_user=True,
            refund_reference_field_name="refundReasonReference",
            error_code_enum=TransactionRequestRefundForGrantedRefundErrorCode,
            site_settings=site_settings,
        )

    errors = err.value.error_dict
    assert "refundReasonReference" in errors
    failure = errors["refundReasonReference"][0]
    assert (
        failure.code == TransactionRequestRefundForGrantedRefundErrorCode.INVALID.value
    )
def test_is_passing_reason_reference_required_when_no_reference_type(site_settings):
    """The required-flag is False when no reference type is configured."""
    # given
    site_settings.refund_reason_reference_type = None
    site_settings.save()

    # when
    context = validate_and_resolve_refund_reason_context(
        reason_reference_id=None,
        requestor_is_user=False,
        refund_reference_field_name="refundReasonReference",
        error_code_enum=TransactionRequestActionErrorCode,
        site_settings=site_settings,
    )

    # then
    assert context["is_passing_reason_reference_required"] is False
def test_is_passing_reason_reference_required_when_reference_type_configured(
    site_settings,
):
    """The required-flag is True once a reference type is configured."""
    # given
    reference_type = PageType(name="Refund Reasons", slug="refund-reasons")
    reference_type.save()
    site_settings.refund_reason_reference_type = reference_type
    site_settings.save()

    # when
    context = validate_and_resolve_refund_reason_context(
        reason_reference_id=None,
        requestor_is_user=False,
        refund_reference_field_name="refundReasonReference",
        error_code_enum=TransactionRequestActionErrorCode,
        site_settings=site_settings,
    )

    # then
    assert context["is_passing_reason_reference_required"] is True
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/payment/tests/test_utils.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 235,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/shop/mutations/refund_reason_reference_type_clear.py | import graphene
from ....permission.enums import SitePermissions
from ...core import ResolveInfo
from ...core.descriptions import ADDED_IN_322
from ...core.doc_category import DOC_CATEGORY_ORDERS
from ...core.mutations import BaseMutation
from ...core.types.common import RefundReasonReferenceTypeClearError
from ...site.dataloaders import get_site_promise
from ..types import RefundSettings
class RefundReasonReferenceTypeClear(BaseMutation):
    """Drops the configured refund-reason reference type from site settings."""

    refund_settings = graphene.Field(
        RefundSettings, description="Refund settings.", required=True
    )

    class Meta:
        description = (
            "Updates RefundSettings. The `Page` (Model) Type will be cleared from `reasonReferenceType`. When it's cleared, passing reason reference to refund mutations is no longer accepted and will raise error."
            + ADDED_IN_322
        )
        doc_category = DOC_CATEGORY_ORDERS
        permissions = (SitePermissions.MANAGE_SETTINGS,)
        error_type_class = RefundReasonReferenceTypeClearError
        error_type_field = "refund_settings_errors"

    @classmethod
    def perform_mutation(cls, _root, info: ResolveInfo, /, **data):
        # Resolve the current site's settings and null out the reference type.
        site_settings = get_site_promise(info.context).get().settings
        site_settings.refund_reason_reference_type = None
        site_settings.save(update_fields=["refund_reason_reference_type"])
        return RefundReasonReferenceTypeClear(refund_settings=site_settings)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/shop/mutations/refund_reason_reference_type_clear.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/graphql/shop/mutations/refund_settings_update.py | import graphene
from ....permission.enums import SitePermissions
from ...core import ResolveInfo
from ...core.descriptions import ADDED_IN_322
from ...core.doc_category import DOC_CATEGORY_SHOP
from ...core.mutations import BaseMutation
from ...core.types import BaseInputObjectType
from ...core.types.common import RefundSettingsUpdateError
from ...site.dataloaders import get_site_promise
from ..types import RefundSettings
class RefundSettingsUpdateInput(BaseInputObjectType):
    """Input for `RefundSettingsUpdate`."""

    refund_reason_reference_type = graphene.ID(
        description=(
            # Fixed grammar ("with of" -> "of") in the user-facing schema text.
            "The ID of a model type, that will be used to reference refund reasons. "
            "All models of this type will be accepted as refund reasons. "
            f"{ADDED_IN_322}"
        ),
        required=True,
    )

    class Meta:
        doc_category = DOC_CATEGORY_SHOP
class RefundSettingsUpdate(BaseMutation):
    """Sets the page type used to reference refund reasons shop-wide."""

    refund_settings = graphene.Field(
        RefundSettings, description="Refund settings.", required=True
    )

    class Arguments:
        input = RefundSettingsUpdateInput(
            required=True, description="Fields required to update refund settings."
        )

    class Meta:
        description = "Update refund settings across all channels." + ADDED_IN_322
        doc_category = DOC_CATEGORY_SHOP
        permissions = (SitePermissions.MANAGE_SETTINGS,)
        error_type_class = RefundSettingsUpdateError
        error_type_field = "refund_settings_errors"

    @classmethod
    def perform_mutation(  # type: ignore[override]
        cls, _root, info: ResolveInfo, /, input
    ):
        site_settings = get_site_promise(info.context).get().settings
        reference_type_id = input.get("refund_reason_reference_type")
        # A falsy ID (e.g. empty string) is a no-op: settings returned unchanged.
        if reference_type_id:
            page_type = cls.get_node_or_error(
                info,
                reference_type_id,
                only_type="PageType",
                field="refund_reason_reference_type",
            )
            site_settings.refund_reason_reference_type = page_type  # type: ignore[assignment]
            site_settings.save(update_fields=["refund_reason_reference_type"])
        return RefundSettingsUpdate(refund_settings=site_settings)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/shop/mutations/refund_settings_update.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 52,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/graphql/shop/tests/mutations/test_reason_reference_type_clear.py | from .....page.models import PageType
from ....tests.utils import assert_no_permission, get_graphql_content
REFUND_REASON_REFERENCE_TYPE_CLEAR_MUTATION = """
mutation refundReasonReferenceClear {
refundReasonReferenceClear {
refundSettings {
reasonReferenceType {
id
name
slug
}
}
errors {
code
field
message
}
}
}
"""
def test_refund_reason_reference_type_clear_by_staff_success(
    staff_api_client, site_settings, permission_manage_settings, page_type
):
    """Staff with MANAGE_SETTINGS can clear the refund reason reference type."""
    # given
    staff_api_client.user.user_permissions.add(permission_manage_settings)
    site_settings.refund_reason_reference_type = page_type
    site_settings.save()
    assert site_settings.refund_reason_reference_type == page_type

    # when
    response = staff_api_client.post_graphql(
        REFUND_REASON_REFERENCE_TYPE_CLEAR_MUTATION,
    )

    # then
    content = get_graphql_content(response)
    payload = content["data"]["refundReasonReferenceClear"]
    assert not payload["errors"]
    assert payload["refundSettings"]
    assert payload["refundSettings"]["reasonReferenceType"] is None
    # The change must be persisted.
    site_settings.refresh_from_db()
    assert site_settings.refund_reason_reference_type is None
def test_refund_reason_reference_type_clear_by_app_success(
    app_api_client, site_settings, permission_manage_settings, page_type
):
    """An app with MANAGE_SETTINGS can clear the refund reason reference type."""
    # given
    app_api_client.app.permissions.add(permission_manage_settings)
    site_settings.refund_reason_reference_type = page_type
    site_settings.save()
    assert site_settings.refund_reason_reference_type == page_type

    # when
    response = app_api_client.post_graphql(
        REFUND_REASON_REFERENCE_TYPE_CLEAR_MUTATION,
    )

    # then
    content = get_graphql_content(response)
    payload = content["data"]["refundReasonReferenceClear"]
    assert not payload["errors"]
    assert payload["refundSettings"]
    assert payload["refundSettings"]["reasonReferenceType"] is None
    # The change must be persisted.
    site_settings.refresh_from_db()
    assert site_settings.refund_reason_reference_type is None
def test_refund_reason_reference_type_clear_when_already_none(
    staff_api_client, site_settings, permission_manage_settings
):
    """Clearing an already-empty reference type succeeds and stays None."""
    # given
    staff_api_client.user.user_permissions.add(permission_manage_settings)
    site_settings.refund_reason_reference_type = None
    site_settings.save()
    assert site_settings.refund_reason_reference_type is None

    # when
    response = staff_api_client.post_graphql(
        REFUND_REASON_REFERENCE_TYPE_CLEAR_MUTATION,
    )

    # then
    content = get_graphql_content(response)
    payload = content["data"]["refundReasonReferenceClear"]
    assert not payload["errors"]
    assert payload["refundSettings"]
    assert payload["refundSettings"]["reasonReferenceType"] is None
    # Database state remains unchanged.
    site_settings.refresh_from_db()
    assert site_settings.refund_reason_reference_type is None
def test_refund_reason_reference_type_clear_multiple_page_types(
    staff_api_client, site_settings, permission_manage_settings
):
    """Clearing removes only the reference, never the page types themselves."""
    # given
    staff_api_client.user.user_permissions.add(permission_manage_settings)
    first_type = PageType.objects.create(name="Type 1", slug="type-1")
    second_type = PageType.objects.create(name="Type 2", slug="type-2")
    site_settings.refund_reason_reference_type = first_type
    site_settings.save()
    assert site_settings.refund_reason_reference_type == first_type

    # when
    response = staff_api_client.post_graphql(
        REFUND_REASON_REFERENCE_TYPE_CLEAR_MUTATION,
    )

    # then
    content = get_graphql_content(response)
    payload = content["data"]["refundReasonReferenceClear"]
    assert not payload["errors"]
    assert payload["refundSettings"]
    assert payload["refundSettings"]["reasonReferenceType"] is None
    site_settings.refresh_from_db()
    assert site_settings.refund_reason_reference_type is None
    # Both page types must survive the clear.
    assert PageType.objects.filter(id=first_type.id).exists()
    assert PageType.objects.filter(id=second_type.id).exists()
def test_refund_reason_reference_type_clear_no_permission_staff(
    staff_api_client,
):
    """Staff without MANAGE_SETTINGS are rejected."""
    assert_no_permission(
        staff_api_client.post_graphql(REFUND_REASON_REFERENCE_TYPE_CLEAR_MUTATION)
    )
def test_refund_reason_reference_type_clear_no_permission_customer(
    user_api_client,
):
    """Customer accounts are rejected."""
    assert_no_permission(
        user_api_client.post_graphql(REFUND_REASON_REFERENCE_TYPE_CLEAR_MUTATION)
    )
def test_refund_reason_reference_type_clear_no_permission_anonymous(
    api_client,
):
    """Anonymous requests are rejected."""
    assert_no_permission(
        api_client.post_graphql(REFUND_REASON_REFERENCE_TYPE_CLEAR_MUTATION)
    )
def test_refund_reason_reference_type_clear_app_no_permission(
    app_api_client,
):
    """Apps without MANAGE_SETTINGS are rejected."""
    assert_no_permission(
        app_api_client.post_graphql(REFUND_REASON_REFERENCE_TYPE_CLEAR_MUTATION)
    )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/shop/tests/mutations/test_reason_reference_type_clear.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 155,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/shop/tests/mutations/test_refund_settings_update.py | import graphene
from .....page.models import PageType
from ....tests.utils import assert_no_permission, get_graphql_content
REFUND_SETTINGS_UPDATE_MUTATION = """
mutation refundSettingsUpdate($input: RefundSettingsUpdateInput!) {
refundSettingsUpdate(input: $input) {
refundSettings {
reasonReferenceType {
id
name
slug
}
}
errors {
code
field
message
}
}
}
"""
def test_refund_settings_update_by_staff_success(
    staff_api_client, site_settings, permission_manage_settings, page_type
):
    """Staff with MANAGE_SETTINGS can set the refund reason reference type."""
    # given
    staff_api_client.user.user_permissions.add(permission_manage_settings)
    assert site_settings.refund_reason_reference_type is None
    page_type_id = graphene.Node.to_global_id("PageType", page_type.id)
    variables = {"input": {"refundReasonReferenceType": page_type_id}}

    # when
    response = staff_api_client.post_graphql(
        REFUND_SETTINGS_UPDATE_MUTATION,
        variables,
    )

    # then
    payload = get_graphql_content(response)["data"]["refundSettingsUpdate"]
    assert not payload["errors"]
    assert payload["refundSettings"]
    reference_type = payload["refundSettings"]["reasonReferenceType"]
    assert reference_type["id"] == page_type_id
    assert reference_type["name"] == page_type.name
    assert reference_type["slug"] == page_type.slug
    # The change must be persisted.
    site_settings.refresh_from_db()
    assert site_settings.refund_reason_reference_type == page_type
def test_refund_settings_update_by_app_success(
    app_api_client, site_settings, permission_manage_settings, page_type
):
    """An app with MANAGE_SETTINGS can set the refund reason reference type."""
    # given
    app_api_client.app.permissions.add(permission_manage_settings)
    assert site_settings.refund_reason_reference_type is None
    page_type_id = graphene.Node.to_global_id("PageType", page_type.id)
    variables = {"input": {"refundReasonReferenceType": page_type_id}}

    # when
    response = app_api_client.post_graphql(
        REFUND_SETTINGS_UPDATE_MUTATION,
        variables,
    )

    # then
    payload = get_graphql_content(response)["data"]["refundSettingsUpdate"]
    assert not payload["errors"]
    assert payload["refundSettings"]
    assert payload["refundSettings"]["reasonReferenceType"]["id"] == page_type_id
    # The change must be persisted.
    site_settings.refresh_from_db()
    assert site_settings.refund_reason_reference_type == page_type
def test_refund_settings_update_change_page_type(
    staff_api_client, site_settings, permission_manage_settings, page_type
):
    """Updating replaces a previously configured reference page type."""
    # given - another page type is already configured
    staff_api_client.user.user_permissions.add(permission_manage_settings)
    previous_type = PageType.objects.create(name="Initial Type", slug="initial-type")
    site_settings.refund_reason_reference_type = previous_type
    site_settings.save()
    page_type_id = graphene.Node.to_global_id("PageType", page_type.id)
    variables = {"input": {"refundReasonReferenceType": page_type_id}}

    # when
    response = staff_api_client.post_graphql(
        REFUND_SETTINGS_UPDATE_MUTATION,
        variables,
    )

    # then
    payload = get_graphql_content(response)["data"]["refundSettingsUpdate"]
    assert not payload["errors"]
    assert payload["refundSettings"]["reasonReferenceType"]["id"] == page_type_id
    # The change must be persisted.
    site_settings.refresh_from_db()
    assert site_settings.refund_reason_reference_type == page_type
def test_refund_settings_update_empty_id_success(
    staff_api_client, permission_manage_settings, site_settings
):
    """An empty ID is accepted and leaves the reference type unset."""
    # given
    staff_api_client.user.user_permissions.add(permission_manage_settings)
    variables = {"input": {"refundReasonReferenceType": ""}}

    # when
    response = staff_api_client.post_graphql(
        REFUND_SETTINGS_UPDATE_MUTATION,
        variables,
    )

    # then
    payload = get_graphql_content(response)["data"]["refundSettingsUpdate"]
    assert not payload["errors"]
    assert payload["refundSettings"]
    assert payload["refundSettings"]["reasonReferenceType"] is None
def test_refund_settings_update_invalid_id_format(
    staff_api_client, permission_manage_settings
):
    """A malformed global ID produces a top-level GraphQL error."""
    # given
    staff_api_client.user.user_permissions.add(permission_manage_settings)
    variables = {"input": {"refundReasonReferenceType": "invalid-id-format"}}

    # when
    response = staff_api_client.post_graphql(
        REFUND_SETTINGS_UPDATE_MUTATION,
        variables,
    )

    # then
    assert "errors" in get_graphql_content(response, ignore_errors=True)
def test_refund_settings_update_nonexistent_page_type(
    staff_api_client, permission_manage_settings
):
    """An ID pointing at a missing page type produces a top-level error."""
    # given
    staff_api_client.user.user_permissions.add(permission_manage_settings)
    missing_id = graphene.Node.to_global_id("PageType", 99999)
    variables = {"input": {"refundReasonReferenceType": missing_id}}

    # when
    response = staff_api_client.post_graphql(
        REFUND_SETTINGS_UPDATE_MUTATION,
        variables,
    )

    # then
    assert "errors" in get_graphql_content(response, ignore_errors=True)
def test_refund_settings_update_wrong_page_type(
    staff_api_client, permission_manage_settings, product
):
    """An ID of a non-PageType node produces a top-level error."""
    # given
    staff_api_client.user.user_permissions.add(permission_manage_settings)
    product_id = graphene.Node.to_global_id("Product", product.id)
    variables = {"input": {"refundReasonReferenceType": product_id}}

    # when
    response = staff_api_client.post_graphql(
        REFUND_SETTINGS_UPDATE_MUTATION,
        variables,
    )

    # then
    assert "errors" in get_graphql_content(response, ignore_errors=True)
def test_refund_settings_update_no_permission_staff(staff_api_client, page_type):
    """Staff without MANAGE_SETTINGS are rejected."""
    page_type_id = graphene.Node.to_global_id("PageType", page_type.id)
    variables = {"input": {"refundReasonReferenceType": page_type_id}}
    assert_no_permission(
        staff_api_client.post_graphql(REFUND_SETTINGS_UPDATE_MUTATION, variables)
    )
def test_refund_settings_update_no_permission_customer(user_api_client, page_type):
    """Customer accounts are rejected."""
    page_type_id = graphene.Node.to_global_id("PageType", page_type.id)
    variables = {"input": {"refundReasonReferenceType": page_type_id}}
    assert_no_permission(
        user_api_client.post_graphql(REFUND_SETTINGS_UPDATE_MUTATION, variables)
    )
def test_refund_settings_update_no_permission_anonymous(api_client, page_type):
    """Anonymous requests are rejected."""
    page_type_id = graphene.Node.to_global_id("PageType", page_type.id)
    variables = {"input": {"refundReasonReferenceType": page_type_id}}
    assert_no_permission(
        api_client.post_graphql(REFUND_SETTINGS_UPDATE_MUTATION, variables)
    )
def test_refund_settings_update_app_no_permission(app_api_client, page_type):
    """Apps without MANAGE_SETTINGS are rejected."""
    page_type_id = graphene.Node.to_global_id("PageType", page_type.id)
    variables = {"input": {"refundReasonReferenceType": page_type_id}}
    assert_no_permission(
        app_api_client.post_graphql(REFUND_SETTINGS_UPDATE_MUTATION, variables)
    )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/shop/tests/mutations/test_refund_settings_update.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 191,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/channel/dataloaders/by_checkout.py | from uuid import UUID
from ....channel.models import Channel
from ...checkout.dataloaders import CheckoutByTokenLoader
from ...core.dataloaders import DataLoader
from .by_self import ChannelByIdLoader
class ChannelByCheckoutIDLoader(DataLoader[UUID, Channel]):
    """Resolve the `Channel` for each checkout token; None for missing checkouts."""

    context_key = "channel_by_checkout"

    def batch_load(self, keys):
        def map_checkouts_to_channels(checkouts):
            wanted_channel_ids = {c.channel_id for c in checkouts if c}

            def build_result(channels):
                by_id = {channel.id: channel for channel in channels}
                # Preserve request order; unknown tokens yield None.
                return [by_id.get(c.channel_id) if c else None for c in checkouts]

            return (
                ChannelByIdLoader(self.context)
                .load_many(wanted_channel_ids)
                .then(build_result)
            )

        return (
            CheckoutByTokenLoader(self.context)
            .load_many(keys)
            .then(map_checkouts_to_channels)
        )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/channel/dataloaders/by_checkout.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 22,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/graphql/channel/dataloaders/by_self.py | from ....channel.models import Channel
from ...core.dataloaders import DataLoader
class ChannelByIdLoader(DataLoader[int, Channel]):
    """Batch-load `Channel` objects by primary key; None for unknown IDs."""

    context_key = "channel_by_id"

    def batch_load(self, keys):
        queryset = Channel.objects.using(self.database_connection_name)
        found = queryset.in_bulk(keys)
        return [found.get(pk) for pk in keys]
class ChannelBySlugLoader(DataLoader[str, Channel]):
    """Batch-load `Channel` objects by slug; None for unknown slugs."""

    context_key = "channel_by_slug"

    def batch_load(self, keys):
        queryset = Channel.objects.using(self.database_connection_name)
        found = queryset.in_bulk(keys, field_name="slug")
        return [found.get(slug) for slug in keys]
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/channel/dataloaders/by_self.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 14,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/graphql/channel/dataloaders/by_transaction.py | from collections.abc import Iterable
from promise import Promise
from ....channel.models import Channel
from ....payment.models import TransactionItem
from ...core.dataloaders import DataLoader
from .by_checkout import ChannelByCheckoutIDLoader
from .by_order import ChannelByOrderIdLoader
class ChannelByTransactionIdLoader(DataLoader[int, Channel]):
    """Resolve each transaction's channel via its order, or else its checkout."""

    context_key = "channel_by_transaction_id"

    def batch_load(self, keys: Iterable[int]):
        wanted = list(keys)
        items_by_id = (
            TransactionItem.objects.using(self.database_connection_name)
            .only("order_id", "checkout_id", "id")
            .in_bulk(wanted)
        )

        # Per-transaction pointers; a transaction may carry either, both, or
        # neither of these.
        order_id_of = {}
        checkout_id_of = {}
        for item in items_by_id.values():
            if item.order_id:
                order_id_of[item.id] = item.order_id
            if item.checkout_id:
                checkout_id_of[item.id] = item.checkout_id

        order_ids = [
            order_id
            for tid in wanted
            if (order_id := order_id_of.get(tid)) is not None
        ]
        checkout_ids = [
            checkout_id
            for tid in wanted
            if (checkout_id := checkout_id_of.get(tid)) is not None
        ]

        def combine(loaded):
            checkout_channels, order_channels = loaded
            channel_by_order = dict(zip(order_ids, order_channels, strict=False))
            channel_by_checkout = dict(
                zip(checkout_ids, checkout_channels, strict=False)
            )

            def channel_for(tid):
                # Order takes precedence over checkout when both are present.
                if order_id := order_id_of.get(tid):
                    return channel_by_order.get(order_id)
                if checkout_id := checkout_id_of.get(tid):
                    return channel_by_checkout.get(checkout_id)
                return None

            return [channel_for(tid) for tid in wanted]

        return Promise.all(
            [
                ChannelByCheckoutIDLoader(self.context).load_many(checkout_ids),
                ChannelByOrderIdLoader(self.context).load_many(order_ids),
            ]
        ).then(combine)
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/channel/dataloaders/by_transaction.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 54,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
saleor/saleor:saleor/graphql/channel/tests/dataloaders/test_by_transaction.py | from .....channel.models import Channel
from ....context import SaleorContext
from ...dataloaders.by_transaction import ChannelByTransactionIdLoader
def test_batch_load_with_order_transactions(
    order_with_lines, transaction_item_generator
):
    """Transactions attached to an order resolve to the order's channel."""
    # given
    order = order_with_lines
    first = transaction_item_generator(order_id=order.id, checkout_id=None)
    second = transaction_item_generator(order_id=order.id, checkout_id=None)

    # when
    loader = ChannelByTransactionIdLoader(SaleorContext())
    channels = loader.batch_load([first.id, second.id]).get()

    # then
    assert channels == [order.channel, order.channel]
    assert all(isinstance(channel, Channel) for channel in channels)
def test_batch_load_with_checkout_transactions(checkout, transaction_item_generator):
    """Transactions attached to a checkout resolve to the checkout's channel."""
    # given
    first = transaction_item_generator(checkout_id=checkout.token, order_id=None)
    second = transaction_item_generator(checkout_id=checkout.token, order_id=None)

    # when
    loader = ChannelByTransactionIdLoader(SaleorContext())
    channels = loader.batch_load([first.id, second.id]).get()

    # then
    assert channels == [checkout.channel, checkout.channel]
    assert all(isinstance(channel, Channel) for channel in channels)
def test_batch_load_with_mixed_transactions(
    order_with_lines, checkout, transaction_item_generator
):
    """Order- and checkout-backed transactions can be mixed in one batch."""
    # given
    order = order_with_lines
    order_txn = transaction_item_generator(order_id=order.id, checkout_id=None)
    checkout_txn = transaction_item_generator(
        checkout_id=checkout.token, order_id=None
    )

    # when
    loader = ChannelByTransactionIdLoader(SaleorContext())
    channels = loader.batch_load([order_txn.id, checkout_txn.id]).get()

    # then
    assert channels == [order.channel, checkout.channel]
def test_batch_load_with_nonexistent_transaction_ids():
    """Unknown transaction IDs resolve to None, one per requested key."""
    # given
    missing_ids = [99999, 88888]

    # when
    loader = ChannelByTransactionIdLoader(SaleorContext())
    channels = loader.batch_load(missing_ids).get()

    # then
    assert channels == [None, None]
def test_batch_load_with_transaction_without_order_or_checkout(
    transaction_item_generator,
):
    """A transaction bound to neither order nor checkout resolves to None."""
    # given
    orphan_txn = transaction_item_generator(order_id=None, checkout_id=None)

    # when
    loader = ChannelByTransactionIdLoader(SaleorContext())
    channels = loader.batch_load([orphan_txn.id]).get()

    # then
    assert channels == [None]
def test_batch_load_empty_keys():
    """An empty key list yields an empty result list."""
    loader = ChannelByTransactionIdLoader(SaleorContext())
    assert loader.batch_load([]).get() == []
def test_batch_load_with_deleted_order(order_with_lines, transaction_item_generator):
    """IDs of transactions removed together with their order resolve to None."""
    # given
    txn = transaction_item_generator(order_id=order_with_lines.id, checkout_id=None)
    stale_id = txn.id
    # Delete the transaction first, then the order, to avoid the protected
    # foreign-key constraint.
    txn.delete()
    order_with_lines.delete()

    # when
    loader = ChannelByTransactionIdLoader(SaleorContext())
    channels = loader.batch_load([stale_id]).get()

    # then
    assert channels == [None]
def test_batch_load_with_deleted_checkout(checkout, transaction_item_generator):
    """A transaction whose checkout was deleted resolves to None."""
    # given
    txn = transaction_item_generator(checkout_id=checkout.token, order_id=None)
    checkout.delete()

    # when
    loader = ChannelByTransactionIdLoader(SaleorContext())
    channels = loader.batch_load([txn.id]).get()

    # then
    assert channels == [None]
def test_batch_load_maintains_order(
    order_with_lines, checkout, transaction_item_generator
):
    """Results come back in the same order the keys were requested."""
    # given - a mix of order-bound and checkout-bound transactions
    order = order_with_lines
    order_tx_a = transaction_item_generator(order_id=order.id, checkout_id=None)
    checkout_tx = transaction_item_generator(checkout_id=checkout.token, order_id=None)
    order_tx_b = transaction_item_generator(order_id=order.id, checkout_id=None)

    # when - keys requested in a deliberately shuffled order
    loader = ChannelByTransactionIdLoader(SaleorContext())
    result = loader.batch_load([checkout_tx.id, order_tx_a.id, order_tx_b.id]).get()

    # then - each slot matches the transaction at the same position
    assert result == [checkout.channel, order.channel, order.channel]
def test_batch_load_handles_duplicate_transaction_ids(
    order_with_lines, transaction_item_generator
):
    """Repeating the same key in one batch fills every slot identically."""
    # given
    order = order_with_lines
    transaction = transaction_item_generator(order_id=order.id, checkout_id=None)

    # when - the same transaction ID appears three times in a single batch
    loader = ChannelByTransactionIdLoader(SaleorContext())
    result = loader.batch_load([transaction.id] * 3).get()

    # then - every slot resolves to the same channel
    assert result == [order.channel] * 3
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/channel/tests/dataloaders/test_by_transaction.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 140,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/plugins/webhook/tests/subscription_webhooks/filterable_webhooks/test_checkout_fully_authorized.py | import json
from unittest.mock import patch
import graphene
from django.test import override_settings
from ......core.models import EventDelivery
from ......graphql.webhook.subscription_query import SubscriptionQuery
from ......webhook.event_types import WebhookEventAsyncType
from .....manager import get_plugins_manager
CHECKOUT_FULLY_AUTHORIZED_SUBSCRIPTION = """
subscription {
checkoutFullyAuthorized(channels: ["%s"]) {
checkout {
id
token
lines {
id
variant {
id
}
}
}
}
}
"""
@patch(
    "saleor.webhook.transport.asynchronous.transport.send_webhook_request_async.apply_async"
)
@override_settings(
    PLUGINS=["saleor.plugins.webhook.plugin.WebhookPlugin"],
    CELERY_TASK_ALWAYS_EAGER=True,
)
def test_checkout_fully_authorized(
    mocked_async, checkout_with_item, subscription_webhook, settings
):
    """A webhook filtered to the checkout's channel receives one delivery
    carrying the subscription payload for CHECKOUT_FULLY_AUTHORIZED."""
    # given
    manager = get_plugins_manager(False)
    checkout = checkout_with_item
    checkout_line = checkout.lines.first()
    channel = checkout.channel
    # Sanity check: the fixture checkout lives on the same channel slug the
    # subscription below filters on.
    assert channel.slug == settings.DEFAULT_CHANNEL_SLUG

    event_type = WebhookEventAsyncType.CHECKOUT_FULLY_AUTHORIZED
    query = CHECKOUT_FULLY_AUTHORIZED_SUBSCRIPTION % settings.DEFAULT_CHANNEL_SLUG
    webhook = subscription_webhook(query, event_type)
    subscription_query = SubscriptionQuery(query)
    webhook.filterable_channel_slugs = subscription_query.get_filterable_channel_slugs()
    webhook.save()

    checkout_id = graphene.Node.to_global_id("Checkout", checkout.pk)

    # when
    manager.checkout_fully_authorized(checkout)

    # then - the delivery payload must match this exact serialized document
    expected_payload = json.dumps(
        {
            "data": {
                "checkoutFullyAuthorized": {
                    "checkout": {
                        "id": checkout_id,
                        "token": str(checkout.token),
                        "lines": [
                            {
                                "id": graphene.Node.to_global_id(
                                    "CheckoutLine", checkout_line.id
                                ),
                                "variant": {
                                    "id": graphene.Node.to_global_id(
                                        "ProductVariant", checkout_line.variant_id
                                    )
                                },
                            }
                        ],
                    }
                }
            }
        }
    )
    deliveries = EventDelivery.objects.all()
    assert len(deliveries) == 1
    assert deliveries[0].payload.get_payload() == expected_payload
    assert deliveries[0].webhook == webhook
    assert mocked_async.called
@patch(
    "saleor.webhook.transport.asynchronous.transport.send_webhook_request_async.apply_async"
)
@override_settings(
    PLUGINS=["saleor.plugins.webhook.plugin.WebhookPlugin"],
    CELERY_TASK_ALWAYS_EAGER=True,
)
def test_checkout_fully_authorized_without_channels_input(
    mocked_async, checkout_with_item, subscription_webhook
):
    """A subscription with no `channels` filter fires for every channel."""
    # given
    manager = get_plugins_manager(False)
    checkout = checkout_with_item
    checkout_line = checkout.lines.first()

    event_type = WebhookEventAsyncType.CHECKOUT_FULLY_AUTHORIZED
    # Same document as the module-level constant, but without the
    # channels argument - so no channel filtering applies.
    query = """subscription {
      checkoutFullyAuthorized {
        checkout {
          id
          token
          lines {
            id
            variant {
              id
            }
          }
        }
      }
    }"""
    webhook = subscription_webhook(query, event_type)
    subscription_query = SubscriptionQuery(query)
    webhook.filterable_channel_slugs = subscription_query.get_filterable_channel_slugs()
    webhook.save()

    checkout_id = graphene.Node.to_global_id("Checkout", checkout.pk)

    # when
    manager.checkout_fully_authorized(checkout)

    # then
    expected_payload = json.dumps(
        {
            "data": {
                "checkoutFullyAuthorized": {
                    "checkout": {
                        "id": checkout_id,
                        "token": str(checkout.token),
                        "lines": [
                            {
                                "id": graphene.Node.to_global_id(
                                    "CheckoutLine", checkout_line.id
                                ),
                                "variant": {
                                    "id": graphene.Node.to_global_id(
                                        "ProductVariant", checkout_line.variant_id
                                    )
                                },
                            }
                        ],
                    }
                }
            }
        }
    )
    deliveries = EventDelivery.objects.all()
    assert len(deliveries) == 1
    assert deliveries[0].payload.get_payload() == expected_payload
    assert deliveries[0].webhook == webhook
    assert mocked_async.called
@patch(
    "saleor.webhook.transport.asynchronous.transport.create_event_delivery_list_for_webhooks"
)
@patch(
    "saleor.webhook.transport.asynchronous.transport.send_webhook_request_async.apply_async"
)
@override_settings(
    PLUGINS=["saleor.plugins.webhook.plugin.WebhookPlugin"],
    CELERY_TASK_ALWAYS_EAGER=True,
)
def test_checkout_fully_authorized_with_different_channel(
    mocked_async,
    mocked_create_event_delivery_list_for_webhooks,
    checkout_JPY_with_item,
    subscription_webhook,
    settings,
):
    """No delivery is created when the event's channel is not in the
    subscription's channel filter."""
    # given
    manager = get_plugins_manager(False)
    checkout = checkout_JPY_with_item
    channel = checkout.channel
    # The JPY checkout must be on a different channel than the filter below.
    assert channel.slug != settings.DEFAULT_CHANNEL_SLUG

    event_type = WebhookEventAsyncType.CHECKOUT_FULLY_AUTHORIZED
    query = CHECKOUT_FULLY_AUTHORIZED_SUBSCRIPTION % settings.DEFAULT_CHANNEL_SLUG
    webhook = subscription_webhook(query, event_type)
    subscription_query = SubscriptionQuery(query)
    webhook.filterable_channel_slugs = subscription_query.get_filterable_channel_slugs()
    webhook.save()

    # when
    manager.checkout_fully_authorized(checkout)

    # then - filtered out before any delivery is even constructed
    assert not mocked_async.called
    assert not mocked_create_event_delivery_list_for_webhooks.called
    deliveries = EventDelivery.objects.all()
    assert len(deliveries) == 0
@patch(
    "saleor.webhook.transport.asynchronous.transport.create_event_delivery_list_for_webhooks"
)
@patch(
    "saleor.webhook.transport.asynchronous.transport.send_webhook_request_async.apply_async"
)
@override_settings(
    PLUGINS=["saleor.plugins.webhook.plugin.WebhookPlugin"],
    CELERY_TASK_ALWAYS_EAGER=True,
)
def test_different_event_doesnt_trigger_webhook(
    mocked_async,
    mocked_create_event_delivery_list_for_webhooks,
    checkout_with_item,
    subscription_webhook,
    settings,
):
    """A webhook registered for CHECKOUT_UPDATED must not fire for the
    CHECKOUT_FULLY_AUTHORIZED event, even on a matching channel."""
    # given
    manager = get_plugins_manager(False)
    checkout = checkout_with_item
    channel = checkout.channel
    assert channel.slug == settings.DEFAULT_CHANNEL_SLUG

    # Webhook subscribed to a *different* event type on purpose.
    event_type = WebhookEventAsyncType.CHECKOUT_UPDATED
    query = CHECKOUT_FULLY_AUTHORIZED_SUBSCRIPTION % settings.DEFAULT_CHANNEL_SLUG
    webhook = subscription_webhook(query, event_type)
    subscription_query = SubscriptionQuery(query)
    webhook.filterable_channel_slugs = subscription_query.get_filterable_channel_slugs()
    webhook.save()

    # when
    manager.checkout_fully_authorized(checkout)

    # then - nothing is delivered for the mismatched event type
    assert not mocked_async.called
    assert not mocked_create_event_delivery_list_for_webhooks.called
    deliveries = EventDelivery.objects.all()
    assert len(deliveries) == 0
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/plugins/webhook/tests/subscription_webhooks/filterable_webhooks/test_checkout_fully_authorized.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 215,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/tests/e2e/checkout/test_checkout_complete_with_gift_card_recalculcates_status_for_other_checkout.py | import pytest
from ..gift_cards.utils import create_gift_card
from ..product.utils.preparing_product import prepare_product
from ..shop.utils import prepare_shop
from ..utils import assign_permissions
from .utils import (
checkout_add_promo_code,
checkout_complete,
checkout_create,
checkout_delivery_method_update,
get_checkout,
raw_checkout_complete,
)
@pytest.mark.e2e
@pytest.mark.parametrize(
    "query_second_checkout_status_before_checkout_complete",
    [
        True,
        False,
    ],
)
def test_checkout_complete_with_gift_card_recalculcates_status_for_other_checkout(
    e2e_app_api_client,
    e2e_not_logged_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_orders,
    permission_manage_checkouts,
    permission_manage_gift_card,
    shop_permissions,
    query_second_checkout_status_before_checkout_complete,
):
    """Completing one checkout paid with a gift card consumes the card's
    balance, so a second checkout using the same card can no longer be
    completed.

    The boolean parameter toggles whether the second checkout's payment
    status is re-queried (expected to drop to NONE) before the failing
    completion attempt, exercising both the refreshed and the stale path.
    """
    # Before
    permissions = [
        *shop_permissions,
        permission_manage_product_types_and_attributes,
        permission_manage_orders,
        permission_manage_checkouts,
        permission_manage_gift_card,
    ]
    assign_permissions(e2e_app_api_client, permissions)

    shop_data, _ = prepare_shop(
        e2e_app_api_client,
        channels=[
            {
                "shipping_zones": [
                    {
                        "shipping_methods": [{}],
                    },
                ],
                "order_settings": {
                    "allowUnpaidOrders": False,
                    "markAsPaidStrategy": "TRANSACTION_FLOW",
                },
            }
        ],
    )
    channel_id = shop_data[0]["id"]
    channel_slug = shop_data[0]["slug"]
    warehouse_id = shop_data[0]["warehouse_id"]
    shipping_method_id = shop_data[0]["shipping_zones"][0]["shipping_methods"][0]["id"]

    variant_price = 10
    (
        _,
        product_variant_id,
        _,
    ) = prepare_product(
        e2e_app_api_client,
        warehouse_id,
        channel_id,
        variant_price,
    )
    assert shipping_method_id is not None

    # The 20 USD card covers one checkout's total (10 + 10 shipping) but
    # cannot cover a second one after it is spent.
    gift_card = create_gift_card(e2e_app_api_client, 20, "USD", active=True)
    gift_card_code = gift_card["code"]
    gift_card_id = gift_card["id"]

    # Step 1 - Create first checkout
    lines = [
        {"variantId": product_variant_id, "quantity": 1},
    ]
    checkout_data = checkout_create(
        e2e_not_logged_api_client,
        lines,
        channel_slug,
        email="testEmail@example.com",
    )
    first_checkout_id = checkout_data["id"]

    # Step 2 - Update delivery method for first checkout
    checkout_data = checkout_delivery_method_update(
        e2e_not_logged_api_client,
        first_checkout_id,
        shipping_method_id,
    )
    assert checkout_data["deliveryMethod"]["id"] == shipping_method_id

    # Step 3 - Add gift card to first checkout
    checkout_data = checkout_add_promo_code(
        e2e_not_logged_api_client,
        first_checkout_id,
        gift_card_code,
    )
    total_gross_amount = checkout_data["totalPrice"]["gross"]["amount"]
    assert total_gross_amount == 0
    assert checkout_data["giftCards"][0]["id"] == gift_card_id
    assert checkout_data["giftCards"][0]["last4CodeChars"] == gift_card_code[-4:]

    # Step 4 - Check first checkout status
    checkout_data = get_checkout(e2e_not_logged_api_client, first_checkout_id)
    assert checkout_data["authorizeStatus"] == "FULL"
    assert checkout_data["chargeStatus"] == "FULL"

    # Step 5 - Create second checkout
    lines = [
        {"variantId": product_variant_id, "quantity": 1},
    ]
    checkout_data = checkout_create(
        e2e_not_logged_api_client,
        lines,
        channel_slug,
        email="testEmail@example.com",
    )
    second_checkout_id = checkout_data["id"]

    # Step 6 - Update delivery method for second checkout
    checkout_data = checkout_delivery_method_update(
        e2e_not_logged_api_client,
        second_checkout_id,
        shipping_method_id,
    )
    assert checkout_data["deliveryMethod"]["id"] == shipping_method_id

    # Step 7 - Add gift card to second checkout
    checkout_data = checkout_add_promo_code(
        e2e_not_logged_api_client,
        second_checkout_id,
        gift_card_code,
    )
    total_gross_amount = checkout_data["totalPrice"]["gross"]["amount"]
    assert total_gross_amount == 0
    assert checkout_data["giftCards"][0]["id"] == gift_card_id
    assert checkout_data["giftCards"][0]["last4CodeChars"] == gift_card_code[-4:]

    # Step 8 - Check second checkout status
    checkout_data = get_checkout(e2e_not_logged_api_client, second_checkout_id)
    assert checkout_data["authorizeStatus"] == "FULL"
    assert checkout_data["chargeStatus"] == "FULL"

    # Step 9 - Complete first checkout (spends the gift card balance)
    order_data = checkout_complete(
        e2e_not_logged_api_client,
        first_checkout_id,
    )
    assert order_data["id"] is not None

    # Step 10 - Check second checkout status once again
    if query_second_checkout_status_before_checkout_complete:
        checkout_data = get_checkout(e2e_not_logged_api_client, second_checkout_id)
        assert checkout_data["authorizeStatus"] == "NONE"
        assert checkout_data["chargeStatus"] == "NONE"

    # Step 11 - Attempt to complete second checkout
    response = raw_checkout_complete(
        e2e_not_logged_api_client,
        second_checkout_id,
    )
    errors = response["errors"]
    assert len(errors) == 1
    assert errors[0] == {
        "code": "CHECKOUT_NOT_FULLY_PAID",
        "field": None,
        "message": "Provided payment methods can not cover the checkout's total amount",
    }
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/tests/e2e/checkout/test_checkout_complete_with_gift_card_recalculcates_status_for_other_checkout.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 162,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/tests/e2e/checkout/test_checkout_complete_with_transaction_and_gift_card.py | import pytest
from ..gift_cards.utils import create_gift_card
from ..orders.utils import order_query
from ..product.utils.preparing_product import prepare_product
from ..shop.utils import prepare_shop
from ..transactions.utils import create_transaction
from ..utils import assign_permissions
from .utils import (
checkout_add_promo_code,
checkout_complete,
checkout_create,
checkout_delivery_method_update,
)
@pytest.mark.e2e
def test_checkout_complete_with_transaction_and_gift_card(
    e2e_app_api_client,
    e2e_not_logged_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_orders,
    permission_manage_checkouts,
    permission_manage_payments,
    permission_manage_gift_card,
    shop_permissions,
):
    """A checkout covered by an authorizing transaction first and a gift card
    second completes as an UNFULFILLED order with a zero total."""
    # Before
    permissions = [
        *shop_permissions,
        permission_manage_product_types_and_attributes,
        permission_manage_orders,
        permission_manage_checkouts,
        permission_manage_payments,
        permission_manage_gift_card,
    ]
    assign_permissions(e2e_app_api_client, permissions)

    shop_data, _ = prepare_shop(
        e2e_app_api_client,
        channels=[
            {
                "shipping_zones": [
                    {
                        "shipping_methods": [{}],
                    },
                ],
                "order_settings": {
                    "allowUnpaidOrders": False,
                    "markAsPaidStrategy": "TRANSACTION_FLOW",
                },
            }
        ],
    )
    channel_id = shop_data[0]["id"]
    channel_slug = shop_data[0]["slug"]
    warehouse_id = shop_data[0]["warehouse_id"]
    shipping_method_id = shop_data[0]["shipping_zones"][0]["shipping_methods"][0]["id"]

    variant_price = 10
    (
        _,
        product_variant_id,
        _,
    ) = prepare_product(
        e2e_app_api_client,
        warehouse_id,
        channel_id,
        variant_price,
    )
    assert shipping_method_id is not None

    # 100 USD card is large enough to cover whatever the transaction doesn't.
    gift_card = create_gift_card(e2e_app_api_client, 100, "USD", active=True)
    gift_card_code = gift_card["code"]
    gift_card_id = gift_card["id"]

    # Step 1 - Create checkout
    lines = [
        {"variantId": product_variant_id, "quantity": 1},
    ]
    checkout_data = checkout_create(
        e2e_not_logged_api_client,
        lines,
        channel_slug,
        email="testEmail@example.com",
    )
    checkout_id = checkout_data["id"]
    subtotal_gross_amount = checkout_data["subtotalPrice"]["gross"]["amount"]
    assert subtotal_gross_amount == float(variant_price)

    # Step 2 - Update delivery method
    checkout_data = checkout_delivery_method_update(
        e2e_not_logged_api_client,
        checkout_id,
        shipping_method_id,
    )
    assert checkout_data["deliveryMethod"]["id"] == shipping_method_id
    total_gross_amount = checkout_data["totalPrice"]["gross"]["amount"]
    shipping_price = checkout_data["deliveryMethod"]["price"]["amount"]
    assert shipping_price == 10
    assert total_gross_amount == subtotal_gross_amount + shipping_price
    assert checkout_data["chargeStatus"] == "NONE"
    assert checkout_data["authorizeStatus"] == "NONE"

    # Step 3 - Create transaction that partially authorize payment
    # (covers the subtotal only, not shipping)
    create_transaction(
        e2e_app_api_client,
        checkout_id,
        transaction_name="transaction",
        psp_reference="PSP-test",
        available_actions=["CHARGE", "CANCEL"],
        amount_authorized=subtotal_gross_amount,
    )

    # Step 4 - Add gift card to checkout
    checkout_data = checkout_add_promo_code(
        e2e_not_logged_api_client,
        checkout_id,
        gift_card_code,
    )
    total_gross_amount = checkout_data["totalPrice"]["gross"]["amount"]
    assert total_gross_amount == 0
    assert checkout_data["giftCards"][0]["id"] == gift_card_id
    assert checkout_data["giftCards"][0]["last4CodeChars"] == gift_card_code[-4:]

    # Step 5 - Complete checkout.
    order_data = checkout_complete(
        e2e_not_logged_api_client,
        checkout_id,
    )
    assert order_data["status"] == "UNFULFILLED"
    assert order_data["total"]["gross"]["amount"] == 0
@pytest.mark.e2e
def test_checkout_complete_with_gift_card_and_transaction(
    e2e_app_api_client,
    e2e_not_logged_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_orders,
    permission_manage_checkouts,
    permission_manage_payments,
    permission_manage_gift_card,
    shop_permissions,
):
    """Mirror of the previous test with the payment order reversed: gift card
    first, transaction second - the outcome must be the same."""
    # Before
    permissions = [
        *shop_permissions,
        permission_manage_product_types_and_attributes,
        permission_manage_orders,
        permission_manage_checkouts,
        permission_manage_payments,
        permission_manage_gift_card,
    ]
    assign_permissions(e2e_app_api_client, permissions)

    shop_data, _ = prepare_shop(
        e2e_app_api_client,
        channels=[
            {
                "shipping_zones": [
                    {
                        "shipping_methods": [{}],
                    },
                ],
                "order_settings": {
                    "allowUnpaidOrders": False,
                    "markAsPaidStrategy": "TRANSACTION_FLOW",
                },
            }
        ],
    )
    channel_id = shop_data[0]["id"]
    channel_slug = shop_data[0]["slug"]
    warehouse_id = shop_data[0]["warehouse_id"]
    shipping_method_id = shop_data[0]["shipping_zones"][0]["shipping_methods"][0]["id"]

    variant_price = 10
    (
        _,
        product_variant_id,
        _,
    ) = prepare_product(
        e2e_app_api_client,
        warehouse_id,
        channel_id,
        variant_price,
    )
    assert shipping_method_id is not None

    gift_card = create_gift_card(e2e_app_api_client, 100, "USD", active=True)
    gift_card_code = gift_card["code"]
    gift_card_id = gift_card["id"]

    # Step 1 - Create checkout
    lines = [
        {"variantId": product_variant_id, "quantity": 1},
    ]
    checkout_data = checkout_create(
        e2e_not_logged_api_client,
        lines,
        channel_slug,
        email="testEmail@example.com",
    )
    checkout_id = checkout_data["id"]
    subtotal_gross_amount = checkout_data["subtotalPrice"]["gross"]["amount"]
    assert subtotal_gross_amount == float(variant_price)

    # Step 2 - Update delivery method
    checkout_data = checkout_delivery_method_update(
        e2e_not_logged_api_client,
        checkout_id,
        shipping_method_id,
    )
    assert checkout_data["deliveryMethod"]["id"] == shipping_method_id
    total_gross_amount = checkout_data["totalPrice"]["gross"]["amount"]
    shipping_price = checkout_data["deliveryMethod"]["price"]["amount"]
    assert shipping_price == 10
    assert total_gross_amount == subtotal_gross_amount + shipping_price
    assert checkout_data["chargeStatus"] == "NONE"
    assert checkout_data["authorizeStatus"] == "NONE"

    # Step 3 - Add gift card to checkout
    checkout_data = checkout_add_promo_code(
        e2e_not_logged_api_client,
        checkout_id,
        gift_card_code,
    )
    total_gross_amount = checkout_data["totalPrice"]["gross"]["amount"]
    assert total_gross_amount == 0
    assert checkout_data["giftCards"][0]["id"] == gift_card_id
    assert checkout_data["giftCards"][0]["last4CodeChars"] == gift_card_code[-4:]

    # Step 4 - Create transaction that partially authorize payment
    create_transaction(
        e2e_app_api_client,
        checkout_id,
        transaction_name="transaction",
        psp_reference="PSP-test",
        available_actions=["CHARGE", "CANCEL"],
        amount_authorized=subtotal_gross_amount,
    )

    # Step 5 - Complete checkout.
    order_data = checkout_complete(
        e2e_not_logged_api_client,
        checkout_id,
    )
    assert order_data["status"] == "UNFULFILLED"
    assert order_data["total"]["gross"]["amount"] == 0
@pytest.mark.e2e
def test_checkout_complete_with_only_gift_card(
    e2e_app_api_client,
    e2e_not_logged_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_orders,
    permission_manage_checkouts,
    permission_manage_payments,
    permission_manage_gift_card,
    shop_permissions,
):
    """A checkout fully covered by a gift card (no transaction) completes;
    the returned order starts UNCONFIRMED and shows UNFULFILLED when queried
    afterwards."""
    # Before
    permissions = [
        *shop_permissions,
        permission_manage_product_types_and_attributes,
        permission_manage_orders,
        permission_manage_checkouts,
        permission_manage_payments,
        permission_manage_gift_card,
    ]
    assign_permissions(e2e_app_api_client, permissions)

    shop_data, _ = prepare_shop(
        e2e_app_api_client,
        channels=[
            {
                "shipping_zones": [
                    {
                        "shipping_methods": [{}],
                    },
                ],
                "order_settings": {
                    "allowUnpaidOrders": False,
                    "markAsPaidStrategy": "TRANSACTION_FLOW",
                },
            }
        ],
    )
    channel_id = shop_data[0]["id"]
    channel_slug = shop_data[0]["slug"]
    warehouse_id = shop_data[0]["warehouse_id"]
    shipping_method_id = shop_data[0]["shipping_zones"][0]["shipping_methods"][0]["id"]

    variant_price = 10
    (
        _,
        product_variant_id,
        _,
    ) = prepare_product(
        e2e_app_api_client,
        warehouse_id,
        channel_id,
        variant_price,
    )
    assert shipping_method_id is not None

    gift_card = create_gift_card(e2e_app_api_client, 100, "USD", active=True)
    gift_card_code = gift_card["code"]
    gift_card_id = gift_card["id"]

    # Step 1 - Create checkout
    lines = [
        {"variantId": product_variant_id, "quantity": 1},
    ]
    checkout_data = checkout_create(
        e2e_not_logged_api_client,
        lines,
        channel_slug,
        email="testEmail@example.com",
    )
    checkout_id = checkout_data["id"]
    subtotal_gross_amount = checkout_data["subtotalPrice"]["gross"]["amount"]
    assert subtotal_gross_amount == float(variant_price)

    # Step 2 - Update delivery method
    checkout_data = checkout_delivery_method_update(
        e2e_not_logged_api_client,
        checkout_id,
        shipping_method_id,
    )
    assert checkout_data["deliveryMethod"]["id"] == shipping_method_id
    total_gross_amount = checkout_data["totalPrice"]["gross"]["amount"]
    shipping_price = checkout_data["deliveryMethod"]["price"]["amount"]
    assert shipping_price == 10
    assert total_gross_amount == subtotal_gross_amount + shipping_price
    assert checkout_data["chargeStatus"] == "NONE"
    assert checkout_data["authorizeStatus"] == "NONE"

    # Step 3 - Add gift card to checkout
    checkout_data = checkout_add_promo_code(
        e2e_not_logged_api_client,
        checkout_id,
        gift_card_code,
    )
    total_gross_amount = checkout_data["totalPrice"]["gross"]["amount"]
    assert total_gross_amount == 0
    assert checkout_data["giftCards"][0]["id"] == gift_card_id
    assert checkout_data["giftCards"][0]["last4CodeChars"] == gift_card_code[-4:]

    # Step 4 - Complete checkout.
    order_data = checkout_complete(
        e2e_not_logged_api_client,
        checkout_id,
    )
    # The mutation returns the order before it is confirmed...
    assert order_data["status"] == "UNCONFIRMED"
    assert order_data["total"]["gross"]["amount"] == 0

    # Step 5 - Check order status
    # ...while a follow-up query sees it already moved to UNFULFILLED.
    order_data = order_query(e2e_app_api_client, order_data["id"])
    assert order_data["status"] == "UNFULFILLED"
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/tests/e2e/checkout/test_checkout_complete_with_transaction_and_gift_card.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 331,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/attribute/tests/queries/test_selected_attribute.py | from datetime import UTC, datetime
import graphene
from .....attribute import AttributeEntityType, AttributeInputType, AttributeType
from .....attribute.models.base import AttributeValue, AttributeValueTranslation
from .....attribute.utils import associate_attribute_values_to_instance
from ....tests.utils import get_graphql_content
from ...types import (
ASSIGNED_ATTRIBUTE_MAP,
ASSIGNED_MULTI_REFERENCE_MAP,
ASSIGNED_SINGLE_REFERENCE_MAP,
)
PAGE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
attributes {
values {
id
name
}
}
}
}
"""
def test_attribute_value_name_when_referenced_product_was_changed(
    staff_api_client,
    product,
    page,
    page_type_product_reference_attribute,
):
    """Renaming a referenced product is reflected in the value's name."""
    # given - a page referencing the product through a reference attribute
    reference_attribute = page_type_product_reference_attribute
    page.page_type.page_attributes.set([reference_attribute])
    value = AttributeValue.objects.create(
        attribute=reference_attribute,
        name=product.name,
        slug=f"{page.pk}_{product.pk}",
        reference_product_id=product.pk,
    )
    associate_attribute_values_to_instance(page, {reference_attribute.pk: [value]})

    # ...and the referenced product is renamed afterwards
    updated_name = "New Product Name"
    product.name = updated_name
    product.save(update_fields=["name"])

    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(PAGE_QUERY, variables={"id": page_id})

    # then - the API reports the product's current name
    content = get_graphql_content(response)
    attributes = content["data"]["page"]["attributes"]
    assert len(attributes) == 1
    values = attributes[0]["values"]
    assert len(values) == 1
    assert values[0]["name"] == updated_name
def test_attribute_value_name_when_referenced_variant_was_changed(
    staff_api_client,
    variant,
    page,
    page_type_variant_reference_attribute,
):
    """Renaming a referenced variant yields a '<product>: <variant>' name."""
    # given - a page referencing the variant through a reference attribute
    reference_attribute = page_type_variant_reference_attribute
    page.page_type.page_attributes.set([reference_attribute])
    value = AttributeValue.objects.create(
        attribute=reference_attribute,
        name=variant.name,
        slug=f"{page.pk}_{variant.pk}",
        reference_variant_id=variant.pk,
    )
    associate_attribute_values_to_instance(page, {reference_attribute.pk: [value]})

    # ...then both the parent product and the variant are renamed
    product_name = "Product Name"
    variant.product.name = product_name
    variant.product.save(update_fields=["name"])

    new_variant_name = "New Variant Name"
    variant.name = new_variant_name
    variant.save(update_fields=["name"])

    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(PAGE_QUERY, variables={"id": page_id})

    # then - the reported name combines current product and variant names
    content = get_graphql_content(response)
    attributes = content["data"]["page"]["attributes"]
    assert len(attributes) == 1
    values = attributes[0]["values"]
    assert len(values) == 1
    assert values[0]["name"] == f"{product_name}: {new_variant_name}"
def test_attribute_value_name_when_referenced_page_was_changed(
    staff_api_client,
    page,
    page_list,
    page_type_page_reference_attribute,
):
    """Retitling a referenced page is reflected in the value's name."""
    # given - a page referencing another page through a reference attribute
    target_page = page_list[0]
    reference_attribute = page_type_page_reference_attribute
    page.page_type.page_attributes.set([reference_attribute])
    value = AttributeValue.objects.create(
        attribute=reference_attribute,
        name=target_page.title,
        slug=f"{page.pk}_{target_page.pk}",
        reference_page_id=target_page.pk,
    )
    associate_attribute_values_to_instance(page, {reference_attribute.pk: [value]})

    # ...and the referenced page gets a new title afterwards
    updated_title = "New Page Title"
    target_page.title = updated_title
    target_page.save(update_fields=["title"])

    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(PAGE_QUERY, variables={"id": page_id})

    # then - the API reports the referenced page's current title
    content = get_graphql_content(response)
    attributes = content["data"]["page"]["attributes"]
    assert len(attributes) == 1
    values = attributes[0]["values"]
    assert len(values) == 1
    assert values[0]["name"] == updated_title
ASSIGNED_NUMERIC_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
... on AssignedNumericAttribute {
attribute {
id
}
value
}
}
}
}
"""
def test_assigned_numeric_attribute(staff_api_client, page, numeric_attribute):
    """assignedAttributes exposes a numeric attribute's numeric value."""
    # given - the page carries one numeric attribute value
    page.page_type.page_attributes.set([numeric_attribute])
    value = numeric_attribute.values.first()
    associate_attribute_values_to_instance(page, {numeric_attribute.pk: [value]})

    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_NUMERIC_ATTRIBUTE_QUERY,
        variables={"id": graphene.Node.to_global_id("Page", page.pk)},
    )

    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    assert assigned[0]["value"] == value.numeric
ASSIGNED_TEXT_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
...on AssignedTextAttribute{
value
translation(languageCode:FR)
}
}
}
}
"""
def test_assigned_text_attribute_translation(
    staff_api_client,
    page,
    rich_text_attribute_page_type,
    translated_page_unique_attribute_value,
):
    """The FR translation of a rich-text attribute value is returned."""
    # given
    page_type = page.page_type
    page_type.page_attributes.set([rich_text_attribute_page_type])
    attr_value = rich_text_attribute_page_type.values.first()
    # Sanity check: the translation fixture targets the value assigned below.
    assert attr_value.id == translated_page_unique_attribute_value.attribute_value_id
    associate_attribute_values_to_instance(
        page, {rich_text_attribute_page_type.pk: [attr_value]}
    )
    assert attr_value.rich_text is not None

    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_TEXT_ATTRIBUTE_QUERY,
        variables={"id": graphene.Node.to_global_id("Page", page.pk)},
    )

    # then
    content = get_graphql_content(response)
    assert len(content["data"]["page"]["assignedAttributes"]) == 1
    assert (
        content["data"]["page"]["assignedAttributes"][0]["translation"]
        == translated_page_unique_attribute_value.rich_text
    )
def test_assigned_text_attribute(staff_api_client, page, rich_text_attribute):
    """assignedAttributes exposes a text attribute's rich-text value."""
    # given - the page carries one rich-text attribute value
    page.page_type.page_attributes.set([rich_text_attribute])
    value = rich_text_attribute.values.first()
    associate_attribute_values_to_instance(page, {rich_text_attribute.pk: [value]})
    assert value.rich_text is not None

    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_TEXT_ATTRIBUTE_QUERY,
        variables={"id": graphene.Node.to_global_id("Page", page.pk)},
    )

    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    assert assigned[0]["value"] == value.rich_text
ASSIGNED_PLAIN_TEXT_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
...on AssignedPlainTextAttribute{
value
translation(languageCode:FR)
}
}
}
}
"""
def test_assigned_plain_text_attribute_translation(
    staff_api_client,
    page,
    plain_text_attribute_page_type,
    translated_page_unique_attribute_value,
):
    """The FR translation of a plain-text attribute value is returned."""
    # given
    page_type = page.page_type
    page_type.page_attributes.set([plain_text_attribute_page_type])
    attr_value = plain_text_attribute_page_type.values.first()
    # A dedicated FR translation is created for the assigned value.
    translation = AttributeValueTranslation.objects.create(
        language_code="fr",
        attribute_value=attr_value,
        plain_text="French description.",
    )
    assert attr_value.id == translation.attribute_value_id
    associate_attribute_values_to_instance(
        page, {plain_text_attribute_page_type.pk: [attr_value]}
    )
    assert attr_value.plain_text is not None

    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_PLAIN_TEXT_ATTRIBUTE_QUERY,
        variables={"id": graphene.Node.to_global_id("Page", page.pk)},
    )

    # then
    content = get_graphql_content(response)
    assert len(content["data"]["page"]["assignedAttributes"]) == 1
    assert (
        content["data"]["page"]["assignedAttributes"][0]["translation"]
        == translation.plain_text
    )
def test_assigned_plain_text_attribute(staff_api_client, page, plain_text_attribute):
    """assignedAttributes exposes a plain-text attribute's value."""
    # given - the page carries one plain-text attribute value
    page.page_type.page_attributes.set([plain_text_attribute])
    value = plain_text_attribute.values.first()
    associate_attribute_values_to_instance(page, {plain_text_attribute.pk: [value]})
    assert value.plain_text is not None

    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_PLAIN_TEXT_ATTRIBUTE_QUERY,
        variables={"id": graphene.Node.to_global_id("Page", page.pk)},
    )

    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    assert assigned[0]["value"] == value.plain_text
ASSIGNED_FILE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
...on AssignedFileAttribute{
value {
url
contentType
}
}
}
}
}
"""
def test_assigned_file_attribute(staff_api_client, page, file_attribute):
    """File URL and content type are exposed on AssignedFileAttribute."""
    # given
    page.page_type.page_attributes.set([file_attribute])
    value = file_attribute.values.first()
    value.file_url = "https://example.com/file.pdf"
    value.save()
    associate_attribute_values_to_instance(page, {file_attribute.pk: [value]})
    assert value.file_url is not None
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_FILE_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    file_data = assigned[0]["value"]
    assert file_data["url"] == value.file_url
    assert file_data["contentType"] == value.content_type
ASSIGNED_SINGLE_PAGE_REFERENCE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
__typename
...on AssignedSinglePageReferenceAttribute{
value{
__typename
slug
}
}
}
}
}
"""
def test_assigned_single_page_reference_attribute(
    staff_api_client,
    page,
    page_list,
    page_type_page_reference_attribute,
):
    """A single page reference resolves to the referenced Page object."""
    # given
    attribute = page_type_page_reference_attribute
    referenced = page_list[0]
    expected_slug = "referenced-page-slug"
    referenced.slug = expected_slug
    referenced.save(update_fields=["slug"])
    attribute.input_type = AttributeInputType.SINGLE_REFERENCE
    attribute.save()
    page.page_type.page_attributes.set([attribute])
    value = AttributeValue.objects.create(
        attribute=attribute,
        name=referenced.title,
        slug=f"{page.pk}_{referenced.pk}",
        reference_page_id=referenced.pk,
    )
    associate_attribute_values_to_instance(page, {attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_SINGLE_PAGE_REFERENCE_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    reference_data = assigned[0]["value"]
    assert reference_data["__typename"] == "Page"
    assert reference_data["slug"] == expected_slug
ASSIGNED_SINGLE_PRODUCT_REFERENCE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
__typename
...on AssignedSingleProductReferenceAttribute{
value{
__typename
slug
}
}
}
}
}
"""
def test_assigned_single_product_reference_attribute(
    staff_api_client,
    page,
    product,
    page_type_product_reference_attribute,
):
    """A single product reference resolves to the referenced Product object."""
    # given
    attribute = page_type_product_reference_attribute
    expected_slug = "referenced-product-slug"
    product.slug = expected_slug
    product.save(update_fields=["slug"])
    attribute.input_type = AttributeInputType.SINGLE_REFERENCE
    attribute.save()
    page.page_type.page_attributes.set([attribute])
    value = AttributeValue.objects.create(
        attribute=attribute,
        slug=f"{page.pk}_{product.pk}",
        reference_product_id=product.pk,
    )
    associate_attribute_values_to_instance(page, {attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_SINGLE_PRODUCT_REFERENCE_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    reference_data = assigned[0]["value"]
    assert reference_data["__typename"] == "Product"
    assert reference_data["slug"] == expected_slug
ASSIGNED_SINGLE_PRODUCT_VARIANT_REFERENCE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
__typename
...on AssignedSingleProductVariantReferenceAttribute{
value{
__typename
sku
}
}
}
}
}
"""
def test_assigned_single_product_variant_reference_attribute(
    staff_api_client,
    page,
    variant,
    page_type_variant_reference_attribute,
):
    """A single variant reference resolves to the referenced ProductVariant."""
    # given
    attribute = page_type_variant_reference_attribute
    expected_sku = "referenced-variant-sku"
    variant.sku = expected_sku
    variant.save(update_fields=["sku"])
    attribute.input_type = AttributeInputType.SINGLE_REFERENCE
    attribute.save()
    page.page_type.page_attributes.set([attribute])
    value = AttributeValue.objects.create(
        attribute=attribute,
        slug=f"{page.pk}_{variant.pk}",
        reference_variant_id=variant.pk,
    )
    associate_attribute_values_to_instance(page, {attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_SINGLE_PRODUCT_VARIANT_REFERENCE_ATTRIBUTE_QUERY,
        variables={"id": page_id},
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    reference_data = assigned[0]["value"]
    assert reference_data["__typename"] == "ProductVariant"
    assert reference_data["sku"] == expected_sku
ASSIGNED_SINGLE_CATEGORY_REFERENCE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
__typename
...on AssignedSingleCategoryReferenceAttribute{
value{
__typename
slug
}
}
}
}
}
"""
def test_assigned_single_category_reference_attribute(
    staff_api_client,
    page,
    category,
    page_type_category_reference_attribute,
):
    """A single category reference resolves to the referenced Category."""
    # given
    attribute = page_type_category_reference_attribute
    expected_slug = "referenced-category-slug"
    category.slug = expected_slug
    category.save(update_fields=["slug"])
    attribute.input_type = AttributeInputType.SINGLE_REFERENCE
    attribute.save()
    page.page_type.page_attributes.set([attribute])
    value = AttributeValue.objects.create(
        attribute=attribute,
        slug=f"{page.pk}_{category.pk}",
        reference_category_id=category.pk,
    )
    associate_attribute_values_to_instance(page, {attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_SINGLE_CATEGORY_REFERENCE_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    reference_data = assigned[0]["value"]
    assert reference_data["__typename"] == "Category"
    assert reference_data["slug"] == expected_slug
ASSIGNED_SINGLE_COLLECTION_REFERENCE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
__typename
...on AssignedSingleCollectionReferenceAttribute{
value{
__typename
slug
}
}
}
}
}
"""
def test_assigned_single_collection_reference_attribute(
    staff_api_client,
    page,
    collection,
    page_type_collection_reference_attribute,
):
    """A single collection reference resolves to the referenced Collection."""
    # given
    attribute = page_type_collection_reference_attribute
    expected_slug = "referenced-collection-slug"
    collection.slug = expected_slug
    collection.save(update_fields=["slug"])
    attribute.input_type = AttributeInputType.SINGLE_REFERENCE
    attribute.save()
    page.page_type.page_attributes.set([attribute])
    value = AttributeValue.objects.create(
        attribute=attribute,
        slug=f"{page.pk}_{collection.pk}",
        reference_collection_id=collection.pk,
    )
    associate_attribute_values_to_instance(page, {attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_SINGLE_COLLECTION_REFERENCE_ATTRIBUTE_QUERY,
        variables={"id": page_id},
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    reference_data = assigned[0]["value"]
    assert reference_data["__typename"] == "Collection"
    assert reference_data["slug"] == expected_slug
ASSIGNED_MULTIPLE_PAGE_REFERENCE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
...on AssignedMultiPageReferenceAttribute{
__typename
value{
__typename
slug
}
}
}
}
}
"""
def test_assigned_multi_page_reference_attribute(
    staff_api_client,
    page,
    page_list,
    page_type_page_reference_attribute,
):
    """A multi page reference resolves to a list of referenced Pages."""
    # given
    attribute = page_type_page_reference_attribute
    referenced = page_list[0]
    expected_slug = "referenced-page-slug"
    referenced.slug = expected_slug
    referenced.save(update_fields=["slug"])
    attribute.input_type = AttributeInputType.REFERENCE
    attribute.save()
    page.page_type.page_attributes.set([attribute])
    value = AttributeValue.objects.create(
        attribute=attribute,
        name=referenced.title,
        slug=f"{page.pk}_{referenced.pk}",
        reference_page_id=referenced.pk,
    )
    associate_attribute_values_to_instance(page, {attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTIPLE_PAGE_REFERENCE_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    references = assigned[0]["value"]
    assert len(references) == 1
    assert references[0]["__typename"] == "Page"
    assert references[0]["slug"] == expected_slug
ASSIGNED_MULTIPLE_PRODUCT_REFERENCE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID, $valueLimit: PositiveInt) {
page(id: $id) {
assignedAttributes(limit:10) {
...on AssignedMultiProductReferenceAttribute{
__typename
value(limit: $valueLimit) {
__typename
slug
}
}
}
}
}
"""
def test_assigned_multi_product_reference_attribute(
    staff_api_client,
    page,
    product,
    page_type_product_reference_attribute,
):
    """A multi product reference resolves to a list of referenced Products."""
    # given
    attribute = page_type_product_reference_attribute
    expected_slug = "referenced-product-slug"
    product.slug = expected_slug
    product.save(update_fields=["slug"])
    attribute.input_type = AttributeInputType.REFERENCE
    attribute.save()
    page.page_type.page_attributes.set([attribute])
    value = AttributeValue.objects.create(
        attribute=attribute,
        slug=f"{page.pk}_{product.pk}",
        reference_product_id=product.pk,
    )
    associate_attribute_values_to_instance(page, {attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTIPLE_PRODUCT_REFERENCE_ATTRIBUTE_QUERY,
        variables={"id": page_id},
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    references = assigned[0]["value"]
    assert len(references) == 1
    assert references[0]["__typename"] == "Product"
    assert references[0]["slug"] == expected_slug
def test_applies_limit_to_multi_product_references(
    staff_api_client,
    page,
    product_list,
    page_type_product_reference_attribute,
):
    """Only `valueLimit` product references are returned, in order."""
    # given
    attribute = page_type_product_reference_attribute
    # push the pre-existing attributes down so ours is resolved first
    page.page_type.page_attributes.update(storefront_search_position=10)
    page.page_type.page_attributes.set([attribute])
    attribute.input_type = AttributeInputType.REFERENCE
    attribute.storefront_search_position = 1
    attribute.save()
    references = [
        AttributeValue.objects.create(
            attribute=attribute,
            slug=f"{page.pk}_{referenced.pk}",
            reference_product_id=referenced.pk,
        )
        for referenced in product_list[:2]
    ]
    associate_attribute_values_to_instance(page, {attribute.pk: references})
    assert page.attributevalues.count() == 3
    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTIPLE_PRODUCT_REFERENCE_ATTRIBUTE_QUERY,
        variables={
            "id": graphene.Node.to_global_id("Page", page.pk),
            "valueLimit": 1,
        },
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    values = assigned[0]["value"]
    assert len(values) == 1
    assert values[0]["slug"] == references[0].reference_product.slug
ASSIGNED_MULTIPLE_PRODUCT_VARIANT_REFERENCE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID, $valueLimit: PositiveInt) {
page(id: $id) {
assignedAttributes(limit:10) {
...on AssignedMultiProductVariantReferenceAttribute{
__typename
value(limit: $valueLimit) {
__typename
sku
}
}
}
}
}
"""
def test_assigned_multi_product_variant_reference_attribute(
    staff_api_client,
    page,
    variant,
    page_type_variant_reference_attribute,
):
    """A multi variant reference resolves to a list of ProductVariants."""
    # given
    attribute = page_type_variant_reference_attribute
    expected_sku = "referenced-variant-sku"
    variant.sku = expected_sku
    variant.save(update_fields=["sku"])
    attribute.input_type = AttributeInputType.REFERENCE
    attribute.save()
    page.page_type.page_attributes.set([attribute])
    value = AttributeValue.objects.create(
        attribute=attribute,
        slug=f"{page.pk}_{variant.pk}",
        reference_variant_id=variant.pk,
    )
    associate_attribute_values_to_instance(page, {attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTIPLE_PRODUCT_VARIANT_REFERENCE_ATTRIBUTE_QUERY,
        variables={"id": page_id},
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    references = assigned[0]["value"]
    assert len(references) == 1
    assert references[0]["__typename"] == "ProductVariant"
    assert references[0]["sku"] == expected_sku
def test_applies_limit_to_multi_variant_references(
    staff_api_client,
    page,
    product_variant_list,
    page_type_variant_reference_attribute,
):
    """Only `valueLimit` variant references are returned, in order."""
    # given
    attribute = page_type_variant_reference_attribute
    # push the pre-existing attributes down so ours is resolved first
    page.page_type.page_attributes.update(storefront_search_position=10)
    page.page_type.page_attributes.set([attribute])
    attribute.storefront_search_position = 1
    attribute.input_type = AttributeInputType.REFERENCE
    attribute.save()
    references = [
        AttributeValue.objects.create(
            attribute=attribute,
            slug=f"{page.pk}_{referenced.pk}",
            reference_variant_id=referenced.pk,
        )
        for referenced in product_variant_list[:2]
    ]
    associate_attribute_values_to_instance(page, {attribute.pk: references})
    assert page.attributevalues.count() == 3
    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTIPLE_PRODUCT_VARIANT_REFERENCE_ATTRIBUTE_QUERY,
        variables={
            "id": graphene.Node.to_global_id("Page", page.pk),
            "valueLimit": 1,
        },
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    values = assigned[0]["value"]
    assert len(values) == 1
    assert values[0]["sku"] == references[0].reference_variant.sku
ASSIGNED_MULTIPLE_CATEGORY_REFERENCE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID, $valueLimit: PositiveInt) {
page(id: $id) {
assignedAttributes(limit:10) {
...on AssignedMultiCategoryReferenceAttribute{
__typename
value(limit: $valueLimit) {
__typename
slug
}
}
}
}
}
"""
def test_assigned_multi_category_reference_attribute(
    staff_api_client,
    page,
    category,
    page_type_category_reference_attribute,
):
    """A multi category reference resolves to a list of Categories."""
    # given
    attribute = page_type_category_reference_attribute
    expected_slug = "referenced-category-slug"
    category.slug = expected_slug
    category.save(update_fields=["slug"])
    attribute.input_type = AttributeInputType.REFERENCE
    attribute.save()
    page.page_type.page_attributes.set([attribute])
    value = AttributeValue.objects.create(
        attribute=attribute,
        slug=f"{page.pk}_{category.pk}",
        reference_category_id=category.pk,
    )
    associate_attribute_values_to_instance(page, {attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTIPLE_CATEGORY_REFERENCE_ATTRIBUTE_QUERY,
        variables={"id": page_id},
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    references = assigned[0]["value"]
    assert len(references) == 1
    assert references[0]["__typename"] == "Category"
    assert references[0]["slug"] == expected_slug
def test_applies_limit_to_multi_category_references(
    staff_api_client,
    page,
    categories,
    page_type_category_reference_attribute,
):
    """Only `valueLimit` category references are returned, in order."""
    # given
    attribute = page_type_category_reference_attribute
    # push the pre-existing attributes down so ours is resolved first
    page.page_type.page_attributes.update(storefront_search_position=10)
    page.page_type.page_attributes.set([attribute])
    attribute.input_type = AttributeInputType.REFERENCE
    attribute.storefront_search_position = 1
    attribute.save()
    references = [
        AttributeValue.objects.create(
            attribute=attribute,
            slug=f"{page.pk}_{referenced.pk}",
            reference_category_id=referenced.pk,
        )
        for referenced in categories[:2]
    ]
    associate_attribute_values_to_instance(page, {attribute.pk: references})
    assert page.attributevalues.count() == 3
    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTIPLE_CATEGORY_REFERENCE_ATTRIBUTE_QUERY,
        variables={
            "id": graphene.Node.to_global_id("Page", page.pk),
            "valueLimit": 1,
        },
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    values = assigned[0]["value"]
    assert len(values) == 1
    assert values[0]["slug"] == references[0].reference_category.slug
ASSIGNED_MULTIPLE_COLLECTION_REFERENCE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID, $valueLimit: PositiveInt) {
page(id: $id) {
assignedAttributes(limit:10) {
...on AssignedMultiCollectionReferenceAttribute{
__typename
value(limit: $valueLimit) {
__typename
slug
}
}
}
}
}
"""
def test_assigned_multi_collection_reference_attribute(
    staff_api_client,
    page,
    collection,
    page_type_collection_reference_attribute,
):
    """A multi collection reference resolves to a list of Collections."""
    # given
    attribute = page_type_collection_reference_attribute
    expected_slug = "referenced-collection-slug"
    collection.slug = expected_slug
    collection.save(update_fields=["slug"])
    attribute.input_type = AttributeInputType.REFERENCE
    attribute.save()
    page.page_type.page_attributes.set([attribute])
    value = AttributeValue.objects.create(
        attribute=attribute,
        slug=f"{page.pk}_{collection.pk}",
        reference_collection_id=collection.pk,
    )
    associate_attribute_values_to_instance(page, {attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTIPLE_COLLECTION_REFERENCE_ATTRIBUTE_QUERY,
        variables={"id": page_id},
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    references = assigned[0]["value"]
    assert len(references) == 1
    assert references[0]["__typename"] == "Collection"
    assert references[0]["slug"] == expected_slug
def test_applies_limit_to_multi_collection_references(
    staff_api_client,
    page,
    published_collections,
    page_type_collection_reference_attribute,
):
    """Only `valueLimit` collection references are returned, in order."""
    # given
    attribute = page_type_collection_reference_attribute
    # push the pre-existing attributes down so ours is resolved first
    page.page_type.page_attributes.update(storefront_search_position=10)
    page.page_type.page_attributes.set([attribute])
    attribute.input_type = AttributeInputType.REFERENCE
    attribute.storefront_search_position = 1
    attribute.save()
    references = [
        AttributeValue.objects.create(
            attribute=attribute,
            slug=f"{page.pk}_{referenced.pk}",
            reference_collection_id=referenced.pk,
        )
        for referenced in published_collections[:2]
    ]
    associate_attribute_values_to_instance(page, {attribute.pk: references})
    assert page.attributevalues.count() == 3
    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTIPLE_COLLECTION_REFERENCE_ATTRIBUTE_QUERY,
        variables={
            "id": graphene.Node.to_global_id("Page", page.pk),
            "valueLimit": 1,
        },
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    values = assigned[0]["value"]
    assert len(values) == 1
    assert values[0]["slug"] == references[0].reference_collection.slug
ASSIGNED_SINGLE_CHOICE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
...on AssignedSingleChoiceAttribute{
__typename
value{
name
slug
translation(languageCode:FR)
}
}
}
}
}
"""
def test_assigned_single_choice_attribute_translation(
    staff_api_client,
    page,
    size_page_attribute,
):
    """The FR translation of a dropdown choice is returned when requested."""
    # given
    page.page_type.page_attributes.set([size_page_attribute])
    value = size_page_attribute.values.first()
    translation = AttributeValueTranslation.objects.create(
        language_code="fr",
        attribute_value=value,
        name="French Size Name",
    )
    associate_attribute_values_to_instance(page, {size_page_attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_SINGLE_CHOICE_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    assert assigned[0]["value"]["translation"] == translation.name
def test_assigned_single_choice_attribute(
    staff_api_client,
    page,
    size_page_attribute,
):
    """Name and slug of a dropdown choice are exposed."""
    # given
    page.page_type.page_attributes.set([size_page_attribute])
    value = size_page_attribute.values.first()
    value.slug = "size-m"
    value.name = "Size M"
    value.save()
    associate_attribute_values_to_instance(page, {size_page_attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_SINGLE_CHOICE_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    choice_data = assigned[0]["value"]
    assert choice_data["name"] == "Size M"
    assert choice_data["slug"] == "size-m"
ASSIGNED_MULTI_CHOICE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID, $valueLimit: PositiveInt) {
page(id: $id) {
assignedAttributes(limit:10) {
... on AssignedMultiChoiceAttribute {
__typename
value(limit: $valueLimit) {
name
slug
translation(languageCode: FR)
}
}
}
}
}
"""
def test_assigned_multi_choice_attribute_translation(
    staff_api_client,
    page,
    size_page_attribute,
):
    """The FR translation of a multiselect choice is returned when requested."""
    # given
    page.page_type.page_attributes.set([size_page_attribute])
    size_page_attribute.input_type = AttributeInputType.MULTISELECT
    size_page_attribute.save()
    value = size_page_attribute.values.first()
    translation = AttributeValueTranslation.objects.create(
        language_code="fr",
        attribute_value=value,
        name="French Size Name",
    )
    associate_attribute_values_to_instance(page, {size_page_attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTI_CHOICE_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    values = assigned[0]["value"]
    assert len(values) == 1
    assert values[0]["translation"] == translation.name
def test_applies_limit_to_multi_choices(
    staff_api_client,
    page,
    size_page_attribute,
):
    """Only `valueLimit` choices are returned for a multiselect attribute."""
    # given
    page.page_type.page_attributes.set([size_page_attribute])
    size_page_attribute.input_type = AttributeInputType.MULTISELECT
    size_page_attribute.save()
    first_choice = size_page_attribute.values.first()
    second_choice = size_page_attribute.values.last()
    associate_attribute_values_to_instance(
        page, {size_page_attribute.pk: [first_choice, second_choice]}
    )
    assert page.attributevalues.count() == 2
    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTI_CHOICE_ATTRIBUTE_QUERY,
        variables={
            "id": graphene.Node.to_global_id("Page", page.pk),
            "valueLimit": 1,
        },
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    values = assigned[0]["value"]
    assert len(values) == 1
    assert values[0]["slug"] == first_choice.slug
def test_assigned_multi_choice_attribute(
    staff_api_client,
    page,
    size_page_attribute,
):
    """Name and slug of each multiselect choice are exposed."""
    # given
    page.page_type.page_attributes.set([size_page_attribute])
    size_page_attribute.input_type = AttributeInputType.MULTISELECT
    size_page_attribute.save()
    value = size_page_attribute.values.first()
    value.slug = "size-m"
    value.name = "Size M"
    value.save()
    associate_attribute_values_to_instance(page, {size_page_attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_MULTI_CHOICE_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    values = assigned[0]["value"]
    assert len(values) == 1
    assert values[0]["name"] == "Size M"
    assert values[0]["slug"] == "size-m"
ASSIGNED_SWATCH_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
... on AssignedSwatchAttribute {
value {
name
slug
hexColor
file {
url
contentType
}
}
}
}
}
}
"""
def test_assigned_swatch_attribute(
    staff_api_client,
    page,
    swatch_attribute,
):
    """Swatch name, slug and hex color are exposed on AssignedSwatchAttribute."""
    # given
    swatch_attribute.type = AttributeType.PAGE_TYPE
    swatch_attribute.save()
    page.page_type.page_attributes.set([swatch_attribute])
    value = swatch_attribute.values.first()
    value.slug = "red"
    value.name = "Red"
    value.value = "#5C3030"
    value.save()
    associate_attribute_values_to_instance(page, {swatch_attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_SWATCH_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    swatch_data = assigned[0]["value"]
    assert swatch_data["name"] == "Red"
    assert swatch_data["slug"] == "red"
    assert swatch_data["hexColor"] == "#5C3030"
def test_assigned_swatch_file_attribute(staff_api_client, page, swatch_attribute):
    """A file-based swatch exposes its file URL and content type."""
    # given
    swatch_attribute.type = AttributeType.PAGE_TYPE
    swatch_attribute.save()
    page.page_type.page_attributes.set([swatch_attribute])
    value = swatch_attribute.values.first()
    value.file_url = "https://example.com/file.pdf"
    value.content_type = "application/pdf"
    value.save()
    associate_attribute_values_to_instance(page, {swatch_attribute.pk: [value]})
    assert value.file_url is not None
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_SWATCH_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    swatch_data = assigned[0]["value"]
    assert swatch_data["name"] == value.name
    assert swatch_data["slug"] == value.slug
    assert swatch_data["file"]["url"] == value.file_url
    assert swatch_data["file"]["contentType"] == value.content_type
ASSIGNED_BOOLEAN_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
... on AssignedBooleanAttribute {
value
}
}
}
}
"""
def test_assigned_boolean_attribute(
    staff_api_client,
    page,
    boolean_attribute,
):
    """A boolean attribute value is returned as a plain GraphQL Boolean."""
    # given
    boolean_attribute.type = AttributeType.PAGE_TYPE
    boolean_attribute.save()
    page.page_type.page_attributes.set([boolean_attribute])
    value = boolean_attribute.values.first()
    value.boolean = True
    value.save()
    associate_attribute_values_to_instance(page, {boolean_attribute.pk: [value]})
    # when
    page_id = graphene.Node.to_global_id("Page", page.pk)
    response = staff_api_client.post_graphql(
        ASSIGNED_BOOLEAN_ATTRIBUTE_QUERY, variables={"id": page_id}
    )
    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    assert assigned[0]["value"] is True
ASSIGNED_DATE_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
... on AssignedDateAttribute {
value
}
}
}
}
"""
def test_assigned_date_attribute(
    staff_api_client,
    page,
    date_attribute,
):
    """A date page attribute value is serialized as the ISO date string."""
    # given
    date_attribute.type = AttributeType.PAGE_TYPE
    date_attribute.save()
    page.page_type.page_attributes.set([date_attribute])

    stored_datetime = datetime.now(UTC)
    value = date_attribute.values.first()
    value.date_time = stored_datetime
    value.save()
    associate_attribute_values_to_instance(page, {date_attribute.pk: [value]})

    variables = {"id": graphene.Node.to_global_id("Page", page.pk)}

    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_DATE_ATTRIBUTE_QUERY, variables=variables
    )

    # then
    content = get_graphql_content(response)
    assigned = content["data"]["page"]["assignedAttributes"]
    assert len(assigned) == 1
    # Only the date portion of the stored datetime is exposed.
    assert assigned[0]["value"] == str(stored_datetime.date())
ASSIGNED_DATETIME_ATTRIBUTE_QUERY = """
query PageQuery($id: ID) {
page(id: $id) {
assignedAttributes(limit:10) {
... on AssignedDateTimeAttribute {
value
}
}
}
}
"""
def test_assigned_datetime_attribute(
    staff_api_client,
    page,
    date_time_attribute,
):
    """A datetime page attribute value is serialized as an ISO-8601 string.

    The expected value is `datetime.isoformat()`, which already returns a
    string — the previous `str(...)` wrapper around it was redundant and has
    been removed.
    """
    # given
    date_time_attribute.type = AttributeType.PAGE_TYPE
    date_time_attribute.save()
    page_type = page.page_type
    page_type.page_attributes.set([date_time_attribute])
    attr_value = date_time_attribute.values.first()
    expected_attr_datetime_value = datetime.now(UTC)
    attr_value.date_time = expected_attr_datetime_value
    attr_value.save()
    associate_attribute_values_to_instance(page, {date_time_attribute.pk: [attr_value]})
    # when
    response = staff_api_client.post_graphql(
        ASSIGNED_DATETIME_ATTRIBUTE_QUERY,
        variables={"id": graphene.Node.to_global_id("Page", page.pk)},
    )
    # then
    content = get_graphql_content(response)
    assert len(content["data"]["page"]["assignedAttributes"]) == 1
    attr_value_data = content["data"]["page"]["assignedAttributes"][0]["value"]
    # isoformat() returns str directly; no extra str() conversion needed.
    assert attr_value_data == expected_attr_datetime_value.isoformat()
def test_all_non_reference_attribute_type_has_own_assigned_types():
    """Every non-reference input type maps to a concrete assigned-attribute type."""
    reference_types = {
        AttributeInputType.SINGLE_REFERENCE,
        AttributeInputType.REFERENCE,
    }
    expected_types = {value for value, _ in AttributeInputType.CHOICES} - reference_types
    # No entry in the map may be empty/None.
    assert all(ASSIGNED_ATTRIBUTE_MAP.values())
    assert set(ASSIGNED_ATTRIBUTE_MAP) == expected_types
def test_all_single_reference_attribute_type_has_own_assigned_types():
    """Every entity type maps to an assigned single-reference GraphQL type."""
    expected_entities = {entity_name for entity_name, _ in AttributeEntityType.CHOICES}
    # No entry in the map may be empty/None.
    assert all(ASSIGNED_SINGLE_REFERENCE_MAP.values())
    assert set(ASSIGNED_SINGLE_REFERENCE_MAP) == expected_entities
def test_all_multi_reference_attribute_type_has_own_assigned_types():
    """Every entity type maps to an assigned multi-reference GraphQL type."""
    expected_entities = {entity_name for entity_name, _ in AttributeEntityType.CHOICES}
    # No entry in the map may be empty/None.
    assert all(ASSIGNED_MULTI_REFERENCE_MAP.values())
    assert set(ASSIGNED_MULTI_REFERENCE_MAP) == expected_entities
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/attribute/tests/queries/test_selected_attribute.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 1415,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/product/tests/queries/variants_where/test_over_references_pages.py | import pytest
from ......attribute import AttributeEntityType, AttributeInputType, AttributeType
from ......attribute.models import Attribute, AttributeValue
from ......attribute.utils import associate_attribute_values_to_instance
from ......page.models import Page
from .....core.utils import to_global_id_or_none
from .....tests.utils import get_graphql_content
from .shared import PRODUCT_VARIANTS_WHERE_QUERY
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_product_variants_query_with_attr_slug_and_attribute_value_reference_to_pages(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    page_type,
    product_type_page_reference_attribute,
    channel_USD,
):
    """Filter variants by attribute slug plus referenced page slugs.

    Variant 0 references both pages, variant 1 only the second one, so
    ``containsAny`` matches 2 variants and ``containsAll`` matches 1.
    """
    # given
    product_type = product_variant_list[0].product.product_type
    product_type.variant_attributes.add(product_type_page_reference_attribute)
    reference_page_1_slug = "referenced-page-1"
    reference_page_2_slug = "referenced-page-2"
    referenced_page_1, referenced_page_2 = Page.objects.bulk_create(
        [
            Page(
                title="Referenced Page 1",
                slug=reference_page_1_slug,
                page_type=page_type,
                is_published=True,
            ),
            Page(
                title="Referenced Page 2",
                slug=reference_page_2_slug,
                page_type=page_type,
                is_published=True,
            ),
        ]
    )
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_page_reference_attribute,
                name=f"Page {referenced_page_1.pk}",
                slug=f"page-{referenced_page_1.pk}",
                reference_page=referenced_page_1,
            ),
            AttributeValue(
                attribute=product_type_page_reference_attribute,
                name=f"Page {referenced_page_2.pk}",
                slug=f"page-{referenced_page_2.pk}",
                reference_page=referenced_page_2,
            ),
        ]
    )
    product_variant_with_both_references = product_variant_list[0]
    associate_attribute_values_to_instance(
        product_variant_with_both_references,
        {
            product_type_page_reference_attribute.pk: [
                attribute_value_1,
                attribute_value_2,
            ]
        },
    )
    product_variant_with_single_reference = product_variant_list[1]
    associate_attribute_values_to_instance(
        product_variant_with_single_reference,
        {product_type_page_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    # Assumed slug of the product_type_page_reference_attribute
                    # fixture — TODO confirm against the fixture definition.
                    "slug": "product-page-reference",
                    "value": {
                        "reference": {
                            "pageSlugs": {
                                filter_type: [
                                    reference_page_1_slug,
                                    reference_page_2_slug,
                                ]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_product_variants_query_with_attribute_value_reference_to_pages(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type,
    page_type,
    product_type_page_reference_attribute,
    channel_USD,
):
    """Filter variants by referenced page slugs without an attribute slug.

    The two references live on two *different* attributes; with no slug in the
    filter, both attributes are considered when matching page slugs.
    NOTE(review): the ``product_type`` fixture is assumed to be the product
    type of ``product_variant_list`` — verify the fixtures align.
    """
    # given
    second_page_reference_attribute = Attribute.objects.create(
        slug="second-page-reference",
        name="Page reference",
        type=AttributeType.PRODUCT_TYPE,
        input_type=AttributeInputType.REFERENCE,
        entity_type=AttributeEntityType.PAGE,
    )
    product_type.variant_attributes.add(
        product_type_page_reference_attribute,
        second_page_reference_attribute,
    )
    reference_1 = "referenced-page-1"
    reference_2 = "referenced-page-2"
    referenced_page_1, referenced_page_2 = Page.objects.bulk_create(
        [
            Page(
                title="Referenced Page 1",
                slug=reference_1,
                page_type=page_type,
                is_published=True,
            ),
            Page(
                title="Referenced Page 2",
                slug=reference_2,
                page_type=page_type,
                is_published=True,
            ),
        ]
    )
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_page_reference_attribute,
                name=f"Page {referenced_page_1.pk}",
                slug=f"page-{referenced_page_1.pk}",
                reference_page=referenced_page_1,
            ),
            AttributeValue(
                attribute=second_page_reference_attribute,
                name=f"Page {referenced_page_2.pk}",
                slug=f"page-{referenced_page_2.pk}",
                reference_page=referenced_page_2,
            ),
        ]
    )
    product_variant_with_both_references = product_variant_list[0]
    associate_attribute_values_to_instance(
        product_variant_with_both_references,
        {
            product_type_page_reference_attribute.pk: [attribute_value_1],
            second_page_reference_attribute.pk: [attribute_value_2],
        },
    )
    product_variant_with_single_reference = product_variant_list[1]
    associate_attribute_values_to_instance(
        product_variant_with_single_reference,
        {second_page_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    # No "slug" key: match against any reference attribute.
                    "value": {
                        "reference": {
                            "pageSlugs": {filter_type: [reference_1, reference_2]}
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 3), ("containsAll", 2)]
)
def test_product_variants_query_with_attr_slug_and_attribute_value_referenced_page_ids(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type,
    page_type,
    product_type_page_reference_attribute,
    channel_USD,
):
    """Filter variants by attribute slug plus referenced page global IDs.

    Variants 0 and 1 reference all three pages, variant 3 only the first one:
    ``containsAny`` matches 3 variants, ``containsAll`` matches 2.
    """
    # given
    product_type.variant_attributes.add(product_type_page_reference_attribute)
    referenced_first_page, referenced_second_page, referenced_third_page = (
        Page.objects.bulk_create(
            [
                Page(
                    title="Referenced Page",
                    slug="referenced-page",
                    page_type=page_type,
                    is_published=True,
                ),
                Page(
                    title="Referenced Page",
                    slug="referenced-page2",
                    page_type=page_type,
                    is_published=True,
                ),
                Page(
                    title="Referenced Page",
                    slug="referenced-page3",
                    page_type=page_type,
                    is_published=True,
                ),
            ]
        )
    )
    first_attr_value, second_attr_value, third_attr_value = (
        AttributeValue.objects.bulk_create(
            [
                AttributeValue(
                    attribute=product_type_page_reference_attribute,
                    name=f"Page {referenced_first_page.pk}",
                    slug=f"page-{referenced_first_page.pk}",
                    reference_page=referenced_first_page,
                ),
                AttributeValue(
                    attribute=product_type_page_reference_attribute,
                    name=f"Page {referenced_second_page.pk}",
                    slug=f"page-{referenced_second_page.pk}",
                    reference_page=referenced_second_page,
                ),
                AttributeValue(
                    attribute=product_type_page_reference_attribute,
                    name=f"Page {referenced_third_page.pk}",
                    slug=f"page-{referenced_third_page.pk}",
                    reference_page=referenced_third_page,
                ),
            ]
        )
    )
    first_product_variant_with_all_ids = product_variant_list[0]
    second_product_variant_with_all_ids = product_variant_list[1]
    product_variant_with_single_id = product_variant_list[3]
    associate_attribute_values_to_instance(
        first_product_variant_with_all_ids,
        {
            product_type_page_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        second_product_variant_with_all_ids,
        {
            product_type_page_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        product_variant_with_single_id,
        {product_type_page_reference_attribute.pk: [first_attr_value]},
    )
    # Filter by opaque Relay global IDs rather than page slugs.
    referenced_first_global_id = to_global_id_or_none(referenced_first_page)
    referenced_second_global_id = to_global_id_or_none(referenced_second_page)
    referenced_third_global_id = to_global_id_or_none(referenced_third_page)
    variables = {
        "where": {
            "attributes": [
                {
                    "slug": product_type_page_reference_attribute.slug,
                    "value": {
                        "reference": {
                            "referencedIds": {
                                filter_type: [
                                    referenced_first_global_id,
                                    referenced_second_global_id,
                                    referenced_third_global_id,
                                ]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/product/tests/queries/variants_where/test_over_references_pages.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 319,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/product/tests/queries/variants_where/test_over_references_products.py | import graphene
import pytest
from ......attribute import AttributeEntityType, AttributeInputType, AttributeType
from ......attribute.models import Attribute, AttributeValue
from ......attribute.utils import associate_attribute_values_to_instance
from ......product.models import Product
from .....core.utils import to_global_id_or_none
from .....tests.utils import get_graphql_content
from .shared import PRODUCT_VARIANTS_WHERE_QUERY
@pytest.mark.parametrize(
    ("filter_type", "expected_count"),
    [("containsAny", 2), ("containsAll", 1)],
)
def test_product_variants_query_with_attr_slug_and_attribute_value_reference_to_products(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type_product_reference_attribute,
    channel_USD,
):
    """Filter variants by attribute slug plus referenced product slugs.

    Variant 0 references both products and variant 1 only the second one:
    ``containsAny`` matches 2 variants, ``containsAll`` matches 1.
    """
    # given
    product_type = product_variant_list[0].product.product_type
    product_type.variant_attributes.add(product_type_product_reference_attribute)
    ref_product_1, ref_product_2 = Product.objects.bulk_create(
        [
            Product(
                name="Reference Product 1",
                slug="ref-1",
                product_type=product_type,
            ),
            Product(
                name="Reference Product 2",
                slug="ref-2",
                product_type=product_type,
            ),
        ]
    )
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_product_reference_attribute,
                name=f"Product {ref_product_1.pk}",
                slug=f"product-{ref_product_1.pk}",
                reference_product=ref_product_1,
            ),
            AttributeValue(
                attribute=product_type_product_reference_attribute,
                name=f"Product {ref_product_2.pk}",
                slug=f"product-{ref_product_2.pk}",
                reference_product=ref_product_2,
            ),
        ]
    )
    product_variant_with_both_references = product_variant_list[0]
    associate_attribute_values_to_instance(
        product_variant_with_both_references,
        {
            product_type_product_reference_attribute.pk: [
                attribute_value_1,
                attribute_value_2,
            ]
        },
    )
    product_variant_with_single_reference = product_variant_list[1]
    associate_attribute_values_to_instance(
        product_variant_with_single_reference,
        {product_type_product_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    # Assumed slug of the product_type_product_reference_attribute
                    # fixture — TODO confirm against the fixture definition.
                    "slug": "product-reference",
                    "value": {
                        "reference": {
                            "productSlugs": {
                                filter_type: [ref_product_1.slug, ref_product_2.slug]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
    # NOTE(review): assumes the variant holding both references is returned
    # first — presumably default ordering by pk; confirm if this flakes.
    assert product_variants_nodes[0]["node"]["id"] == graphene.Node.to_global_id(
        "ProductVariant", product_variant_list[0].pk
    )
@pytest.mark.parametrize(
    ("filter_type", "expected_count"),
    [("containsAny", 2), ("containsAll", 1)],
)
def test_product_variants_query_with_attribute_value_reference_to_products(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type_product_reference_attribute,
    channel_USD,
):
    """Filter variants by referenced product slugs without an attribute slug.

    The references are split over two attributes; with no slug in the filter,
    both attributes participate in matching.
    """
    # given
    product_type = product_variant_list[0].product.product_type
    second_product_reference_attribute = Attribute.objects.create(
        slug="second-product-reference",
        name="Product reference",
        type=AttributeType.PRODUCT_TYPE,
        input_type=AttributeInputType.REFERENCE,
        entity_type=AttributeEntityType.PRODUCT,
    )
    product_type.variant_attributes.add(
        product_type_product_reference_attribute,
        second_product_reference_attribute,
    )
    ref_product_1, ref_product_2 = Product.objects.bulk_create(
        [
            Product(
                name="Reference Product 1",
                slug="ref-1",
                product_type=product_type,
            ),
            Product(
                name="Reference Product 2",
                slug="ref-2",
                product_type=product_type,
            ),
        ]
    )
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_product_reference_attribute,
                name=f"Product {ref_product_1.pk}",
                slug=f"product-{ref_product_1.pk}",
                reference_product=ref_product_1,
            ),
            AttributeValue(
                attribute=second_product_reference_attribute,
                name=f"Product {ref_product_2.pk}",
                slug=f"product-{ref_product_2.pk}",
                reference_product=ref_product_2,
            ),
        ]
    )
    product_variant_with_both_references = product_variant_list[0]
    associate_attribute_values_to_instance(
        product_variant_with_both_references,
        {
            product_type_product_reference_attribute.pk: [attribute_value_1],
            second_product_reference_attribute.pk: [attribute_value_2],
        },
    )
    product_variant_with_single_reference = product_variant_list[1]
    associate_attribute_values_to_instance(
        product_variant_with_single_reference,
        {second_product_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    # No "slug" key: match against any reference attribute.
                    "value": {
                        "reference": {
                            "productSlugs": {
                                filter_type: [ref_product_1.slug, ref_product_2.slug]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
    # NOTE(review): assumes the variant holding both references is returned
    # first — presumably default ordering by pk; confirm if this flakes.
    assert product_variants_nodes[0]["node"]["id"] == graphene.Node.to_global_id(
        "ProductVariant", product_variant_list[0].pk
    )
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 3), ("containsAll", 2)]
)
def test_product_variants_query_with_attr_slug_and_attribute_value_referenced_product_ids(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type_product_reference_attribute,
    channel_USD,
):
    """Filter variants by attribute slug plus referenced product global IDs.

    Variants 0 and 1 reference all three products, variant 3 only the first:
    ``containsAny`` matches 3 variants, ``containsAll`` matches 2.
    """
    # given
    product_type = product_variant_list[0].product.product_type
    product_type.variant_attributes.add(
        product_type_product_reference_attribute,
    )
    ref_product_1, ref_product_2, ref_product_3 = Product.objects.bulk_create(
        [
            Product(
                name="Reference Product 1",
                slug="ref-1",
                product_type=product_type,
            ),
            Product(
                name="Reference Product 2",
                slug="ref-2",
                product_type=product_type,
            ),
            Product(
                name="Reference Product 3",
                slug="ref-3",
                product_type=product_type,
            ),
        ]
    )
    first_attr_value, second_attr_value, third_attr_value = (
        AttributeValue.objects.bulk_create(
            [
                AttributeValue(
                    attribute=product_type_product_reference_attribute,
                    name=f"Product {ref_product_1.pk}",
                    slug=f"product-{ref_product_1.pk}",
                    reference_product=ref_product_1,
                ),
                AttributeValue(
                    attribute=product_type_product_reference_attribute,
                    name=f"Product {ref_product_2.pk}",
                    slug=f"product-{ref_product_2.pk}",
                    reference_product=ref_product_2,
                ),
                AttributeValue(
                    attribute=product_type_product_reference_attribute,
                    name=f"Product {ref_product_3.pk}",
                    slug=f"product-{ref_product_3.pk}",
                    reference_product=ref_product_3,
                ),
            ]
        )
    )
    first_product_variant_with_all_ids = product_variant_list[0]
    second_product_variant_with_all_ids = product_variant_list[1]
    product_variant_with_single_id = product_variant_list[3]
    associate_attribute_values_to_instance(
        first_product_variant_with_all_ids,
        {
            product_type_product_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        second_product_variant_with_all_ids,
        {
            product_type_product_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        product_variant_with_single_id,
        {
            product_type_product_reference_attribute.pk: [
                first_attr_value,
            ],
        },
    )
    # Filter by opaque Relay global IDs rather than product slugs.
    ref_1_global_id = to_global_id_or_none(ref_product_1)
    ref_2_global_id = to_global_id_or_none(ref_product_2)
    ref_3_global_id = to_global_id_or_none(ref_product_3)
    variables = {
        "where": {
            "attributes": [
                {
                    "slug": product_type_product_reference_attribute.slug,
                    "value": {
                        "reference": {
                            "referencedIds": {
                                filter_type: [
                                    ref_1_global_id,
                                    ref_2_global_id,
                                    ref_3_global_id,
                                ]
                            }
                        }
                    },
                },
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/product/tests/queries/variants_where/test_over_references_products.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 317,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/product/tests/queries/variants_where/test_over_references_variants.py | import graphene
import pytest
from ......attribute import AttributeEntityType, AttributeInputType, AttributeType
from ......attribute.models import Attribute, AttributeValue
from ......attribute.utils import associate_attribute_values_to_instance
from .....core.utils import to_global_id_or_none
from .....tests.utils import get_graphql_content
from .shared import PRODUCT_VARIANTS_WHERE_QUERY
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_product_variants_query_with_attribute_value_reference_to_product_variants(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type_variant_reference_attribute,
    channel_USD,
    variant,
    variant_without_inventory_tracking,
):
    """Filter variants by referenced variant SKUs without an attribute slug.

    References are split across two attributes; with no slug in the filter,
    both attributes are considered when matching SKUs.
    """
    # given
    product_type = product_variant_list[0].product.product_type
    # NOTE(review): slug/name say "product" but this attribute references
    # product variants (entity_type below) — possibly a copy-paste label.
    second_variant_reference_attribute = Attribute.objects.create(
        slug="second-product-reference",
        name="Product reference",
        type=AttributeType.PRODUCT_TYPE,
        input_type=AttributeInputType.REFERENCE,
        entity_type=AttributeEntityType.PRODUCT_VARIANT,
    )
    product_type.variant_attributes.set(
        [product_type_variant_reference_attribute, second_variant_reference_attribute]
    )
    first_variant_sku = "test-variant-1"
    second_variant_sku = "test-variant-2"
    first_variant = variant
    first_variant.sku = first_variant_sku
    first_variant.save()
    second_variant = variant_without_inventory_tracking
    second_variant.sku = second_variant_sku
    second_variant.save()
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_variant_reference_attribute,
                name=f"Variant {first_variant.pk}",
                slug=f"variant-{first_variant.pk}",
                reference_variant=first_variant,
            ),
            AttributeValue(
                attribute=second_variant_reference_attribute,
                name=f"Variant {second_variant.pk}",
                slug=f"variant-{second_variant.pk}",
                reference_variant=second_variant,
            ),
        ]
    )
    product_variant_with_both_references = product_variant_list[0]
    associate_attribute_values_to_instance(
        product_variant_with_both_references,
        {
            product_type_variant_reference_attribute.pk: [attribute_value_1],
            second_variant_reference_attribute.pk: [attribute_value_2],
        },
    )
    product_variant_with_single_reference = product_variant_list[1]
    associate_attribute_values_to_instance(
        product_variant_with_single_reference,
        {second_variant_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    # No "slug" key: match against any reference attribute.
                    "value": {
                        "reference": {
                            "productVariantSkus": {
                                filter_type: [
                                    first_variant_sku,
                                    second_variant_sku,
                                ]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
    # NOTE(review): assumes the variant holding both references is returned
    # first — presumably default ordering by pk; confirm if this flakes.
    assert product_variants_nodes[0]["node"]["id"] == graphene.Node.to_global_id(
        "ProductVariant", product_variant_list[0].pk
    )
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 2), ("containsAll", 1)]
)
def test_product_variants_query_with_attr_slug_and_attribute_value_reference_to_product_variants(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type_variant_reference_attribute,
    channel_USD,
):
    """Filter variants by attribute slug plus referenced variant SKUs.

    Variant 0 references both variants and variant 1 only the second one:
    ``containsAny`` matches 2 variants, ``containsAll`` matches 1.
    """
    # given
    product_type = product_variant_list[0].product.product_type
    product_type.variant_attributes.add(product_type_variant_reference_attribute)
    first_variant_sku = "test-variant-1"
    second_variant_sku = "test-variant-2"
    first_variant = product_variant_list[0]
    first_variant.sku = first_variant_sku
    first_variant.save()
    second_variant = product_variant_list[1]
    second_variant.sku = second_variant_sku
    second_variant.save()
    attribute_value_1, attribute_value_2 = AttributeValue.objects.bulk_create(
        [
            AttributeValue(
                attribute=product_type_variant_reference_attribute,
                name=f"Variant {first_variant.pk}",
                slug=f"variant-{first_variant.pk}",
                reference_variant=first_variant,
            ),
            AttributeValue(
                attribute=product_type_variant_reference_attribute,
                name=f"Variant {second_variant.pk}",
                slug=f"variant-{second_variant.pk}",
                reference_variant=second_variant,
            ),
        ]
    )
    product_variant_with_both_references = product_variant_list[0]
    associate_attribute_values_to_instance(
        product_variant_with_both_references,
        {
            product_type_variant_reference_attribute.pk: [
                attribute_value_1,
                attribute_value_2,
            ]
        },
    )
    product_variant_with_single_reference = product_variant_list[1]
    associate_attribute_values_to_instance(
        product_variant_with_single_reference,
        {product_type_variant_reference_attribute.pk: [attribute_value_2]},
    )
    variables = {
        "where": {
            "attributes": [
                {
                    # Assumed slug of the product_type_variant_reference_attribute
                    # fixture — TODO confirm against the fixture definition.
                    "slug": "variant-reference",
                    "value": {
                        "reference": {
                            "productVariantSkus": {
                                filter_type: [
                                    first_variant_sku,
                                    second_variant_sku,
                                ]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
    # NOTE(review): assumes the variant holding both references is returned
    # first — presumably default ordering by pk; confirm if this flakes.
    assert product_variants_nodes[0]["node"]["id"] == graphene.Node.to_global_id(
        "ProductVariant", product_variant_list[0].pk
    )
@pytest.mark.parametrize(
    ("filter_type", "expected_count"), [("containsAny", 3), ("containsAll", 2)]
)
def test_product_variants_query_with_attr_slug_attribute_value_referenced_variant_ids(
    filter_type,
    expected_count,
    staff_api_client,
    product_variant_list,
    product_type_variant_reference_attribute,
    channel_USD,
):
    """Filter variants by attribute slug plus referenced variant global IDs.

    Variants 0 and 1 reference all three variants, variant 3 only the first:
    ``containsAny`` matches 3 variants, ``containsAll`` matches 2.
    """
    # given
    product_type = product_variant_list[0].product.product_type
    product_type.variant_attributes.add(
        product_type_variant_reference_attribute,
    )
    first_variant = product_variant_list[0]
    second_variant = product_variant_list[1]
    third_variant = product_variant_list[3]
    first_attr_value, second_attr_value, third_attr_value = (
        AttributeValue.objects.bulk_create(
            [
                AttributeValue(
                    attribute=product_type_variant_reference_attribute,
                    name=f"Variant {first_variant.pk}",
                    slug=f"variant-{first_variant.pk}",
                    reference_variant=first_variant,
                ),
                AttributeValue(
                    attribute=product_type_variant_reference_attribute,
                    name=f"Variant {second_variant.pk}",
                    slug=f"variant-{second_variant.pk}",
                    reference_variant=second_variant,
                ),
                AttributeValue(
                    attribute=product_type_variant_reference_attribute,
                    name=f"Variant {third_variant.pk}",
                    slug=f"variant-{third_variant.pk}",
                    reference_variant=third_variant,
                ),
            ]
        )
    )
    first_product_variant_with_all_ids = product_variant_list[0]
    second_product_variant_with_all_ids = product_variant_list[1]
    product_variant_with_single_id = product_variant_list[3]
    associate_attribute_values_to_instance(
        first_product_variant_with_all_ids,
        {
            product_type_variant_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        second_product_variant_with_all_ids,
        {
            product_type_variant_reference_attribute.pk: [
                first_attr_value,
                second_attr_value,
                third_attr_value,
            ],
        },
    )
    associate_attribute_values_to_instance(
        product_variant_with_single_id,
        {product_type_variant_reference_attribute.pk: [first_attr_value]},
    )
    # Filter by opaque Relay global IDs rather than SKUs.
    referenced_first_global_id = to_global_id_or_none(first_variant)
    referenced_second_global_id = to_global_id_or_none(second_variant)
    referenced_third_global_id = to_global_id_or_none(third_variant)
    variables = {
        "where": {
            "attributes": [
                {
                    "slug": product_type_variant_reference_attribute.slug,
                    "value": {
                        "reference": {
                            "referencedIds": {
                                filter_type: [
                                    referenced_first_global_id,
                                    referenced_second_global_id,
                                    referenced_third_global_id,
                                ]
                            }
                        }
                    },
                }
            ]
        },
        "channel": channel_USD.slug,
    }
    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )
    # then
    content = get_graphql_content(response)
    product_variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(product_variants_nodes) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/product/tests/queries/variants_where/test_over_references_variants.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 289,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/product/tests/queries/variants_where/shared.py | PRODUCT_VARIANTS_WHERE_QUERY = """
query($where: ProductVariantWhereInput!, $channel: String) {
productVariants(first: 10, where: $where, channel: $channel) {
edges {
node {
id
name
sku
}
}
}
}
"""
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/product/tests/queries/variants_where/shared.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 13,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/product/tests/queries/variants_where/test_over_attributes.py | import graphene
import pytest
from ......attribute.utils import associate_attribute_values_to_instance
from .....tests.utils import get_graphql_content
from .shared import PRODUCT_VARIANTS_WHERE_QUERY
def test_product_variants_query_with_attribute_slug(
    staff_api_client, product_variant_list, weight_attribute, channel_USD
):
    """Filtering by attribute slug alone returns variants carrying any of its values."""
    # given
    target_variant = product_variant_list[0]
    target_variant.product.product_type.variant_attributes.add(weight_attribute)
    first_value = weight_attribute.values.first()
    associate_attribute_values_to_instance(
        target_variant, {weight_attribute.pk: [first_value]}
    )

    variables = {
        "where": {"attributes": [{"slug": weight_attribute.slug}]},
        "channel": channel_USD.slug,
    }

    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )

    # then
    content = get_graphql_content(response)
    edges = content["data"]["productVariants"]["edges"]
    assert len(edges) == 1
    expected_id = graphene.Node.to_global_id("ProductVariant", target_variant.pk)
    assert edges[0]["node"]["id"] == expected_id
@pytest.mark.parametrize(
    ("attribute_input", "expected_count"),
    [
        ({"value": {"slug": {"eq": "test-slug-1"}}}, 1),
        ({"value": {"slug": {"oneOf": ["test-slug-1", "test-slug-2"]}}}, 2),
        ({"slug": "weight_attribute", "value": {"slug": {"eq": "test-slug-1"}}}, 1),
        (
            {
                "slug": "weight_attribute",
                "value": {"slug": {"oneOf": ["test-slug-1", "test-slug-2"]}},
            },
            2,
        ),
    ],
)
def test_product_variants_query_with_attribute_value_slug(
    attribute_input,
    expected_count,
    staff_api_client,
    product_variant_list,
    weight_attribute,
    channel_USD,
):
    """Variants can be filtered by attribute value slug via eq / oneOf."""
    # given
    weight_attribute.slug = "weight_attribute"
    weight_attribute.save()
    product_variant_list[0].product.product_type.variant_attributes.add(
        weight_attribute
    )

    # Give two values distinct slugs and attach each to a different variant.
    first_value = weight_attribute.values.first()
    second_value = weight_attribute.values.last()
    for value, slug in ((first_value, "test-slug-1"), (second_value, "test-slug-2")):
        value.slug = slug
        value.save()
    associate_attribute_values_to_instance(
        product_variant_list[0], {weight_attribute.pk: [first_value]}
    )
    associate_attribute_values_to_instance(
        product_variant_list[1], {weight_attribute.pk: [second_value]}
    )

    variables = {
        "where": {"attributes": [attribute_input]},
        "channel": channel_USD.slug,
    }

    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )

    # then
    content = get_graphql_content(response)
    edges = content["data"]["productVariants"]["edges"]
    assert len(edges) == expected_count
@pytest.mark.parametrize(
    ("attribute_input", "expected_count"),
    [
        ({"value": {"name": {"eq": "test-name-1"}}}, 1),
        ({"value": {"name": {"oneOf": ["test-name-1", "test-name-2"]}}}, 2),
        ({"slug": "weight_attribute", "value": {"name": {"eq": "test-name-1"}}}, 1),
        (
            {
                "slug": "weight_attribute",
                "value": {"name": {"oneOf": ["test-name-1", "test-name-2"]}},
            },
            2,
        ),
    ],
)
def test_product_variants_query_with_attribute_value_name(
    attribute_input,
    expected_count,
    staff_api_client,
    product_variant_list,
    weight_attribute,
    channel_USD,
):
    """Variants can be filtered by attribute value name via eq / oneOf."""
    # given
    weight_attribute.slug = "weight_attribute"
    weight_attribute.save()
    product_variant_list[0].product.product_type.variant_attributes.add(
        weight_attribute
    )

    # Give two values distinct names and attach each to a different variant.
    first_value = weight_attribute.values.first()
    second_value = weight_attribute.values.last()
    for value, name in ((first_value, "test-name-1"), (second_value, "test-name-2")):
        value.name = name
        value.save()
    associate_attribute_values_to_instance(
        product_variant_list[0], {weight_attribute.pk: [first_value]}
    )
    associate_attribute_values_to_instance(
        product_variant_list[1], {weight_attribute.pk: [second_value]}
    )

    variables = {
        "where": {"attributes": [attribute_input]},
        "channel": channel_USD.slug,
    }

    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )

    # then
    content = get_graphql_content(response)
    edges = content["data"]["productVariants"]["edges"]
    assert len(edges) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/product/tests/queries/variants_where/test_over_attributes.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 139,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/product/tests/queries/variants_where/test_over_attributes_boolean.py | import graphene
import pytest
from ......attribute import AttributeInputType, AttributeType
from ......attribute.models import Attribute, AttributeValue
from ......attribute.utils import associate_attribute_values_to_instance
from .....tests.utils import get_graphql_content
from .shared import PRODUCT_VARIANTS_WHERE_QUERY
@pytest.mark.parametrize(
    "boolean_input",
    [
        {"value": {"boolean": True}},
        {"value": {"name": {"eq": "True-name"}}},
        {"value": {"slug": {"eq": "true_slug"}}},
        {"value": {"name": {"oneOf": ["True-name", "non-existing"]}}},
        {"value": {"slug": {"oneOf": ["true_slug"]}}},
        {"slug": "b_s", "value": {"boolean": True}},
        {"slug": "b_s", "value": {"name": {"eq": "True-name"}}},
        {"slug": "b_s", "value": {"slug": {"eq": "true_slug"}}},
        {"slug": "b_s", "value": {"name": {"oneOf": ["True-name", "non-existing"]}}},
        {"slug": "b_s", "value": {"slug": {"oneOf": ["true_slug"]}}},
    ],
)
def test_product_variants_query_with_attribute_value_boolean(
    boolean_input,
    staff_api_client,
    product_variant_list,
    boolean_attribute,
    channel_USD,
):
    """Filter variants by a boolean attribute value (flag, name, or slug).

    Only variant_1 carries the True value of the filtered attribute; variant_2
    carries a False value of a *different* boolean attribute, so every
    parametrized filter must match exactly one variant.
    """
    # given
    product = product_variant_list[0].product
    product_type = product.product_type
    boolean_attribute.slug = "b_s"
    boolean_attribute.save()

    # A second boolean attribute acts as a decoy: its value must never match
    # any of the parametrized filters.
    second_attribute = Attribute.objects.create(
        slug="s_boolean",
        name="Boolean",
        type=AttributeType.PRODUCT_TYPE,
        input_type=AttributeInputType.BOOLEAN,
    )
    product_type.variant_attributes.add(boolean_attribute, second_attribute)

    true_value = boolean_attribute.values.filter(boolean=True).first()
    true_value.name = "True-name"
    true_value.slug = "true_slug"
    true_value.save()

    variant_1 = product_variant_list[0]
    associate_attribute_values_to_instance(
        variant_1, {boolean_attribute.pk: [true_value]}
    )

    variant_2 = product_variant_list[1]
    # Fix: the value is boolean=False with a "_false" slug, so label it "No"
    # instead of the contradictory "Yes" the fixture previously used.
    value_for_second_attr = AttributeValue.objects.create(
        attribute=second_attribute,
        name=f"{second_attribute.name}: No",
        slug=f"{second_attribute.id}_false",
        boolean=False,
    )
    associate_attribute_values_to_instance(
        variant_2, {second_attribute.pk: [value_for_second_attr]}
    )

    variables = {"where": {"attributes": [boolean_input]}, "channel": channel_USD.slug}

    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )

    # then
    content = get_graphql_content(response)
    variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(variants_nodes) == 1
    assert variants_nodes[0]["node"]["id"] == graphene.Node.to_global_id(
        "ProductVariant", variant_1.pk
    )
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/product/tests/queries/variants_where/test_over_attributes_boolean.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 72,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
saleor/saleor:saleor/graphql/product/tests/queries/variants_where/test_over_attributes_date.py | import datetime
import pytest
from ......attribute import AttributeInputType, AttributeType
from ......attribute.models import Attribute
from ......attribute.utils import associate_attribute_values_to_instance
from .....tests.utils import get_graphql_content
from .shared import PRODUCT_VARIANTS_WHERE_QUERY
@pytest.mark.parametrize(
    ("date_input", "expected_count"),
    [
        ({"slug": "date", "value": {"date": {"gte": "2021-01-01"}}}, 1),
        ({"slug": "date", "value": {"name": {"eq": "date-name-1"}}}, 1),
        ({"slug": "date", "value": {"slug": {"eq": "date-slug-1"}}}, 1),
        (
            {
                "slug": "date",
                "value": {"name": {"oneOf": ["date-name-1", "date-name-2"]}},
            },
            1,
        ),
        (
            {
                "slug": "date",
                "value": {"slug": {"oneOf": ["date-slug-1", "date-slug-2"]}},
            },
            1,
        ),
        (
            {
                "slug": "date",
                "value": {"date": {"gte": "2021-01-02", "lte": "2021-01-03"}},
            },
            1,
        ),
        ({"value": {"date": {"gte": "2021-01-01"}}}, 2),
        ({"value": {"name": {"eq": "date-name-1"}}}, 1),
        ({"value": {"slug": {"eq": "date-slug-1"}}}, 1),
        ({"value": {"name": {"oneOf": ["date-name-1", "date-name-2"]}}}, 2),
        ({"value": {"slug": {"oneOf": ["date-slug-1", "date-slug-2"]}}}, 2),
        ({"value": {"date": {"gte": "2021-01-01", "lte": "2021-01-02"}}}, 1),
    ],
)
def test_product_variants_query_with_attribute_value_date(
    date_input,
    expected_count,
    staff_api_client,
    product_variant_list,
    date_attribute,
    channel_USD,
):
    """Filter variants by a date attribute value (range, name, or slug).

    variant_1 holds a 2021-01-03 value of the "date" attribute; variant_2
    holds a 2021-01-02 value of a second date attribute. Filters scoped to
    the "date" slug match only variant_1; unscoped filters may match both.
    """
    # given
    product = product_variant_list[0].product
    product_type = product.product_type
    # Fix: use the AttributeType constant instead of the raw string
    # "PRODUCT_TYPE", which is not a valid choice value and was inconsistent
    # with the second attribute created below.
    date_attribute.type = AttributeType.PRODUCT_TYPE
    date_attribute.slug = "date"
    date_attribute.save()

    second_date_attribute = Attribute.objects.create(
        slug="second_date",
        name="Second date",
        type=AttributeType.PRODUCT_TYPE,
        input_type=AttributeInputType.DATE,
    )
    product_type.variant_attributes.add(date_attribute, second_date_attribute)

    attr_value_1 = date_attribute.values.first()
    attr_value_1.date_time = datetime.datetime(2021, 1, 3, tzinfo=datetime.UTC)
    attr_value_1.name = "date-name-1"
    attr_value_1.slug = "date-slug-1"
    attr_value_1.save()

    variant_1 = product_variant_list[0]
    associate_attribute_values_to_instance(
        variant_1, {date_attribute.pk: [attr_value_1]}
    )

    second_attr_value = second_date_attribute.values.create(
        date_time=datetime.datetime(2021, 1, 2, tzinfo=datetime.UTC),
        name="date-name-2",
        slug="date-slug-2",
    )
    variant_2 = product_variant_list[1]
    associate_attribute_values_to_instance(
        variant_2, {second_date_attribute.pk: [second_attr_value]}
    )

    variables = {"where": {"attributes": [date_input]}, "channel": channel_USD.slug}

    # when
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANTS_WHERE_QUERY,
        variables,
    )

    # then
    content = get_graphql_content(response)
    variants_nodes = content["data"]["productVariants"]["edges"]
    assert len(variants_nodes) == expected_count
| {
"repo_id": "saleor/saleor",
"file_path": "saleor/graphql/product/tests/queries/variants_where/test_over_attributes_date.py",
"license": "BSD 3-Clause \"New\" or \"Revised\" License",
"lines": 91,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.