id int64 0 190k | prompt stringlengths 21 13.4M | docstring stringlengths 1 12k ⌀ |
|---|---|---|
154,435 | from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
import hashlib
import logging
import re
import string
from typing import Any, Literal
from waffle import get_waffle_flag_model
import django_ftl
import phonenumbers
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.forms import model_to_dict
from drf_spectacular.utils import extend_schema, OpenApiParameter
from rest_framework import (
decorators,
permissions,
response,
throttling,
viewsets,
exceptions,
)
from rest_framework.generics import get_object_or_404
from rest_framework.request import Request
from twilio.base.exceptions import TwilioRestException
from waffle import flag_is_active
from api.views import SaveToRequestUser
from emails.utils import incr_if_enabled
from phones.iq_utils import send_iq_sms
from phones.apps import phones_config, twilio_client
from phones.models import (
InboundContact,
RealPhone,
RelayNumber,
get_last_text_sender,
get_pending_unverified_realphone_records,
get_valid_realphone_verification_record,
get_verified_realphone_record,
get_verified_realphone_records,
send_welcome_message,
suggested_numbers,
location_numbers,
area_code_numbers,
)
from privaterelay.ftl_bundles import main as ftl_bundle
from ..exceptions import ConflictError, ErrorContextType
from ..permissions import HasPhoneService
from ..renderers import (
TemplateTwiMLRenderer,
vCardRenderer,
)
from ..serializers.phones import (
InboundContactSerializer,
RealPhoneSerializer,
RelayNumberSerializer,
)
def message_body(from_num, body):
    """Prefix *body* with the Relay tag identifying the original sender."""
    prefix = f"[Relay 📲 {from_num}] "
    return prefix + body
def _get_user_error_message(real_phone: RealPhone, sms_exception) -> Any:
    """Build a translated SMS error message for the phone's owner.

    The same message is also logged in English so operators can search for it.
    """
    ftl_code = sms_exception.get_codes().replace("_", "-")
    ftl_id = f"sms-error-{ftl_code}"
    # log the error in English
    with django_ftl.override("en"):
        logger.exception(ftl_bundle.format(ftl_id, sms_exception.error_context()))
    # Translate into the user's configured language for the reply text.
    with django_ftl.override(real_phone.user.profile.language):
        return ftl_bundle.format(ftl_id, sms_exception.error_context())
def _get_phone_objects(inbound_to):
    """Look up the RelayNumber for *inbound_to* and its owner's verified RealPhone.

    Raises:
        exceptions.ValidationError: if either record does not exist.
    """
    try:
        relay_number = RelayNumber.objects.get(number=inbound_to)
        real_phone = RealPhone.objects.get(user=relay_number.user, verified=True)
    except ObjectDoesNotExist as exc:
        # Chain the original lookup error so debugging keeps the real cause
        # (PEP 3134; flake8 B904).
        raise exceptions.ValidationError("Could not find relay number.") from exc
    return relay_number, real_phone
class RelaySMSException(Exception):
    """
    Base class for exceptions when handling SMS messages.
    Modeled after restframework.APIException, but without a status_code.
    TODO MPP-3722: Refactor to a common base class with api.exceptions.RelayAPIException
    """

    critical: bool
    default_code: str
    default_detail: str | None = None
    default_detail_template: str | None = None

    def __init__(self, critical=False, *args, **kwargs):
        self.critical = critical
        # Subclasses must set exactly one of default_detail /
        # default_detail_template (i.e. an exclusive-or).
        has_detail = self.default_detail is not None
        has_template = self.default_detail_template is not None
        assert has_detail != has_template
        super().__init__(*args, **kwargs)

    def detail(self):
        """Return the human-readable message, formatting the template if set."""
        if self.default_detail:
            return self.default_detail
        assert self.default_detail_template is not None
        return self.default_detail_template.format(**self.error_context())

    def get_codes(self):
        """Return the machine-readable error code for this exception."""
        return self.default_code

    def error_context(self) -> ErrorContextType:
        """Return context variables for client-side translation."""
        return {}
def _prepare_sms_reply(
    relay_number: RelayNumber, inbound_body: str
) -> tuple[RelayNumber, str, str]:
    """Resolve the destination number and outgoing body for a user's reply.

    Raises RelaySMSException subclasses for each way the reply can fail: no
    phone log, a prefix matching zero or multiple contacts, no previous
    sender, or an empty body after the prefix.
    """
    incr_if_enabled("phones_handle_sms_reply")

    # Without stored contacts there is no way to know who to reply to.
    if not relay_number.storing_phone_log:
        raise NoPhoneLog(critical=True)

    match = _match_senders_by_prefix(relay_number, inbound_body)

    # A prefix that matches no contact, or several, is an error.
    if match and not match.contacts and match.match_type == "short":
        raise ShortPrefixMatchesNoSenders(short_prefix=match.detected)
    if match and not match.contacts and match.match_type == "full":
        raise FullNumberMatchesNoSenders(full_number=match.detected)
    if match and len(match.contacts) > 1:
        assert match.match_type == "short"
        raise MultipleNumberMatches(short_prefix=match.detected)

    if match:
        # Exactly one contact matched the prefix; send to it, prefix stripped.
        assert len(match.contacts) == 1
        destination_number: str | None = match.contacts[0].inbound_number
        body = inbound_body.removeprefix(match.prefix)
    else:
        # No prefix: default to whoever texted last, if anyone has.
        last_sender = get_last_text_sender(relay_number)
        destination_number = getattr(last_sender, "inbound_number", None)
        if destination_number is None:
            raise NoPreviousSender(critical=True)
        body = inbound_body

    # A prefix with nothing after it is an error.
    if match and not body and match.match_type == "short":
        raise NoBodyAfterShortPrefix(short_prefix=match.detected)
    if match and not body and match.match_type == "full":
        raise NoBodyAfterFullNumber(full_number=match.detected)

    return (relay_number, destination_number, body)
def _check_disabled(relay_number, contact_type):
# Check if RelayNumber is disabled
if not relay_number.enabled:
attr = f"{contact_type}_blocked"
incr_if_enabled(f"phones_{contact_type}_global_blocked")
setattr(relay_number, attr, getattr(relay_number, attr) + 1)
relay_number.save()
return True
def _check_remaining(relay_number, resource_type):
    """Ensure the owner still has phone service and remaining texts/seconds."""
    # The subscription may have lapsed since the number was provisioned.
    if not relay_number.user.profile.has_phone:
        raise exceptions.ValidationError("Number owner does not have phone service")
    if getattr(relay_number, f"remaining_{resource_type}") <= 0:
        incr_if_enabled(f"phones_out_of_{resource_type}")
        raise exceptions.ValidationError(f"Number is out of {resource_type}.")
    return True
def _get_inbound_contact(relay_number, inbound_from):
    """Return the InboundContact for this sender, or None when logging is off."""
    if not relay_number.storing_phone_log:
        # The user opted out of storing contacts; nothing to look up.
        return None
    contact, _created = InboundContact.objects.get_or_create(
        relay_number=relay_number, inbound_number=inbound_from
    )
    return contact
def _check_and_update_contact(inbound_contact, contact_type, relay_number):
if inbound_contact.blocked:
incr_if_enabled(f"phones_{contact_type}_specific_blocked")
contact_attr = f"num_{contact_type}_blocked"
setattr(
inbound_contact, contact_attr, getattr(inbound_contact, contact_attr) + 1
)
inbound_contact.save()
relay_attr = f"{contact_type}_blocked"
setattr(relay_number, relay_attr, getattr(relay_number, relay_attr) + 1)
relay_number.save()
raise exceptions.ValidationError(f"Number is not accepting {contact_type}.")
inbound_contact.last_inbound_date = datetime.now(timezone.utc)
singular_contact_type = contact_type[:-1] # strip trailing "s"
inbound_contact.last_inbound_type = singular_contact_type
attr = f"num_{contact_type}"
setattr(inbound_contact, attr, getattr(inbound_contact, attr) + 1)
last_date_attr = f"last_{singular_contact_type}_date"
setattr(inbound_contact, last_date_attr, inbound_contact.last_inbound_date)
inbound_contact.save()
def _validate_iq_request(request: Request) -> None:
    """Authenticate an inbound iQ webhook request via its signature headers.

    Raises:
        exceptions.AuthenticationFailed: if the VerificationToken or
            MessageId header is missing, or the token does not match the
            MAC computed from the MessageId.
    """
    # Django's HttpHeaders lookups are case-insensitive, so the original's
    # mixed spellings ("Verificationtoken" / "verificationToken") worked,
    # but one consistent spelling is used here for readability.
    headers = request._request.headers
    if "VerificationToken" not in headers:
        raise exceptions.AuthenticationFailed("missing Verificationtoken header.")
    if "MessageId" not in headers:
        raise exceptions.AuthenticationFailed("missing MessageId header.")
    mac = compute_iq_mac(headers["MessageId"])
    token = headers["VerificationToken"]
    if mac != token:
        # Bug fix: error message previously misspelled "verficiationToken".
        raise exceptions.AuthenticationFailed("verificationToken != computed sha256")
def incr_if_enabled(name, value=1, tags=None):
    """Increment the statsd counter *name*, but only when statsd is enabled."""
    if not settings.STATSD_ENABLED:
        return
    metrics.incr(name, value, tags)
def send_iq_sms(to_num: str, from_num: str, text: str) -> None:
    """Send *text* from *from_num* to *to_num* through the iQ publish API."""
    # iQ expects bare digits, without the leading "+" of E.164 numbers.
    payload = {
        "from": from_num.replace("+", ""),
        "to": [to_num.replace("+", "")],
        "text": text,
    }
    resp = requests.post(
        settings.IQ_PUBLISH_MESSAGE_URL,
        headers={"Authorization": f"Bearer {settings.IQ_OUTBOUND_API_KEY}"},
        json=payload,
    )
    # Surface any non-2xx response as a validation error carrying iQ's body.
    if not (200 <= resp.status_code <= 299):
        raise exceptions.ValidationError(json.loads(resp.content.decode()))
def inbound_sms_iq(request: Request) -> response.Response:
    """Handle an inbound SMS webhook from iQ.

    A text from the Relay user to their own mask is treated as an outbound
    reply; any other sender's text is forwarded to the user's real phone.
    Remaining-text and forwarded counters are updated on success.

    Raises:
        exceptions.AuthenticationFailed: if the iQ signature is invalid.
        exceptions.ValidationError: on missing fields, unknown numbers,
            exhausted texts, or a critical reply error.
    """
    incr_if_enabled("phones_inbound_sms_iq")
    _validate_iq_request(request)

    inbound_body = request.data.get("text", None)
    inbound_from = request.data.get("from", None)
    inbound_to = request.data.get("to", None)
    if inbound_body is None or inbound_from is None or inbound_to is None:
        raise exceptions.ValidationError("Request missing from, to, or text.")

    # Normalize both numbers to E.164 before any lookups. iQ sends "to" as
    # a list; only the first recipient is used.
    from_num = phonenumbers.format_number(
        phonenumbers.parse(inbound_from, "US"),
        phonenumbers.PhoneNumberFormat.E164,
    )
    single_num = inbound_to[0]
    relay_num = phonenumbers.format_number(
        phonenumbers.parse(single_num, "US"), phonenumbers.PhoneNumberFormat.E164
    )

    relay_number, real_phone = _get_phone_objects(relay_num)
    _check_remaining(relay_number, "texts")

    if from_num == real_phone.number:
        # The Relay user texted their own mask: send an outbound reply.
        try:
            relay_number, destination_number, body = _prepare_sms_reply(
                relay_number, inbound_body
            )
            send_iq_sms(destination_number, relay_number.number, body)
            relay_number.remaining_texts -= 1
            relay_number.texts_forwarded += 1
            relay_number.save()
            incr_if_enabled("phones_send_sms_reply_iq")
        except RelaySMSException as sms_exception:
            user_error_message = _get_user_error_message(real_phone, sms_exception)
            send_iq_sms(real_phone.number, relay_number.number, user_error_message)
            # Return 400 on critical exceptions
            if sms_exception.critical:
                # Bug fix: detail is a method; the original passed the bound
                # method object instead of the message it returns.
                raise exceptions.ValidationError(
                    sms_exception.detail()
                ) from sms_exception
        return response.Response(
            status=200,
            template_name="twiml_empty_response.xml",
        )

    number_disabled = _check_disabled(relay_number, "texts")
    if number_disabled:
        return response.Response(status=200)

    inbound_contact = _get_inbound_contact(relay_number, from_num)
    if inbound_contact:
        _check_and_update_contact(inbound_contact, "texts", relay_number)

    # Forward the text to the user's real phone with the Relay sender tag.
    text = message_body(inbound_from, inbound_body)
    send_iq_sms(real_phone.number, relay_number.number, text)
    relay_number.remaining_texts -= 1
    relay_number.texts_forwarded += 1
    relay_number.save()
    return response.Response(status=200)
154,436 | from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
import hashlib
import logging
import re
import string
from typing import Any, Literal
from waffle import get_waffle_flag_model
import django_ftl
import phonenumbers
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.forms import model_to_dict
from drf_spectacular.utils import extend_schema, OpenApiParameter
from rest_framework import (
decorators,
permissions,
response,
throttling,
viewsets,
exceptions,
)
from rest_framework.generics import get_object_or_404
from rest_framework.request import Request
from twilio.base.exceptions import TwilioRestException
from waffle import flag_is_active
from api.views import SaveToRequestUser
from emails.utils import incr_if_enabled
from phones.iq_utils import send_iq_sms
from phones.apps import phones_config, twilio_client
from phones.models import (
InboundContact,
RealPhone,
RelayNumber,
get_last_text_sender,
get_pending_unverified_realphone_records,
get_valid_realphone_verification_record,
get_verified_realphone_record,
get_verified_realphone_records,
send_welcome_message,
suggested_numbers,
location_numbers,
area_code_numbers,
)
from privaterelay.ftl_bundles import main as ftl_bundle
from ..exceptions import ConflictError, ErrorContextType
from ..permissions import HasPhoneService
from ..renderers import (
TemplateTwiMLRenderer,
vCardRenderer,
)
from ..serializers.phones import (
InboundContactSerializer,
RealPhoneSerializer,
RelayNumberSerializer,
)
def _get_phone_objects(inbound_to):
def _check_disabled(relay_number, contact_type):
def _check_remaining(relay_number, resource_type):
def _get_inbound_contact(relay_number, inbound_from):
def _check_and_update_contact(inbound_contact, contact_type, relay_number):
def _validate_twilio_request(request):
def incr_if_enabled(name, value=1, tags=None):
def inbound_call(request):
    """Handle an inbound call webhook: screen it, then dial the real phone."""
    incr_if_enabled("phones_inbound_call")
    _validate_twilio_request(request)

    inbound_from = request.data.get("Caller", None)
    inbound_to = request.data.get("Called", None)
    if inbound_from is None or inbound_to is None:
        raise exceptions.ValidationError("Call data missing Caller or Called.")

    relay_number, real_phone = _get_phone_objects(inbound_to)

    if _check_disabled(relay_number, "calls"):
        # Play a "not available" message instead of connecting the call.
        say = "Sorry, that number is not available."
        return response.Response(
            {"say": say}, status=200, template_name="twiml_blocked.xml"
        )

    _check_remaining(relay_number, "seconds")

    inbound_contact = _get_inbound_contact(relay_number, inbound_from)
    if inbound_contact:
        _check_and_update_contact(inbound_contact, "calls", relay_number)

    relay_number.calls_forwarded += 1
    relay_number.save()

    # Note: TemplateTwiMLRenderer will render this as TwiML
    incr_if_enabled("phones_outbound_call")
    return response.Response(
        {"inbound_from": inbound_from, "real_number": real_phone.number},
        status=201,
        template_name="twiml_dial.xml",
    )
154,437 | from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
import hashlib
import logging
import re
import string
from typing import Any, Literal
from waffle import get_waffle_flag_model
import django_ftl
import phonenumbers
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.forms import model_to_dict
from drf_spectacular.utils import extend_schema, OpenApiParameter
from rest_framework import (
decorators,
permissions,
response,
throttling,
viewsets,
exceptions,
)
from rest_framework.generics import get_object_or_404
from rest_framework.request import Request
from twilio.base.exceptions import TwilioRestException
from waffle import flag_is_active
from api.views import SaveToRequestUser
from emails.utils import incr_if_enabled
from phones.iq_utils import send_iq_sms
from phones.apps import phones_config, twilio_client
from phones.models import (
InboundContact,
RealPhone,
RelayNumber,
get_last_text_sender,
get_pending_unverified_realphone_records,
get_valid_realphone_verification_record,
get_verified_realphone_record,
get_verified_realphone_records,
send_welcome_message,
suggested_numbers,
location_numbers,
area_code_numbers,
)
from privaterelay.ftl_bundles import main as ftl_bundle
from ..exceptions import ConflictError, ErrorContextType
from ..permissions import HasPhoneService
from ..renderers import (
TemplateTwiMLRenderer,
vCardRenderer,
)
from ..serializers.phones import (
InboundContactSerializer,
RealPhoneSerializer,
RelayNumberSerializer,
)
info_logger = logging.getLogger("eventsinfo")
def _get_phone_objects(inbound_to):
    """Fetch the RelayNumber for *inbound_to* and its owner's verified RealPhone.

    Raises:
        exceptions.ValidationError: if either record does not exist.
    """
    try:
        relay_number = RelayNumber.objects.get(number=inbound_to)
        real_phone = RealPhone.objects.get(user=relay_number.user, verified=True)
    except ObjectDoesNotExist as exc:
        # Keep the original lookup failure in the exception chain (B904).
        raise exceptions.ValidationError("Could not find relay number.") from exc
    return relay_number, real_phone
def _validate_twilio_request(request):
    """Verify the X-Twilio-Signature header on an incoming Twilio webhook."""
    if "X-Twilio-Signature" not in request._request.headers:
        raise exceptions.ValidationError(
            "Invalid request: missing X-Twilio-Signature header."
        )

    # Twilio signs the full request URL plus the POST parameters sorted
    # by key; rebuild the same inputs for validation.
    url = request._request.build_absolute_uri()
    sorted_params = {key: request.data.get(key) for key in sorted(request.data)}
    request_signature = request._request.headers["X-Twilio-Signature"]
    validator = twilio_validator()
    if not validator.validate(url, sorted_params, request_signature):
        incr_if_enabled("phones_invalid_twilio_signature")
        raise exceptions.ValidationError("Invalid request: invalid signature")
def incr_if_enabled(name, value=1, tags=None):
    """Send a statsd increment for *name* unless statsd is disabled."""
    if not settings.STATSD_ENABLED:
        return
    metrics.incr(name, value, tags)
def twilio_client() -> Client:
    """Return the app-wide Twilio client from the phones app config.

    The assert blocks real client use when PHONES_NO_CLIENT_CALLS_IN_TEST
    is set — presumably so tests never make billable Twilio calls.
    """
    assert not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
    return phones_config().twilio_client
def voice_status(request):
    """Handle Twilio's call-status webhook; deduct used seconds on completion.

    After recording the duration, the completed call is deleted from Twilio
    (presumably so call records are not retained — confirm intent).

    Raises:
        exceptions.ValidationError: if required fields are missing.
    """
    incr_if_enabled("phones_voice_status")
    _validate_twilio_request(request)

    call_sid = request.data.get("CallSid", None)
    called = request.data.get("Called", None)
    call_status = request.data.get("CallStatus", None)
    if call_sid is None or called is None or call_status is None:
        # Bug fix: the old message omitted CallSid, which is also required.
        raise exceptions.ValidationError(
            "Call data missing CallSid, Called, or CallStatus"
        )
    if call_status != "completed":
        return response.Response(status=200)

    call_duration = request.data.get("CallDuration", None)
    if call_duration is None:
        raise exceptions.ValidationError("completed call data missing CallDuration")

    relay_number, _ = _get_phone_objects(called)
    relay_number.remaining_seconds = relay_number.remaining_seconds - int(call_duration)
    relay_number.save()

    if relay_number.remaining_seconds < 0:
        # The user ran over their limit mid-call; log it for monitoring.
        info_logger.info(
            "phone_limit_exceeded",
            extra={
                "fxa_uid": relay_number.user.profile.fxa.uid,
                "call_duration_in_seconds": int(call_duration),
                "relay_number_enabled": relay_number.enabled,
                "remaining_seconds": relay_number.remaining_seconds,
                "remaining_minutes": relay_number.remaining_minutes,
            },
        )

    client = twilio_client()
    client.calls(call_sid).delete()
    return response.Response(status=200)
154,438 | from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
import hashlib
import logging
import re
import string
from typing import Any, Literal
from waffle import get_waffle_flag_model
import django_ftl
import phonenumbers
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.forms import model_to_dict
from drf_spectacular.utils import extend_schema, OpenApiParameter
from rest_framework import (
decorators,
permissions,
response,
throttling,
viewsets,
exceptions,
)
from rest_framework.generics import get_object_or_404
from rest_framework.request import Request
from twilio.base.exceptions import TwilioRestException
from waffle import flag_is_active
from api.views import SaveToRequestUser
from emails.utils import incr_if_enabled
from phones.iq_utils import send_iq_sms
from phones.apps import phones_config, twilio_client
from phones.models import (
InboundContact,
RealPhone,
RelayNumber,
get_last_text_sender,
get_pending_unverified_realphone_records,
get_valid_realphone_verification_record,
get_verified_realphone_record,
get_verified_realphone_records,
send_welcome_message,
suggested_numbers,
location_numbers,
area_code_numbers,
)
from privaterelay.ftl_bundles import main as ftl_bundle
from ..exceptions import ConflictError, ErrorContextType
from ..permissions import HasPhoneService
from ..renderers import (
TemplateTwiMLRenderer,
vCardRenderer,
)
from ..serializers.phones import (
InboundContactSerializer,
RealPhoneSerializer,
RelayNumberSerializer,
)
def _try_delete_from_twilio(message):
def _validate_twilio_request(request):
def twilio_client() -> Client:
def sms_status(request):
    """Handle Twilio's SMS status callback; delete delivered messages."""
    _validate_twilio_request(request)
    # Renamed local so it no longer shadows this function's own name.
    delivery_status = request.data.get("SmsStatus", None)
    message_sid = request.data.get("MessageSid", None)
    if delivery_status is None or message_sid is None:
        raise exceptions.ValidationError(
            "Text status data missing SmsStatus or MessageSid"
        )
    if delivery_status != "delivered":
        return response.Response(status=200)
    message = twilio_client().messages(message_sid)
    _try_delete_from_twilio(message)
    return response.Response(status=200)
154,439 | from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
import hashlib
import logging
import re
import string
from typing import Any, Literal
from waffle import get_waffle_flag_model
import django_ftl
import phonenumbers
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.forms import model_to_dict
from drf_spectacular.utils import extend_schema, OpenApiParameter
from rest_framework import (
decorators,
permissions,
response,
throttling,
viewsets,
exceptions,
)
from rest_framework.generics import get_object_or_404
from rest_framework.request import Request
from twilio.base.exceptions import TwilioRestException
from waffle import flag_is_active
from api.views import SaveToRequestUser
from emails.utils import incr_if_enabled
from phones.iq_utils import send_iq_sms
from phones.apps import phones_config, twilio_client
from phones.models import (
InboundContact,
RealPhone,
RelayNumber,
get_last_text_sender,
get_pending_unverified_realphone_records,
get_valid_realphone_verification_record,
get_verified_realphone_record,
get_verified_realphone_records,
send_welcome_message,
suggested_numbers,
location_numbers,
area_code_numbers,
)
from privaterelay.ftl_bundles import main as ftl_bundle
from ..exceptions import ConflictError, ErrorContextType
from ..permissions import HasPhoneService
from ..renderers import (
TemplateTwiMLRenderer,
vCardRenderer,
)
from ..serializers.phones import (
InboundContactSerializer,
RealPhoneSerializer,
RelayNumberSerializer,
)
def _validate_number(request, number_field="number"):
    """Parse and validate the phone number in ``request.data[number_field]``.

    Returns the number-details object for the E.164 form of the number.
    Raises ValidationError when the field is missing, the number cannot be
    parsed, details cannot be fetched, or the country is unsupported.
    """
    if number_field not in request.data:
        raise exceptions.ValidationError({number_field: "A number is required."})

    detected_country = getattr(request, "country", None)
    parsed_number = _parse_number(request.data[number_field], detected_country)
    if not parsed_number:
        raise exceptions.ValidationError(
            "number must be in E.164 format, or in local national format of the"
            f" country detected: {detected_country}"
        )

    # Rebuild a canonical E.164 string from the parsed parts.
    e164_number = f"+{parsed_number.country_code}{parsed_number.national_number}"
    number_details = _get_number_details(e164_number)
    if not number_details:
        raise exceptions.ValidationError(
            f"Could not get number details for {e164_number}"
        )

    if number_details.country_code.upper() not in settings.TWILIO_ALLOWED_COUNTRY_CODES:
        incr_if_enabled("phones_validate_number_unsupported_country")
        raise exceptions.ValidationError(
            "Relay Phone is currently only available for these country codes: "
            f"{sorted(settings.TWILIO_ALLOWED_COUNTRY_CODES)!r}. "
            "Your phone number country code is: "
            f"'{number_details.country_code.upper()}'."
        )
    return number_details
def twilio_client() -> Client:
    """Return the shared Twilio client.

    Asserts PHONES_NO_CLIENT_CALLS_IN_TEST is unset — a guard against
    making real (billable) Twilio API calls from test runs.
    """
    assert not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
    return phones_config().twilio_client
class RealPhone(models.Model):
    """A user's real phone number, verified before a Relay mask is issued.

    The DB constraint allows at most one *verified* row per number, and
    save() enforces at most one verified number per user.
    """

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    number = models.CharField(max_length=15)
    # Code sent to the user; compared in save() to detect re-verification
    # of the same number.
    verification_code = models.CharField(
        max_length=8, default=verification_code_default
    )
    verification_sent_date = models.DateTimeField(
        blank=True, null=True, default=verification_sent_date_default
    )
    verified = models.BooleanField(default=False)
    verified_date = models.DateTimeField(blank=True, null=True)
    country_code = models.CharField(max_length=2, default="US")

    class Meta:
        constraints = [
            # A number can be verified by only one account at a time.
            models.UniqueConstraint(
                fields=["number", "verified"],
                condition=models.Q(verified=True),
                name="unique_verified_number",
            )
        ]

    def save(self, *args, **kwargs):
        """Save the record, enforcing one verified number per user.

        Raises:
            BadRequest: if the user already has a different verified number.
        """
        # delete any expired unverified RealPhone records for this number
        # note: it doesn't matter which user is trying to create a new
        # RealPhone record - any expired unverified record for the number
        # should be deleted
        expired_verification_records = get_expired_unverified_realphone_records(
            self.number
        )
        expired_verification_records.delete()
        # We are not ready to support multiple real phone numbers per user,
        # so raise an exception if this save() would create a second
        # RealPhone record for the user
        user_verified_number_records = get_verified_realphone_records(self.user)
        for verified_number in user_verified_number_records:
            if (
                verified_number.number == self.number
                and verified_number.verification_code == self.verification_code
            ):
                # User is verifying the same number twice
                return super().save(*args, **kwargs)
            else:
                raise BadRequest("User already has a verified number.")
        # call super save to save into the DB
        # See also: realphone_post_save receiver below
        return super().save(*args, **kwargs)

    def mark_verified(self):
        """Flag this number as verified and record the verification time."""
        incr_if_enabled("phones_RealPhone.mark_verified")
        self.verified = True
        self.verified_date = datetime.now(timezone.utc)
        self.save(force_update=True)
        return self
class RelayNumber(models.Model):
    """A Relay phone mask: the vendor-provisioned number that fronts a RealPhone."""

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    number = models.CharField(max_length=15, db_index=True, unique=True)
    vendor = models.CharField(max_length=15, default="twilio")
    location = models.CharField(max_length=255)
    country_code = models.CharField(max_length=2, default="US")
    vcard_lookup_key = models.CharField(
        max_length=6, default=vcard_lookup_key_default, unique=True
    )
    enabled = models.BooleanField(default=True)
    # Call budget is stored in seconds; see remaining_minutes below.
    remaining_seconds = models.IntegerField(
        default=settings.MAX_MINUTES_PER_BILLING_CYCLE * 60
    )
    remaining_texts = models.IntegerField(default=settings.MAX_TEXTS_PER_BILLING_CYCLE)
    calls_forwarded = models.IntegerField(default=0)
    calls_blocked = models.IntegerField(default=0)
    texts_forwarded = models.IntegerField(default=0)
    texts_blocked = models.IntegerField(default=0)
    created_at = models.DateTimeField(null=True, auto_now_add=True)

    def remaining_minutes(self):
        """Remaining whole minutes of call budget, never negative."""
        # return a 0 or positive int for remaining minutes
        return floor(max(self.remaining_seconds, 0) / 60)

    def calls_and_texts_forwarded(self):
        """Total forwarded calls plus forwarded texts."""
        return self.calls_forwarded + self.texts_forwarded

    def calls_and_texts_blocked(self):
        """Total blocked calls plus blocked texts."""
        return self.calls_blocked + self.texts_blocked

    def storing_phone_log(self) -> bool:
        """Whether the owner opted in to storing inbound contacts."""
        return bool(self.user.profile.store_phone_log)

    def save(self, *args, **kwargs):
        """Save the mask, provisioning the number with Twilio on first save.

        Raises:
            ValidationError: if the user has no verified real phone, already
                has a mask, or the number is claimed by another user.
        """
        realphone = get_verified_realphone_records(self.user).first()
        if not realphone:
            raise ValidationError("User does not have a verified real phone.")
        # if this number exists for this user, this is an update call
        existing_numbers = RelayNumber.objects.filter(user=self.user)
        this_number = existing_numbers.filter(number=self.number).first()
        if this_number and this_number.id == self.id:
            return super().save(*args, **kwargs)
        elif existing_numbers.exists():
            raise ValidationError("User can have only one relay number.")
        if RelayNumber.objects.filter(number=self.number).exists():
            raise ValidationError("This number is already claimed.")
        use_twilio = (
            self.vendor == "twilio" and not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
        )
        if use_twilio:
            # Before saving into DB provision the number in Twilio
            client = twilio_client()
            # Since this will charge the Twilio account, first see if this
            # is running with TEST creds to avoid charges.
            if settings.TWILIO_TEST_ACCOUNT_SID:
                client = phones_config().twilio_test_client
            # NOTE(review): voice_application_sid is set to the *SMS*
            # application SID — confirm both webhooks share one TwiML app.
            twilio_incoming_number = client.incoming_phone_numbers.create(
                phone_number=self.number,
                sms_application_sid=settings.TWILIO_SMS_APPLICATION_SID,
                voice_application_sid=settings.TWILIO_SMS_APPLICATION_SID,
            )
        # Assume number was selected through suggested_numbers, so same country
        # as realphone
        self.country_code = realphone.country_code.upper()
        # Add US numbers to the Relay messaging service, so it goes into our
        # US A2P 10DLC campaign
        if use_twilio and self.country_code == "US":
            if settings.TWILIO_MESSAGING_SERVICE_SID:
                register_with_messaging_service(client, twilio_incoming_number.sid)
            else:
                logger.warning(
                    "Skipping Twilio Messaging Service registration, since"
                    " TWILIO_MESSAGING_SERVICE_SID is empty.",
                    extra={"number_sid": twilio_incoming_number.sid},
                )
        return super().save(*args, **kwargs)
The provided code snippet includes necessary dependencies for implementing the `outbound_call` function. Write a Python function `def outbound_call(request)` to solve the following problem:
Make a call from the authenticated user's relay number.
Here is the function:
def outbound_call(request):
    """Make a call from the authenticated user's relay number."""
    # TODO: Create or update an OutboundContact (new model) on send, or limit
    # to InboundContacts.
    if not flag_is_active(request, "outbound_phone"):
        # Return Permission Denied error
        return response.Response(
            {"detail": "Requires outbound_phone waffle flag."}, status=403
        )

    try:
        real_phone = RealPhone.objects.get(user=request.user, verified=True)
    except RealPhone.DoesNotExist:
        return response.Response(
            {"detail": "Requires a verified real phone and phone mask."}, status=400
        )

    try:
        relay_number = RelayNumber.objects.get(user=request.user)
    except RelayNumber.DoesNotExist:
        return response.Response({"detail": "Requires a phone mask."}, status=400)

    client = twilio_client()
    to = _validate_number(request, "to")  # Raises ValidationError on invalid number

    # Announce the destination to the user, then bridge the call to it.
    twiml = (
        f"<Response><Say>Dialing {to.national_format} ...</Say>"
        f"<Dial>{to.phone_number}</Dial></Response>"
    )
    client.calls.create(twiml=twiml, to=real_phone.number, from_=relay_number.number)
    return response.Response(status=200)
154,440 | from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
import hashlib
import logging
import re
import string
from typing import Any, Literal
from waffle import get_waffle_flag_model
import django_ftl
import phonenumbers
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.forms import model_to_dict
from drf_spectacular.utils import extend_schema, OpenApiParameter
from rest_framework import (
decorators,
permissions,
response,
throttling,
viewsets,
exceptions,
)
from rest_framework.generics import get_object_or_404
from rest_framework.request import Request
from twilio.base.exceptions import TwilioRestException
from waffle import flag_is_active
from api.views import SaveToRequestUser
from emails.utils import incr_if_enabled
from phones.iq_utils import send_iq_sms
from phones.apps import phones_config, twilio_client
from phones.models import (
InboundContact,
RealPhone,
RelayNumber,
get_last_text_sender,
get_pending_unverified_realphone_records,
get_valid_realphone_verification_record,
get_verified_realphone_record,
get_verified_realphone_records,
send_welcome_message,
suggested_numbers,
location_numbers,
area_code_numbers,
)
from privaterelay.ftl_bundles import main as ftl_bundle
from ..exceptions import ConflictError, ErrorContextType
from ..permissions import HasPhoneService
from ..renderers import (
TemplateTwiMLRenderer,
vCardRenderer,
)
from ..serializers.phones import (
InboundContactSerializer,
RealPhoneSerializer,
RelayNumberSerializer,
)
def _validate_number(request, number_field="number"):
    """Validate and look up the phone number in ``request.data[number_field]``.

    Returns the number-details object for the E.164 form of the number.

    Raises:
        exceptions.ValidationError: when the field is missing, the number
            cannot be parsed, details cannot be fetched, or the number's
            country is not in TWILIO_ALLOWED_COUNTRY_CODES.
    """
    if number_field not in request.data:
        raise exceptions.ValidationError({number_field: "A number is required."})
    # Parse using the country detected from the request, if any.
    parsed_number = _parse_number(
        request.data[number_field], getattr(request, "country", None)
    )
    if not parsed_number:
        country = None
        if hasattr(request, "country"):
            country = request.country
        error_message = (
            "number must be in E.164 format, or in local national format of the"
            f" country detected: {country}"
        )
        raise exceptions.ValidationError(error_message)
    # Rebuild a canonical E.164 string from the parsed parts.
    e164_number = f"+{parsed_number.country_code}{parsed_number.national_number}"
    number_details = _get_number_details(e164_number)
    if not number_details:
        raise exceptions.ValidationError(
            f"Could not get number details for {e164_number}"
        )
    if number_details.country_code.upper() not in settings.TWILIO_ALLOWED_COUNTRY_CODES:
        incr_if_enabled("phones_validate_number_unsupported_country")
        raise exceptions.ValidationError(
            "Relay Phone is currently only available for these country codes: "
            f"{sorted(settings.TWILIO_ALLOWED_COUNTRY_CODES)!r}. "
            "Your phone number country code is: "
            f"'{number_details.country_code.upper()}'."
        )
    return number_details
def twilio_client() -> Client:
    """Return the phones app's Twilio client.

    The assert refuses to hand out a client when
    PHONES_NO_CLIENT_CALLS_IN_TEST is set, keeping test runs from making
    real Twilio API calls.
    """
    assert not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
    return phones_config().twilio_client
class RelayNumber(models.Model):
    """A Relay phone mask (virtual phone number) owned by a single user.

    Saving a new number requires a verified real phone, enforces one mask
    per user, and (for the "twilio" vendor) provisions the number with
    Twilio before the database row is written.
    """
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # E.164-formatted number, globally unique across all users.
    number = models.CharField(max_length=15, db_index=True, unique=True)
    vendor = models.CharField(max_length=15, default="twilio")
    location = models.CharField(max_length=255)
    country_code = models.CharField(max_length=2, default="US")
    # Random key used in the vCard lookup URL (vcard_lookup_key_default is
    # defined elsewhere in this module).
    vcard_lookup_key = models.CharField(
        max_length=6, default=vcard_lookup_key_default, unique=True
    )
    enabled = models.BooleanField(default=True)
    # Remaining call time this billing cycle, tracked in seconds.
    remaining_seconds = models.IntegerField(
        default=settings.MAX_MINUTES_PER_BILLING_CYCLE * 60
    )
    remaining_texts = models.IntegerField(default=settings.MAX_TEXTS_PER_BILLING_CYCLE)
    calls_forwarded = models.IntegerField(default=0)
    calls_blocked = models.IntegerField(default=0)
    texts_forwarded = models.IntegerField(default=0)
    texts_blocked = models.IntegerField(default=0)
    created_at = models.DateTimeField(null=True, auto_now_add=True)
    def remaining_minutes(self):
        """Whole minutes of call time left; never negative."""
        # return a 0 or positive int for remaining minutes
        return floor(max(self.remaining_seconds, 0) / 60)
    def calls_and_texts_forwarded(self):
        """Total forwarded calls plus forwarded texts."""
        return self.calls_forwarded + self.texts_forwarded
    def calls_and_texts_blocked(self):
        """Total blocked calls plus blocked texts."""
        return self.calls_blocked + self.texts_blocked
    def storing_phone_log(self) -> bool:
        """Whether the user opted in to storing the caller/sender log."""
        return bool(self.user.profile.store_phone_log)
    def save(self, *args, **kwargs):
        """Validate ownership rules and provision the number on first save.

        Raises ValidationError when the user has no verified real phone,
        already has a different mask, or the number is claimed by another
        user. New "twilio"-vendor numbers are purchased via the Twilio API
        before the row is written (skipped under PHONES_NO_CLIENT_CALLS_IN_TEST).
        """
        realphone = get_verified_realphone_records(self.user).first()
        if not realphone:
            raise ValidationError("User does not have a verified real phone.")
        # if this number exists for this user, this is an update call
        existing_numbers = RelayNumber.objects.filter(user=self.user)
        this_number = existing_numbers.filter(number=self.number).first()
        if this_number and this_number.id == self.id:
            return super().save(*args, **kwargs)
        elif existing_numbers.exists():
            raise ValidationError("User can have only one relay number.")
        if RelayNumber.objects.filter(number=self.number).exists():
            raise ValidationError("This number is already claimed.")
        use_twilio = (
            self.vendor == "twilio" and not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
        )
        if use_twilio:
            # Before saving into DB provision the number in Twilio
            client = twilio_client()
            # Since this will charge the Twilio account, first see if this
            # is running with TEST creds to avoid charges.
            if settings.TWILIO_TEST_ACCOUNT_SID:
                client = phones_config().twilio_test_client
            # NOTE(review): voice_application_sid reuses the SMS application
            # SID — confirm this is intentional.
            twilio_incoming_number = client.incoming_phone_numbers.create(
                phone_number=self.number,
                sms_application_sid=settings.TWILIO_SMS_APPLICATION_SID,
                voice_application_sid=settings.TWILIO_SMS_APPLICATION_SID,
            )
        # Assume number was selected through suggested_numbers, so same country
        # as realphone
        self.country_code = realphone.country_code.upper()
        # Add US numbers to the Relay messaging service, so it goes into our
        # US A2P 10DLC campaign
        if use_twilio and self.country_code == "US":
            if settings.TWILIO_MESSAGING_SERVICE_SID:
                register_with_messaging_service(client, twilio_incoming_number.sid)
            else:
                logger.warning(
                    "Skipping Twilio Messaging Service registration, since"
                    " TWILIO_MESSAGING_SERVICE_SID is empty.",
                    extra={"number_sid": twilio_incoming_number.sid},
                )
        return super().save(*args, **kwargs)
The provided code snippet includes necessary dependencies for implementing the `outbound_sms` function. Write a Python function `def outbound_sms(request)` to solve the following problem:
Send a message from the user's relay number. POST params: `body` — the body of the message; `destination` — an E.164-formatted phone number.
Here is the function:
def outbound_sms(request):
    """
    Send a message from the user's relay number.

    POST params:
        body: the body of the message
        destination: E.164-formatted phone number
    """
    # TODO: Create or update an OutboundContact (new model) on send, or limit
    # to InboundContacts.
    # TODO: Reduce user's SMS messages for the month by one
    if not flag_is_active(request, "outbound_phone"):
        return response.Response(
            {"detail": "Requires outbound_phone waffle flag."}, status=403
        )
    try:
        relay_number = RelayNumber.objects.get(user=request.user)
    except RelayNumber.DoesNotExist:
        return response.Response({"detail": "Requires a phone mask."}, status=400)

    body = request.data.get("body")
    destination = request.data.get("destination")
    # Collect per-field errors so the client sees all of them at once.
    field_errors = {
        name: message
        for name, value, message in (
            ("body", body, "A message body is required."),
            ("destination", destination, "A destination number is required."),
        )
        if not value
    }
    if field_errors:
        return response.Response(field_errors, status=400)

    # Raises ValidationError on invalid number
    destination_details = _validate_number(request, "destination")
    twilio_client().messages.create(
        from_=relay_number.number, body=body, to=destination_details.phone_number
    )
    return response.Response(status=200)
154,441 | from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
import hashlib
import logging
import re
import string
from typing import Any, Literal
from waffle import get_waffle_flag_model
import django_ftl
import phonenumbers
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.forms import model_to_dict
from drf_spectacular.utils import extend_schema, OpenApiParameter
from rest_framework import (
decorators,
permissions,
response,
throttling,
viewsets,
exceptions,
)
from rest_framework.generics import get_object_or_404
from rest_framework.request import Request
from twilio.base.exceptions import TwilioRestException
from waffle import flag_is_active
from api.views import SaveToRequestUser
from emails.utils import incr_if_enabled
from phones.iq_utils import send_iq_sms
from phones.apps import phones_config, twilio_client
from phones.models import (
InboundContact,
RealPhone,
RelayNumber,
get_last_text_sender,
get_pending_unverified_realphone_records,
get_valid_realphone_verification_record,
get_verified_realphone_record,
get_verified_realphone_records,
send_welcome_message,
suggested_numbers,
location_numbers,
area_code_numbers,
)
from privaterelay.ftl_bundles import main as ftl_bundle
from ..exceptions import ConflictError, ErrorContextType
from ..permissions import HasPhoneService
from ..renderers import (
TemplateTwiMLRenderer,
vCardRenderer,
)
from ..serializers.phones import (
InboundContactSerializer,
RealPhoneSerializer,
RelayNumberSerializer,
)
def convert_twilio_messages_to_dict(twilio_messages):
    """
    To serialize twilio messages to JSON for the API,
    we need to convert them into dictionaries.

    Each message object is reduced to the four fields the API exposes:
    "from", "to", "date_sent", and "body". Returns a list of dicts in the
    same order as the input; an empty input yields an empty list.
    """
    # A comprehension replaces the manual append loop (same output, idiomatic).
    return [
        {
            "from": message.from_,
            "to": message.to,
            "date_sent": message.date_sent,
            "body": message.body,
        }
        for message in twilio_messages
    ]
def twilio_client() -> Client:
    """Return the shared Twilio client held by the phones app config."""
    # Guard: test runs that forbid external calls must never build a client.
    # NOTE(review): `assert` is stripped under `python -O`; confirm this guard
    # is not load-bearing in optimized runs.
    assert not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
    return phones_config().twilio_client
class RelayNumber(models.Model):
    """A Relay phone mask (virtual phone number) owned by a single user.

    Saving a new number requires a verified real phone, enforces one mask
    per user, and (for the "twilio" vendor) provisions the number with
    Twilio before the database row is written.
    """
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # E.164-formatted number, globally unique across all users.
    number = models.CharField(max_length=15, db_index=True, unique=True)
    vendor = models.CharField(max_length=15, default="twilio")
    location = models.CharField(max_length=255)
    country_code = models.CharField(max_length=2, default="US")
    # Random key used in the vCard lookup URL (vcard_lookup_key_default is
    # defined elsewhere in this module).
    vcard_lookup_key = models.CharField(
        max_length=6, default=vcard_lookup_key_default, unique=True
    )
    enabled = models.BooleanField(default=True)
    # Remaining call time this billing cycle, tracked in seconds.
    remaining_seconds = models.IntegerField(
        default=settings.MAX_MINUTES_PER_BILLING_CYCLE * 60
    )
    remaining_texts = models.IntegerField(default=settings.MAX_TEXTS_PER_BILLING_CYCLE)
    calls_forwarded = models.IntegerField(default=0)
    calls_blocked = models.IntegerField(default=0)
    texts_forwarded = models.IntegerField(default=0)
    texts_blocked = models.IntegerField(default=0)
    created_at = models.DateTimeField(null=True, auto_now_add=True)
    def remaining_minutes(self):
        """Whole minutes of call time left; never negative."""
        # return a 0 or positive int for remaining minutes
        return floor(max(self.remaining_seconds, 0) / 60)
    def calls_and_texts_forwarded(self):
        """Total forwarded calls plus forwarded texts."""
        return self.calls_forwarded + self.texts_forwarded
    def calls_and_texts_blocked(self):
        """Total blocked calls plus blocked texts."""
        return self.calls_blocked + self.texts_blocked
    def storing_phone_log(self) -> bool:
        """Whether the user opted in to storing the caller/sender log."""
        return bool(self.user.profile.store_phone_log)
    def save(self, *args, **kwargs):
        """Validate ownership rules and provision the number on first save.

        Raises ValidationError when the user has no verified real phone,
        already has a different mask, or the number is claimed by another
        user. New "twilio"-vendor numbers are purchased via the Twilio API
        before the row is written (skipped under PHONES_NO_CLIENT_CALLS_IN_TEST).
        """
        realphone = get_verified_realphone_records(self.user).first()
        if not realphone:
            raise ValidationError("User does not have a verified real phone.")
        # if this number exists for this user, this is an update call
        existing_numbers = RelayNumber.objects.filter(user=self.user)
        this_number = existing_numbers.filter(number=self.number).first()
        if this_number and this_number.id == self.id:
            return super().save(*args, **kwargs)
        elif existing_numbers.exists():
            raise ValidationError("User can have only one relay number.")
        if RelayNumber.objects.filter(number=self.number).exists():
            raise ValidationError("This number is already claimed.")
        use_twilio = (
            self.vendor == "twilio" and not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
        )
        if use_twilio:
            # Before saving into DB provision the number in Twilio
            client = twilio_client()
            # Since this will charge the Twilio account, first see if this
            # is running with TEST creds to avoid charges.
            if settings.TWILIO_TEST_ACCOUNT_SID:
                client = phones_config().twilio_test_client
            # NOTE(review): voice_application_sid reuses the SMS application
            # SID — confirm this is intentional.
            twilio_incoming_number = client.incoming_phone_numbers.create(
                phone_number=self.number,
                sms_application_sid=settings.TWILIO_SMS_APPLICATION_SID,
                voice_application_sid=settings.TWILIO_SMS_APPLICATION_SID,
            )
        # Assume number was selected through suggested_numbers, so same country
        # as realphone
        self.country_code = realphone.country_code.upper()
        # Add US numbers to the Relay messaging service, so it goes into our
        # US A2P 10DLC campaign
        if use_twilio and self.country_code == "US":
            if settings.TWILIO_MESSAGING_SERVICE_SID:
                register_with_messaging_service(client, twilio_incoming_number.sid)
            else:
                logger.warning(
                    "Skipping Twilio Messaging Service registration, since"
                    " TWILIO_MESSAGING_SERVICE_SID is empty.",
                    extra={"number_sid": twilio_incoming_number.sid},
                )
        return super().save(*args, **kwargs)
class InboundContact(models.Model):
    """A caller/texter who has contacted a RelayNumber, with usage counters."""
    relay_number = models.ForeignKey(RelayNumber, on_delete=models.CASCADE)
    # Number of the external contact (same 15-char format as RelayNumber.number).
    inbound_number = models.CharField(max_length=15)
    last_inbound_date = models.DateTimeField(default=last_inbound_date_default)
    # Most recent contact kind; values come from LAST_CONTACT_TYPE_CHOICES
    # (defined elsewhere) — presumably "call"/"text", default "text".
    last_inbound_type = models.CharField(
        max_length=4, choices=LAST_CONTACT_TYPE_CHOICES, default="text"
    )
    num_calls = models.PositiveIntegerField(default=0)
    num_calls_blocked = models.PositiveIntegerField(default=0)
    last_call_date = models.DateTimeField(null=True)
    num_texts = models.PositiveIntegerField(default=0)
    num_texts_blocked = models.PositiveIntegerField(default=0)
    last_text_date = models.DateTimeField(null=True)
    # Whether the user has blocked this contact.
    blocked = models.BooleanField(default=False)
    class Meta:
        # Speeds up per-contact lookups. NOTE(review): index is not unique,
        # so duplicate (relay_number, inbound_number) rows are possible.
        indexes = [models.Index(fields=["relay_number", "inbound_number"])]
The provided code snippet includes necessary dependencies for implementing the `list_messages` function. Write a Python function `def list_messages(request)` to solve the following problem:
Get the user's SMS messages sent to or from the phone mask. Pass the ?with=<E.164> parameter to filter to only the messages sent between the phone mask and the <E.164> number. Pass ?direction=inbound|outbound to filter to only the inbound or outbound messages; if omitted, both are returned.
Here is the function:
def list_messages(request):
    """
    Get the user's SMS messages sent to or from the phone mask

    Pass ?with=<E.164> parameter to filter the messages to only the ones sent
    between the phone mask and the <E.164> number.
    Pass ?direction=inbound|outbound to filter the messages to only the inbound
    or outbound messages. If omitted, return both.
    """
    # TODO: Support filtering to messages for outbound-only phones.
    # TODO: Show data from our own (encrypted) store, rather than from Twilio's
    if not flag_is_active(request, "outbound_phone"):
        return response.Response(
            {"detail": "Requires outbound_phone waffle flag."}, status=403
        )
    try:
        relay_number = RelayNumber.objects.get(user=request.user)
    except RelayNumber.DoesNotExist:
        return response.Response({"detail": "Requires a phone mask."}, status=400)

    peer = request.query_params.get("with", None)
    direction = request.query_params.get("direction", None)
    if direction and direction not in ("inbound", "outbound"):
        return response.Response(
            {"direction": "Invalid value, valid values are 'inbound' or 'outbound'"},
            status=400,
        )

    contact = None
    if peer:
        try:
            contact = InboundContact.objects.get(
                relay_number=relay_number, inbound_number=peer
            )
        except InboundContact.DoesNotExist:
            return response.Response(
                {"with": "No inbound contacts matching the number"}, status=400
            )

    data = {}
    client = twilio_client()
    if not direction or direction == "inbound":
        # SMS sent to the phone mask, optionally narrowed to one contact.
        query = {"to": relay_number.number}
        if contact:
            query["from_"] = contact.inbound_number
        data["inbound_messages"] = convert_twilio_messages_to_dict(
            client.messages.list(**query)
        )
    if not direction or direction == "outbound":
        # SMS sent from the phone mask, optionally narrowed to one contact.
        query = {"from_": relay_number.number}
        if contact:
            query["to"] = contact.inbound_number
        data["outbound_messages"] = convert_twilio_messages_to_dict(
            client.messages.list(**query)
        )
    return response.Response(data, status=200)
154,442 | from datetime import datetime, timezone
from typing import Any
import logging
import shlex
import requests
from django.conf import settings
from django.core.cache import cache
from allauth.socialaccount.models import SocialAccount
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.exceptions import (
APIException,
AuthenticationFailed,
NotFound,
ParseError,
PermissionDenied,
)
def get_cache_key(token):
    """Return a deterministic, process-independent cache key for *token*.

    The builtin ``hash()`` is randomized per interpreter process
    (PYTHONHASHSEED), so separate workers computed different keys for the
    same token and the shared introspection cache was never hit across
    processes. A SHA-256 hex digest is stable everywhere and avoids
    placing the raw token in the cache key.
    """
    # `hashlib` is imported at the top of this module.
    return hashlib.sha256(token.encode("utf-8")).hexdigest()
def introspect_token(token: str) -> dict[str, Any]:
    """POST *token* to FxA's introspection endpoint.

    Returns ``{"status_code": int, "json": dict}``. Raises
    AuthenticationFailed when the request itself fails or when the
    response body is not JSON; both cases are logged.
    """
    try:
        resp = requests.post(INTROSPECT_TOKEN_URL, json={"token": token})
    except Exception as exc:
        logger.error(
            "Could not introspect token with FXA.",
            extra={"error_cls": type(exc), "error": shlex.quote(str(exc))},
        )
        raise AuthenticationFailed("Could not introspect token with FXA.")
    result: dict[str, Any] = {"status_code": resp.status_code, "json": {}}
    try:
        payload = resp.json()
    except requests.exceptions.JSONDecodeError:
        logger.error(
            "JSONDecodeError from FXA introspect response.",
            extra={"fxa_response": shlex.quote(resp.text)},
        )
        raise AuthenticationFailed("JSONDecodeError from FXA introspect response")
    result["json"] = payload
    return result
def get_fxa_uid_from_oauth_token(token: str, use_cache=True) -> str:
    """Introspect an FxA OAuth token and return the FxA UID ("sub").

    Introspection results — including failures — are cached briefly so an
    FxA outage cannot trigger run-away repeated requests; responses for
    valid tokens are re-cached until the token's expiry. Raises
    APIException / AuthenticationFailed / NotFound on bad responses.
    """
    # set a default cache_timeout, but this will be overriden to match
    # the 'exp' time in the JWT returned by FxA
    cache_timeout = 60
    cache_key = get_cache_key(token)
    if not use_cache:
        fxa_resp_data = introspect_token(token)
    else:
        # set a default fxa_resp_data, so any error during introspection
        # will still cache for at least cache_timeout to prevent an outage
        # from causing useless run-away repetitive introspection requests
        fxa_resp_data = {"status_code": None, "json": {}}
        try:
            cached_fxa_resp_data = cache.get(cache_key)
            if cached_fxa_resp_data:
                fxa_resp_data = cached_fxa_resp_data
            else:
                # no cached data, get new
                fxa_resp_data = introspect_token(token)
        except AuthenticationFailed:
            raise
        finally:
            # Store potential valid response, errors, inactive users, etc. from FxA
            # for at least 60 seconds. Valid access_token cache extended after checking.
            cache.set(cache_key, fxa_resp_data, cache_timeout)
    if fxa_resp_data["status_code"] is None:
        raise APIException("Previous FXA call failed, wait to retry.")
    if not fxa_resp_data["status_code"] == 200:
        raise APIException("Did not receive a 200 response from FXA.")
    if not fxa_resp_data["json"].get("active"):
        raise AuthenticationFailed("FXA returned active: False for token.")
    # FxA user is active, check for the associated Relay account
    if (raw_fxa_uid := fxa_resp_data.get("json", {}).get("sub")) is None:
        raise NotFound("FXA did not return an FXA UID.")
    fxa_uid = str(raw_fxa_uid)
    # cache valid access_token and fxa_resp_data until access_token expiration
    # TODO: revisit this since the token can expire before its time
    if type(fxa_resp_data.get("json", {}).get("exp")) is int:
        # Note: FXA iat and exp are timestamps in *milliseconds*
        fxa_token_exp_time = int(fxa_resp_data["json"]["exp"] / 1000)
        now_time = int(datetime.now(timezone.utc).timestamp())
        fxa_token_exp_cache_timeout = fxa_token_exp_time - now_time
        if fxa_token_exp_cache_timeout > cache_timeout:
            # cache until access_token expires (matched Relay user)
            # this handles cases where the token already expired
            cache_timeout = fxa_token_exp_cache_timeout
            cache.set(cache_key, fxa_resp_data, cache_timeout)
    return fxa_uid
154,443 | from __future__ import annotations
import base64
import contextlib
from email.errors import InvalidHeaderDefect
from email.headerregistry import Address, AddressHeader
from email.message import EmailMessage
from email.utils import formataddr, parseaddr
from functools import cache
from typing import cast, Any, Callable, TypeVar
import json
import pathlib
import re
from django.template.loader import render_to_string
from django.utils.text import Truncator
import requests
from botocore.exceptions import ClientError
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDFExpand
from mypy_boto3_ses.type_defs import ContentTypeDef, SendRawEmailResponseTypeDef
import jwcrypto.jwe
import jwcrypto.jwk
import markus
import logging
from urllib.parse import quote_plus, urlparse
from django.conf import settings
from django.contrib.auth.models import Group, User
from django.template.defaultfilters import linebreaksbr, urlize
from allauth.socialaccount.models import SocialAccount
from privaterelay.plans import get_bundle_country_language_mapping
from privaterelay.utils import get_countries_info_from_lang_and_mapping
from .apps import s3_client, ses_client
# Shared markus metrics client for the Relay service namespace.
metrics = markus.get_metrics("fx-private-relay")
# TypeVar so decorators below preserve the decorated callable's exact type.
_TimedFunction = TypeVar("_TimedFunction", bound=Callable[..., Any])
def time_if_enabled(name: str) -> Callable[[_TimedFunction], _TimedFunction]:
    """Decorator factory: time the wrapped callable under statsd key *name*.

    The metrics timer is engaged only when settings.STATSD_ENABLED is true
    (checked at call time); otherwise the call runs under a no-op context
    manager. Fix: the wrapper now uses functools.wraps so the decorated
    function keeps its __name__/__doc__/__module__ for logging, debugging,
    and introspection.
    """
    from functools import wraps  # local import: leaves module imports untouched

    def timing_decorator(func: _TimedFunction) -> _TimedFunction:
        @wraps(func)
        def func_wrapper(*args, **kwargs):
            ctx_manager = (
                metrics.timer(name)
                if settings.STATSD_ENABLED
                else contextlib.nullcontext()
            )
            with ctx_manager:
                return func(*args, **kwargs)

        return cast(_TimedFunction, func_wrapper)

    return timing_decorator
154,444 | from __future__ import annotations
import base64
import contextlib
from email.errors import InvalidHeaderDefect
from email.headerregistry import Address, AddressHeader
from email.message import EmailMessage
from email.utils import formataddr, parseaddr
from functools import cache
from typing import cast, Any, Callable, TypeVar
import json
import pathlib
import re
from django.template.loader import render_to_string
from django.utils.text import Truncator
import requests
from botocore.exceptions import ClientError
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDFExpand
from mypy_boto3_ses.type_defs import ContentTypeDef, SendRawEmailResponseTypeDef
import jwcrypto.jwe
import jwcrypto.jwk
import markus
import logging
from urllib.parse import quote_plus, urlparse
from django.conf import settings
from django.contrib.auth.models import Group, User
from django.template.defaultfilters import linebreaksbr, urlize
from allauth.socialaccount.models import SocialAccount
from privaterelay.plans import get_bundle_country_language_mapping
from privaterelay.utils import get_countries_info_from_lang_and_mapping
from .apps import s3_client, ses_client
metrics = markus.get_metrics("fx-private-relay")
def gauge_if_enabled(name, value, tags=None):
    """Emit a statsd gauge, but only when STATSD_ENABLED is set."""
    if not settings.STATSD_ENABLED:
        return
    metrics.gauge(name, value, tags)
154,445 | from __future__ import annotations
import base64
import contextlib
from email.errors import InvalidHeaderDefect
from email.headerregistry import Address, AddressHeader
from email.message import EmailMessage
from email.utils import formataddr, parseaddr
from functools import cache
from typing import cast, Any, Callable, TypeVar
import json
import pathlib
import re
from django.template.loader import render_to_string
from django.utils.text import Truncator
import requests
from botocore.exceptions import ClientError
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDFExpand
from mypy_boto3_ses.type_defs import ContentTypeDef, SendRawEmailResponseTypeDef
import jwcrypto.jwe
import jwcrypto.jwk
import markus
import logging
from urllib.parse import quote_plus, urlparse
from django.conf import settings
from django.contrib.auth.models import Group, User
from django.template.defaultfilters import linebreaksbr, urlize
from allauth.socialaccount.models import SocialAccount
from privaterelay.plans import get_bundle_country_language_mapping
from privaterelay.utils import get_countries_info_from_lang_and_mapping
from .apps import s3_client, ses_client
def get_email_domain_from_settings() -> str:
    """Return the email domain derived from SITE_ORIGIN's network location.

    On the dev channel a "mail." prefix is added because MX records
    cannot be published on Heroku.
    """
    netloc = str(urlparse(settings.SITE_ORIGIN).netloc)
    if settings.RELAY_CHANNEL == "dev":
        return f"mail.{netloc}"
    return netloc
154,446 | from django import template
from django.template.defaultfilters import stringfilter
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe, SafeString
def bold_violet_text(text):
    """Wrap *text* in the inline-styled bold violet <span> used in emails."""
    span_style = (
        "font-family: sans-serif; font-weight: bolder; color: #20123a;"
        " text-decoration: none; font-size: 13px;"
    )
    return f'<span style="{span_style}">{text}</span>'
154,447 | from django import template
from django.template.defaultfilters import stringfilter
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe, SafeString
def bold_violet_link(href, link_text):
    """Render *link_text* as a new-tab link in the bold violet email style."""
    anchor_style = (
        "font-family: sans-serif; color: #20123a; text-decoration: underline;"
        " font-weight: bolder; font-size: 13px;"
    )
    return f'<a href="{href}" target="_blank" style="{anchor_style}">{link_text}</a>'
154,448 | from django import template
from django.template.defaultfilters import stringfilter
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe, SafeString
The provided code snippet includes necessary dependencies for implementing the `convert_fsi_to_span` function. Write a Python function `def convert_fsi_to_span(text: str | SafeString, autoescape=True) -> str | SafeString` to solve the following problem:
Replace Fluent's unicode isolating characters with HTML markup. U+2068 is FIRST-STRONG ISOLATE (FSI), whose direction depends on its content; U+2069 is POP DIRECTIONAL ISOLATE (PDI), which ends FSI and other isolates. The HTML equivalent is <span dir="auto">...</span>. See: https://www.w3.org/International/questions/qa-bidi-unicode-controls
Here is the function:
def convert_fsi_to_span(text: str | SafeString, autoescape=True) -> str | SafeString:
    """
    Replace Fluent's unicode isolating characters with HTML markup.

    U+2068 is FIRST-STRONG ISOLATE (FSI), direction depends on content
    U+2069 is POP DIRECTIONAL ISOLATE (PDI), ends FSI and other isolates
    HTML equivalent is <span dir="auto">...</span>

    See:
    https://www.w3.org/International/questions/qa-bidi-unicode-controls
    """
    before, fsi, tail = text.partition("\u2068")
    isolated, pdi, after = tail.partition("\u2069")
    if not (fsi and pdi):
        # No FSI followed by a PDI: return the text untouched.
        return text
    if not autoescape:
        return mark_safe(f'{before}<span dir="auto">{isolated}</span>{after}')
    return mark_safe(
        f"{conditional_escape(before)}"
        f'<span dir="auto">{conditional_escape(isolated)}</span>'
        f"{conditional_escape(after)}"
    )
154,449 | from __future__ import annotations
from collections import namedtuple
from datetime import datetime, timedelta, timezone
from hashlib import sha256
from typing import Iterable, Literal
import logging
import random
import re
import string
import uuid
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import BadRequest
from django.core.validators import MinLengthValidator
from django.db import models, transaction
from django.dispatch import receiver
from django.utils.translation.trans_real import (
parse_accept_lang_header,
get_supported_language_variant,
)
from allauth.socialaccount.models import SocialAccount
from rest_framework.authtoken.models import Token
from api.exceptions import ErrorContextType, RelayAPIException
from privaterelay.plans import get_premium_countries
from privaterelay.utils import (
AcceptLanguageError,
flag_is_active_in_task,
guess_country_from_accept_lang,
)
from .apps import emails_config
from .utils import get_domains_from_settings, incr_if_enabled
def default_server_storage():
    """Default for Profile.server_storage: store alias labels server-side."""
    return True
154,450 | from __future__ import annotations
from collections import namedtuple
from datetime import datetime, timedelta, timezone
from hashlib import sha256
from typing import Iterable, Literal
import logging
import random
import re
import string
import uuid
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import BadRequest
from django.core.validators import MinLengthValidator
from django.db import models, transaction
from django.dispatch import receiver
from django.utils.translation.trans_real import (
parse_accept_lang_header,
get_supported_language_variant,
)
from allauth.socialaccount.models import SocialAccount
from rest_framework.authtoken.models import Token
from api.exceptions import ErrorContextType, RelayAPIException
from privaterelay.plans import get_premium_countries
from privaterelay.utils import (
AcceptLanguageError,
flag_is_active_in_task,
guess_country_from_accept_lang,
)
from .apps import emails_config
from .utils import get_domains_from_settings, incr_if_enabled
def get_domain_numerical(domain_address):
    """Map a domain address (e.g. "test.com") to its DOMAIN_CHOICES number.

    Raises ValueError when the address or the derived name is unknown
    (first match wins, matching the original list.index semantics).
    """
    # Reverse-lookup the settings name for this address...
    domains = get_domains_from_settings()
    name_index = list(domains.values()).index(domain_address)
    domain_name = list(domains.keys())[name_index]
    # ...then reverse-lookup that name's numerical code in DOMAIN_CHOICES.
    choices = dict(DOMAIN_CHOICES)
    code_index = list(choices.values()).index(domain_name)
    return list(choices.keys())[code_index]
def get_domains_from_settings():
    """Return the Relay email domains, with fixed values under Django tests."""
    # HACK: "testserver" in ALLOWED_HOSTS signals the Django test runner;
    # return canned domains without touching the real settings values.
    if "testserver" in settings.ALLOWED_HOSTS:
        return dict(RELAY_FIREFOX_DOMAIN="default.com", MOZMAIL_DOMAIN="test.com")
    return dict(
        RELAY_FIREFOX_DOMAIN=settings.RELAY_FIREFOX_DOMAIN,
        MOZMAIL_DOMAIN=settings.MOZMAIL_DOMAIN,
    )
def default_domain_numerical():
    """Default numerical domain code: the configured MOZMAIL domain."""
    mozmail_domain = get_domains_from_settings()["MOZMAIL_DOMAIN"]
    return get_domain_numerical(mozmail_domain)
154,451 | from __future__ import annotations
from collections import namedtuple
from datetime import datetime, timedelta, timezone
from hashlib import sha256
from typing import Iterable, Literal
import logging
import random
import re
import string
import uuid
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import BadRequest
from django.core.validators import MinLengthValidator
from django.db import models, transaction
from django.dispatch import receiver
from django.utils.translation.trans_real import (
parse_accept_lang_header,
get_supported_language_variant,
)
from allauth.socialaccount.models import SocialAccount
from rest_framework.authtoken.models import Token
from api.exceptions import ErrorContextType, RelayAPIException
from privaterelay.plans import get_premium_countries
from privaterelay.utils import (
AcceptLanguageError,
flag_is_active_in_task,
guess_country_from_accept_lang,
)
from .apps import emails_config
from .utils import get_domains_from_settings, incr_if_enabled
def copy_auth_token(sender, instance=None, created=False, **kwargs):
    """Signal handler: mirror a new Profile's api_token into a DRF Token.

    Only acts on newly created instances; ``instance`` is read for
    ``.user`` and ``.api_token``. Existing Tokens are left untouched.
    """
    if created:
        # baker triggers created during tests
        # so first check the user doesn't already have a Token
        try:
            Token.objects.get(user=instance.user)
            return
        except Token.DoesNotExist:
            Token.objects.create(user=instance.user, key=instance.api_token)
154,452 | from __future__ import annotations
from collections import namedtuple
from datetime import datetime, timedelta, timezone
from hashlib import sha256
from typing import Iterable, Literal
import logging
import random
import re
import string
import uuid
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import BadRequest
from django.core.validators import MinLengthValidator
from django.db import models, transaction
from django.dispatch import receiver
from django.utils.translation.trans_real import (
parse_accept_lang_header,
get_supported_language_variant,
)
from allauth.socialaccount.models import SocialAccount
from rest_framework.authtoken.models import Token
from api.exceptions import ErrorContextType, RelayAPIException
from privaterelay.plans import get_premium_countries
from privaterelay.utils import (
AcceptLanguageError,
flag_is_active_in_task,
guess_country_from_accept_lang,
)
from .apps import emails_config
from .utils import get_domains_from_settings, incr_if_enabled
def address_default():
    """Generate a random 9-character lowercase-alphanumeric address."""
    alphabet = string.ascii_lowercase + string.digits
    return "".join(random.choice(alphabet) for _ in range(9))
154,453 | from __future__ import annotations
from collections import namedtuple
from datetime import datetime, timedelta, timezone
from hashlib import sha256
from typing import Iterable, Literal
import logging
import random
import re
import string
import uuid
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import BadRequest
from django.core.validators import MinLengthValidator
from django.db import models, transaction
from django.dispatch import receiver
from django.utils.translation.trans_real import (
parse_accept_lang_header,
get_supported_language_variant,
)
from allauth.socialaccount.models import SocialAccount
from rest_framework.authtoken.models import Token
from api.exceptions import ErrorContextType, RelayAPIException
from privaterelay.plans import get_premium_countries
from privaterelay.utils import (
AcceptLanguageError,
flag_is_active_in_task,
guess_country_from_accept_lang,
)
from .apps import emails_config
from .utils import get_domains_from_settings, incr_if_enabled
class Profile(models.Model):
    """Relay-specific account data, attached one-to-one to a Django ``User``.

    Tracks subscription dates, bounce/abuse state, mask-deletion counters,
    and user preferences (server storage, phone log storage, spam blocking).

    NOTE(review): several accessors below are used attribute-style by code in
    this file (e.g. ``self.fxa.extra_data``, ``self.relay_addresses.count()``,
    ``profile.is_flagged``, ``f"...{self.plan_term}"``), so they are declared
    as properties; without ``@property`` those call sites would operate on
    bound-method objects.
    """

    user = models.OneToOneField(User, on_delete=models.CASCADE)
    api_token = models.UUIDField(default=uuid.uuid4)
    num_address_deleted = models.PositiveIntegerField(default=0)
    date_subscribed = models.DateTimeField(blank=True, null=True)
    date_subscribed_phone = models.DateTimeField(blank=True, null=True)
    # TODO MPP-2972: delete date_phone_subscription_checked in favor of
    # date_phone_subscription_next_reset
    date_phone_subscription_checked = models.DateTimeField(blank=True, null=True)
    date_phone_subscription_start = models.DateTimeField(blank=True, null=True)
    date_phone_subscription_reset = models.DateTimeField(blank=True, null=True)
    date_phone_subscription_end = models.DateTimeField(blank=True, null=True)
    address_last_deleted = models.DateTimeField(blank=True, null=True, db_index=True)
    last_soft_bounce = models.DateTimeField(blank=True, null=True, db_index=True)
    last_hard_bounce = models.DateTimeField(blank=True, null=True, db_index=True)
    last_account_flagged = models.DateTimeField(blank=True, null=True, db_index=True)
    num_deleted_relay_addresses = models.PositiveIntegerField(default=0)
    num_deleted_domain_addresses = models.PositiveIntegerField(default=0)
    num_email_forwarded_in_deleted_address = models.PositiveIntegerField(default=0)
    num_email_blocked_in_deleted_address = models.PositiveIntegerField(default=0)
    num_level_one_trackers_blocked_in_deleted_address = models.PositiveIntegerField(
        default=0, null=True
    )
    num_email_replied_in_deleted_address = models.PositiveIntegerField(default=0)
    num_email_spam_in_deleted_address = models.PositiveIntegerField(default=0)
    subdomain = models.CharField(
        blank=True,
        null=True,
        unique=True,
        max_length=63,
        db_index=True,
        validators=[valid_available_subdomain],
    )
    # Whether we store the user's alias labels in the server
    server_storage = models.BooleanField(default=True)
    # Whether we store the caller/sender log for the user's relay number
    store_phone_log = models.BooleanField(default=True)
    # TODO: Data migration to set null to false
    # TODO: Schema migration to remove null=True
    remove_level_one_email_trackers = models.BooleanField(null=True, default=False)
    onboarding_state = models.PositiveIntegerField(default=0)
    onboarding_free_state = models.PositiveIntegerField(default=0)
    auto_block_spam = models.BooleanField(default=False)
    forwarded_first_reply = models.BooleanField(default=False)
    # Empty string means the profile was created through relying party flow
    created_by = models.CharField(blank=True, null=True, max_length=63)
    sent_welcome_email = models.BooleanField(default=False)
    last_engagement = models.DateTimeField(blank=True, null=True, db_index=True)

    def __str__(self):
        return "%s Profile" % self.user

    def save(
        self,
        force_insert: bool = False,
        force_update: bool = False,
        using: str | None = None,
        update_fields: Iterable[str] | None = None,
    ) -> None:
        """Save the profile, enforcing privacy-related side effects.

        Side effects (grounded in the body below):
        * ``subdomain`` is lower-cased before saving.
        * When ``server_storage`` is False, server-stored mask metadata
          (descriptions, generated_for, used_on) is wiped.
        * When phones are enabled and ``store_phone_log`` is False, the
          user's stored InboundContact records are deleted.
        """
        # always lower-case the subdomain before saving it
        # TODO: change subdomain field as a custom field inheriting from
        # CharField to validate constraints on the field update too
        if self.subdomain and not self.subdomain.islower():
            self.subdomain = self.subdomain.lower()
            if update_fields is not None:
                # Make sure the normalized subdomain is included in a
                # partial update.
                update_fields = {"subdomain"}.union(update_fields)
        super().save(
            force_insert=force_insert,
            force_update=force_update,
            using=using,
            update_fields=update_fields,
        )
        # any time a profile is saved with server_storage False, delete the
        # appropriate server-stored Relay address data.
        if not self.server_storage:
            relay_addresses = RelayAddress.objects.filter(user=self.user)
            relay_addresses.update(description="", generated_for="", used_on="")
            domain_addresses = DomainAddress.objects.filter(user=self.user)
            domain_addresses.update(description="", used_on="")
        if settings.PHONES_ENABLED:
            # any time a profile is saved with store_phone_log False, delete the
            # appropriate server-stored InboundContact records
            from phones.models import InboundContact, RelayNumber

            if not self.store_phone_log:
                try:
                    relay_number = RelayNumber.objects.get(user=self.user)
                    InboundContact.objects.filter(relay_number=relay_number).delete()
                except RelayNumber.DoesNotExist:
                    pass

    def language(self):
        """Return the best supported language from the FxA locale, or "en"."""
        if self.fxa and self.fxa.extra_data.get("locale"):
            for accept_lang, _ in parse_accept_lang_header(
                self.fxa.extra_data.get("locale")
            ):
                try:
                    return get_supported_language_variant(accept_lang)
                except LookupError:
                    continue
        return "en"

    # This method returns whether the locale associated with the user's Mozilla account
    # includes a country code from a Premium country. This is less accurate than using
    # get_countries_info_from_request_and_mapping(), which can use a GeoIP lookup, so
    # prefer using that if a request context is available. In other contexts, for
    # example when sending an email, this method can be useful.
    def fxa_locale_in_premium_country(self) -> bool:
        if self.fxa and self.fxa.extra_data.get("locale"):
            try:
                country = guess_country_from_accept_lang(self.fxa.extra_data["locale"])
            except AcceptLanguageError:
                return False
            premium_countries = get_premium_countries()
            if country in premium_countries:
                return True
        return False

    def avatar(self) -> str | None:
        """Return the FxA avatar URL, or None when there is no FxA account."""
        if fxa := self.fxa:
            return str(fxa.extra_data.get("avatar"))
        return None

    @property
    def relay_addresses(self):
        """QuerySet of the user's random masks (used attribute-style below)."""
        return RelayAddress.objects.filter(user=self.user)

    @property
    def domain_addresses(self):
        """QuerySet of the user's custom-subdomain masks."""
        return DomainAddress.objects.filter(user=self.user)

    def total_masks(self) -> int:
        """Total number of masks (random + custom-domain)."""
        ra_count: int = self.relay_addresses.count()
        da_count: int = self.domain_addresses.count()
        return ra_count + da_count

    def at_mask_limit(self) -> bool:
        """True when a free user has used all their random masks."""
        if self.has_premium:
            return False
        ra_count: int = self.relay_addresses.count()
        return ra_count >= settings.MAX_NUM_FREE_ALIASES

    def check_bounce_pause(self):
        """Return the current BounceStatus, clearing expired bounce flags."""
        if self.last_hard_bounce:
            last_hard_bounce_allowed = datetime.now(timezone.utc) - timedelta(
                days=settings.HARD_BOUNCE_ALLOWED_DAYS
            )
            if self.last_hard_bounce > last_hard_bounce_allowed:
                return BounceStatus(True, "hard")
            self.last_hard_bounce = None
            self.save()
        if self.last_soft_bounce:
            last_soft_bounce_allowed = datetime.now(timezone.utc) - timedelta(
                days=settings.SOFT_BOUNCE_ALLOWED_DAYS
            )
            if self.last_soft_bounce > last_soft_bounce_allowed:
                return BounceStatus(True, "soft")
            self.last_soft_bounce = None
            self.save()
        return BounceStatus(False, "")

    def bounce_status(self):
        return self.check_bounce_pause()

    def next_email_try(self):
        """Return the datetime when forwarding may resume after a bounce."""
        bounce_pause, bounce_type = self.check_bounce_pause()
        if not bounce_pause:
            return datetime.now(timezone.utc)
        if bounce_type == "soft":
            assert self.last_soft_bounce
            return self.last_soft_bounce + timedelta(
                days=settings.SOFT_BOUNCE_ALLOWED_DAYS
            )
        assert bounce_type == "hard"
        assert self.last_hard_bounce
        return self.last_hard_bounce + timedelta(days=settings.HARD_BOUNCE_ALLOWED_DAYS)

    def last_bounce_date(self):
        # Hard bounce takes precedence over soft bounce.
        if self.last_hard_bounce:
            return self.last_hard_bounce
        if self.last_soft_bounce:
            return self.last_soft_bounce
        return None

    @property
    def at_max_free_aliases(self) -> bool:
        """True when a free-tier user has reached the random-mask limit."""
        relay_addresses_count: int = self.relay_addresses.count()
        return relay_addresses_count >= settings.MAX_NUM_FREE_ALIASES

    @property
    def fxa(self) -> SocialAccount | None:
        """The user's Mozilla account (provider "fxa"), or None."""
        # Note: we are NOT using .filter() here because it invalidates
        # any profile instances that were queried with prefetch_related, which
        # we use in at least the profile view to minimize queries
        assert hasattr(self.user, "socialaccount_set")
        for sa in self.user.socialaccount_set.all():
            if sa.provider == "fxa":
                return sa
        return None

    def display_name(self) -> str | None:
        # if display name is not set on FxA the
        # displayName key will not exist on the extra_data
        if fxa := self.fxa:
            name = fxa.extra_data.get("displayName")
            return name if name is None else str(name)
        return None

    def custom_domain(self) -> str:
        """The user's custom mask domain, e.g. "@sub.mozmail.com"."""
        assert self.subdomain
        return f"@{self.subdomain}.{settings.MOZMAIL_DOMAIN}"

    @property
    def has_premium(self) -> bool:
        # FIXME: as we don't have all the tiers defined we are over-defining
        # this to mark the user as a premium user as well
        if not self.fxa:
            return False
        for premium_domain in PREMIUM_DOMAINS:
            if self.user.email.endswith(f"@{premium_domain}"):
                return True
        user_subscriptions = self.fxa.extra_data.get("subscriptions", [])
        for sub in settings.SUBSCRIPTIONS_WITH_UNLIMITED:
            if sub in user_subscriptions:
                return True
        return False

    @property
    def has_phone(self) -> bool:
        """True when the user may use phone masking (flag or subscription)."""
        if not self.fxa:
            return False
        if settings.RELAY_CHANNEL != "prod" and not settings.IN_PYTEST:
            if not flag_is_active_in_task("phones", self.user):
                return False
        if flag_is_active_in_task("free_phones", self.user):
            return True
        user_subscriptions = self.fxa.extra_data.get("subscriptions", [])
        for sub in settings.SUBSCRIPTIONS_WITH_PHONE:
            if sub in user_subscriptions:
                return True
        return False

    @property
    def has_vpn(self):
        """True when the user's FxA subscriptions include a VPN plan."""
        if not self.fxa:
            return False
        user_subscriptions = self.fxa.extra_data.get("subscriptions", [])
        for sub in settings.SUBSCRIPTIONS_WITH_VPN:
            if sub in user_subscriptions:
                return True
        return False

    def emails_forwarded(self):
        """Lifetime count of forwarded emails, including deleted masks."""
        return (
            sum(ra.num_forwarded for ra in self.relay_addresses)
            + sum(da.num_forwarded for da in self.domain_addresses)
            + self.num_email_forwarded_in_deleted_address
        )

    def emails_blocked(self):
        """Lifetime count of blocked emails, including deleted masks."""
        return (
            sum(ra.num_blocked for ra in self.relay_addresses)
            + sum(da.num_blocked for da in self.domain_addresses)
            + self.num_email_blocked_in_deleted_address
        )

    def emails_replied(self):
        """Lifetime count of replies sent, including deleted masks."""
        # Once Django is on version 4.0 and above, we can set the default=0
        # and return a int instead of None
        # https://docs.djangoproject.com/en/4.0/ref/models/querysets/#default
        totals = [self.relay_addresses.aggregate(models.Sum("num_replied"))]
        totals.append(self.domain_addresses.aggregate(models.Sum("num_replied")))
        total_num_replied = 0
        for num in totals:
            total_num_replied += (
                num.get("num_replied__sum") if num.get("num_replied__sum") else 0
            )
        return total_num_replied + self.num_email_replied_in_deleted_address

    def level_one_trackers_blocked(self):
        """Lifetime count of level-1 trackers blocked, including deleted masks."""
        return (
            sum(ra.num_level_one_trackers_blocked or 0 for ra in self.relay_addresses)
            + sum(
                da.num_level_one_trackers_blocked or 0 for da in self.domain_addresses
            )
            + (self.num_level_one_trackers_blocked_in_deleted_address or 0)
        )

    def joined_before_premium_release(self):
        # Premium release date: 2021-10-22 17:00 UTC.
        date_created = self.user.date_joined
        return date_created < datetime.fromisoformat("2021-10-22 17:00:00+00:00")

    def date_phone_registered(self) -> datetime | None:
        """Return when the user's phone mask was registered, or None."""
        if not settings.PHONES_ENABLED:
            return None
        # Deferred import, mirroring save(): phones models are only usable
        # when PHONES_ENABLED, and these names are not imported at the top
        # of this module.
        from phones.models import RealPhone, RelayNumber

        try:
            real_phone = RealPhone.objects.get(user=self.user)
            relay_number = RelayNumber.objects.get(user=self.user)
        except RealPhone.DoesNotExist:
            return None
        except RelayNumber.DoesNotExist:
            # Real phone verified but no relay number assigned yet.
            return real_phone.verified_date
        return relay_number.created_at or real_phone.verified_date

    def add_subdomain(self, subdomain):
        """Claim a custom subdomain for a premium user.

        Raises CannotMakeSubdomainException when the subdomain is empty, the
        user is not premium, or a subdomain is already set. Registers the
        hashed subdomain so it cannot be re-used after account deletion.
        """
        # Handles if the subdomain is "" or None
        if not subdomain:
            raise CannotMakeSubdomainException(
                "error-subdomain-cannot-be-empty-or-null"
            )

        # subdomain must be all lowercase
        subdomain = subdomain.lower()

        if not self.has_premium:
            raise CannotMakeSubdomainException("error-premium-set-subdomain")
        if self.subdomain is not None:
            raise CannotMakeSubdomainException("error-premium-cannot-change-subdomain")
        self.subdomain = subdomain
        # The validator defined in the subdomain field does not get run in full_clean()
        # when self.subdomain is "" or None, so we need to run the validator again to
        # catch these cases.
        valid_available_subdomain(subdomain)
        self.full_clean()
        self.save()

        RegisteredSubdomain.objects.create(subdomain_hash=hash_subdomain(subdomain))
        return subdomain

    def update_abuse_metric(
        self,
        address_created=False,
        replied=False,
        email_forwarded=False,
        forwarded_email_size=0,
    ) -> datetime | None:
        """Record daily activity counters and flag the account on abuse.

        Returns the (possibly updated) ``last_account_flagged`` timestamp.
        """
        # TODO MPP-3720: This should be wrapped in atomic or select_for_update to ensure
        # race conditions are properly handled.

        # look for abuse metrics created on the same UTC date, regardless of time.
        midnight_utc_today = datetime.combine(
            datetime.now(timezone.utc).date(), datetime.min.time()
        ).astimezone(timezone.utc)
        midnight_utc_tomorow = midnight_utc_today + timedelta(days=1)
        abuse_metric = self.user.abusemetrics_set.filter(
            first_recorded__gte=midnight_utc_today,
            first_recorded__lt=midnight_utc_tomorow,
        ).first()
        if not abuse_metric:
            abuse_metric = AbuseMetrics.objects.create(user=self.user)
            # Prune metrics from earlier days; only today's row is needed.
            AbuseMetrics.objects.filter(first_recorded__lt=midnight_utc_today).delete()

        # increment the abuse metric
        if address_created:
            abuse_metric.num_address_created_per_day += 1
        if replied:
            abuse_metric.num_replies_per_day += 1
        if email_forwarded:
            abuse_metric.num_email_forwarded_per_day += 1
        if forwarded_email_size > 0:
            abuse_metric.forwarded_email_size_per_day += forwarded_email_size
        abuse_metric.last_recorded = datetime.now(timezone.utc)
        abuse_metric.save()

        # check user should be flagged for abuse
        hit_max_create = False
        hit_max_replies = False
        hit_max_forwarded = False
        hit_max_forwarded_email_size = False
        hit_max_create = (
            abuse_metric.num_address_created_per_day
            >= settings.MAX_ADDRESS_CREATION_PER_DAY
        )
        hit_max_replies = (
            abuse_metric.num_replies_per_day >= settings.MAX_REPLIES_PER_DAY
        )
        hit_max_forwarded = (
            abuse_metric.num_email_forwarded_per_day >= settings.MAX_FORWARDED_PER_DAY
        )
        hit_max_forwarded_email_size = (
            abuse_metric.forwarded_email_size_per_day
            >= settings.MAX_FORWARDED_EMAIL_SIZE_PER_DAY
        )
        if (
            hit_max_create
            or hit_max_replies
            or hit_max_forwarded
            or hit_max_forwarded_email_size
        ):
            self.last_account_flagged = datetime.now(timezone.utc)
            self.save()
            data = {
                "uid": self.fxa.uid if self.fxa else None,
                "flagged": self.last_account_flagged.timestamp(),
                "replies": abuse_metric.num_replies_per_day,
                "addresses": abuse_metric.num_address_created_per_day,
                "forwarded": abuse_metric.num_email_forwarded_per_day,
                "forwarded_size_in_bytes": abuse_metric.forwarded_email_size_per_day,
            }
            # log for further secops review
            abuse_logger.info("Abuse flagged", extra=data)
        return self.last_account_flagged

    @property
    def is_flagged(self):
        """True while the account is inside the abuse-flag pause window."""
        if not self.last_account_flagged:
            return False
        account_premium_feature_resumed = self.last_account_flagged + timedelta(
            days=settings.PREMIUM_FEATURE_PAUSED_DAYS
        )
        if datetime.now(timezone.utc) > account_premium_feature_resumed:
            # premium feature has been resumed
            return False
        # user was flagged and the premium feature pause period is not yet over
        return True

    def metrics_enabled(self) -> bool:
        """
        Does the user allow us to record technical and interaction data?

        This is based on the Mozilla accounts opt-out option, added around 2022. A user
        can go to their Mozilla account profile settings, Data Collection and Use, and
        deselect "Help improve Mozilla Account". This setting defaults to On, and is
        sent as "metricsEnabled". Some older Relay accounts do not have
        "metricsEnabled", and we default to On.
        """
        if self.fxa:
            return bool(self.fxa.extra_data.get("metricsEnabled", True))
        return True

    @property
    def plan(self) -> Literal["free", "email", "phone", "bundle"]:
        """The user's Relay plan as a string."""
        if self.has_premium:
            if self.has_phone:
                return "bundle" if self.has_vpn else "phone"
            else:
                return "email"
        else:
            return "free"

    @property
    def plan_term(self) -> Literal[None, "unknown", "1_month", "1_year"]:
        """The user's Relay plan term as a string."""
        plan = self.plan
        if plan == "free":
            return None
        if plan == "phone":
            start_date = self.date_phone_subscription_start
            end_date = self.date_phone_subscription_end
            if start_date and end_date:
                span = end_date - start_date
                # Anything longer than ~a month is treated as annual.
                return "1_year" if span.days > 32 else "1_month"
        return "unknown"

    def metrics_premium_status(self) -> str:
        """Plan string for metrics, e.g. "free" or "phone_1_month"."""
        plan = self.plan
        if plan == "free":
            return "free"
        return f"{plan}_{self.plan_term}"
class AccountIsPausedException(CannotMakeAddressException):
    """Raised when a flagged account tries to create a mask during its pause."""

    status_code = 403
    default_code = "account_is_paused"
    default_detail = "Your account is on pause."
class RelayAddrFreeTierLimitException(CannotMakeAddressException):
    """Raised when a free-tier user has used all of their random email masks."""

    default_code = "free_tier_limit"
    default_detail_template = (
        "You’ve used all {free_tier_limit} email masks included with your free account."
        " You can reuse an existing mask, but using a unique mask for each account is"
        " the most secure option."
    )
    status_code = 403

    def __init__(self, free_tier_limit: int | None = None, *args, **kwargs):
        """Store the limit for the error message.

        free_tier_limit: the mask limit to report; defaults to
        settings.MAX_NUM_FREE_ALIASES when omitted.
        """
        # Explicit None check (not `or`) so an explicitly passed 0 is
        # honored instead of being silently replaced by the default.
        self.free_tier_limit = (
            settings.MAX_NUM_FREE_ALIASES if free_tier_limit is None else free_tier_limit
        )
        super().__init__(*args, **kwargs)

    def error_context(self) -> ErrorContextType:
        """Values interpolated into default_detail_template."""
        return {"free_tier_limit": self.free_tier_limit}
def check_user_can_make_another_address(profile: Profile) -> None:
    """Raise if the profile may not create another random mask.

    Raises AccountIsPausedException for abuse-flagged accounts and
    RelayAddrFreeTierLimitException for free users at their mask limit.
    """
    if profile.is_flagged:
        raise AccountIsPausedException()
    # MPP-3021: short-circuit on has_premium so premium users never trigger
    # the at_max_free_aliases DB query.
    if not profile.has_premium and profile.at_max_free_aliases:
        raise RelayAddrFreeTierLimitException()
from __future__ import annotations
from collections import namedtuple
from datetime import datetime, timedelta, timezone
from hashlib import sha256
from typing import Iterable, Literal
import logging
import random
import re
import string
import uuid
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import BadRequest
from django.core.validators import MinLengthValidator
from django.db import models, transaction
from django.dispatch import receiver
from django.utils.translation.trans_real import (
parse_accept_lang_header,
get_supported_language_variant,
)
from allauth.socialaccount.models import SocialAccount
from rest_framework.authtoken.models import Token
from api.exceptions import ErrorContextType, RelayAPIException
from privaterelay.plans import get_premium_countries
from privaterelay.utils import (
AcceptLanguageError,
flag_is_active_in_task,
guess_country_from_accept_lang,
)
from .apps import emails_config
from .utils import get_domains_from_settings, incr_if_enabled
def address_hash(address, subdomain=None, domain=None):
def has_bad_words(value) -> bool:
def valid_address_pattern(address):
class DeletedAddress(models.Model):
def __str__(self):
def flag_is_active_in_task(flag_name: str, user: AbstractBaseUser | None) -> bool:
def valid_address(address: str, domain: str, subdomain: str | None = None) -> bool:
    """Return True when the mask address is acceptable.

    An address is valid when it matches the allowed pattern, contains no
    flagged words, and (for non-subdomain addresses, or when the
    custom_domain_management_redesign flag is on) was never used and deleted.
    """
    pattern_ok = valid_address_pattern(address)
    contains_badword = has_bad_words(address)
    times_deleted = 0
    if not subdomain or flag_is_active_in_task(
        "custom_domain_management_redesign", None
    ):
        times_deleted = DeletedAddress.objects.filter(
            address_hash=address_hash(address, domain=domain, subdomain=subdomain)
        ).count()
    return pattern_ok and not contains_badword and times_deleted == 0
from __future__ import annotations
from collections import namedtuple
from datetime import datetime, timedelta, timezone
from hashlib import sha256
from typing import Iterable, Literal
import logging
import random
import re
import string
import uuid
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import BadRequest
from django.core.validators import MinLengthValidator
from django.db import models, transaction
from django.dispatch import receiver
from django.utils.translation.trans_real import (
parse_accept_lang_header,
get_supported_language_variant,
)
from allauth.socialaccount.models import SocialAccount
from rest_framework.authtoken.models import Token
from api.exceptions import ErrorContextType, RelayAPIException
from privaterelay.plans import get_premium_countries
from privaterelay.utils import (
AcceptLanguageError,
flag_is_active_in_task,
guess_country_from_accept_lang,
)
from .apps import emails_config
from .utils import get_domains_from_settings, incr_if_enabled
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
api_token = models.UUIDField(default=uuid.uuid4)
num_address_deleted = models.PositiveIntegerField(default=0)
date_subscribed = models.DateTimeField(blank=True, null=True)
date_subscribed_phone = models.DateTimeField(blank=True, null=True)
# TODO MPP-2972: delete date_phone_subscription_checked in favor of
# date_phone_subscription_next_reset
date_phone_subscription_checked = models.DateTimeField(blank=True, null=True)
date_phone_subscription_start = models.DateTimeField(blank=True, null=True)
date_phone_subscription_reset = models.DateTimeField(blank=True, null=True)
date_phone_subscription_end = models.DateTimeField(blank=True, null=True)
address_last_deleted = models.DateTimeField(blank=True, null=True, db_index=True)
last_soft_bounce = models.DateTimeField(blank=True, null=True, db_index=True)
last_hard_bounce = models.DateTimeField(blank=True, null=True, db_index=True)
last_account_flagged = models.DateTimeField(blank=True, null=True, db_index=True)
num_deleted_relay_addresses = models.PositiveIntegerField(default=0)
num_deleted_domain_addresses = models.PositiveIntegerField(default=0)
num_email_forwarded_in_deleted_address = models.PositiveIntegerField(default=0)
num_email_blocked_in_deleted_address = models.PositiveIntegerField(default=0)
num_level_one_trackers_blocked_in_deleted_address = models.PositiveIntegerField(
default=0, null=True
)
num_email_replied_in_deleted_address = models.PositiveIntegerField(default=0)
num_email_spam_in_deleted_address = models.PositiveIntegerField(default=0)
subdomain = models.CharField(
blank=True,
null=True,
unique=True,
max_length=63,
db_index=True,
validators=[valid_available_subdomain],
)
# Whether we store the user's alias labels in the server
server_storage = models.BooleanField(default=True)
# Whether we store the caller/sender log for the user's relay number
store_phone_log = models.BooleanField(default=True)
# TODO: Data migration to set null to false
# TODO: Schema migration to remove null=True
remove_level_one_email_trackers = models.BooleanField(null=True, default=False)
onboarding_state = models.PositiveIntegerField(default=0)
onboarding_free_state = models.PositiveIntegerField(default=0)
auto_block_spam = models.BooleanField(default=False)
forwarded_first_reply = models.BooleanField(default=False)
# Empty string means the profile was created through relying party flow
created_by = models.CharField(blank=True, null=True, max_length=63)
sent_welcome_email = models.BooleanField(default=False)
last_engagement = models.DateTimeField(blank=True, null=True, db_index=True)
def __str__(self):
return "%s Profile" % self.user
def save(
self,
force_insert: bool = False,
force_update: bool = False,
using: str | None = None,
update_fields: Iterable[str] | None = None,
) -> None:
# always lower-case the subdomain before saving it
# TODO: change subdomain field as a custom field inheriting from
# CharField to validate constraints on the field update too
if self.subdomain and not self.subdomain.islower():
self.subdomain = self.subdomain.lower()
if update_fields is not None:
update_fields = {"subdomain"}.union(update_fields)
super().save(
force_insert=force_insert,
force_update=force_update,
using=using,
update_fields=update_fields,
)
# any time a profile is saved with server_storage False, delete the
# appropriate server-stored Relay address data.
if not self.server_storage:
relay_addresses = RelayAddress.objects.filter(user=self.user)
relay_addresses.update(description="", generated_for="", used_on="")
domain_addresses = DomainAddress.objects.filter(user=self.user)
domain_addresses.update(description="", used_on="")
if settings.PHONES_ENABLED:
# any time a profile is saved with store_phone_log False, delete the
# appropriate server-stored InboundContact records
from phones.models import InboundContact, RelayNumber
if not self.store_phone_log:
try:
relay_number = RelayNumber.objects.get(user=self.user)
InboundContact.objects.filter(relay_number=relay_number).delete()
except RelayNumber.DoesNotExist:
pass
def language(self):
if self.fxa and self.fxa.extra_data.get("locale"):
for accept_lang, _ in parse_accept_lang_header(
self.fxa.extra_data.get("locale")
):
try:
return get_supported_language_variant(accept_lang)
except LookupError:
continue
return "en"
# This method returns whether the locale associated with the user's Mozilla account
# includes a country code from a Premium country. This is less accurate than using
# get_countries_info_from_request_and_mapping(), which can use a GeoIP lookup, so
# prefer using that if a request context is available. In other contexts, for
# example when sending an email, this method can be useful.
def fxa_locale_in_premium_country(self) -> bool:
if self.fxa and self.fxa.extra_data.get("locale"):
try:
country = guess_country_from_accept_lang(self.fxa.extra_data["locale"])
except AcceptLanguageError:
return False
premium_countries = get_premium_countries()
if country in premium_countries:
return True
return False
def avatar(self) -> str | None:
if fxa := self.fxa:
return str(fxa.extra_data.get("avatar"))
return None
def relay_addresses(self):
return RelayAddress.objects.filter(user=self.user)
def domain_addresses(self):
return DomainAddress.objects.filter(user=self.user)
def total_masks(self) -> int:
ra_count: int = self.relay_addresses.count()
da_count: int = self.domain_addresses.count()
return ra_count + da_count
def at_mask_limit(self) -> bool:
if self.has_premium:
return False
ra_count: int = self.relay_addresses.count()
return ra_count >= settings.MAX_NUM_FREE_ALIASES
def check_bounce_pause(self):
if self.last_hard_bounce:
last_hard_bounce_allowed = datetime.now(timezone.utc) - timedelta(
days=settings.HARD_BOUNCE_ALLOWED_DAYS
)
if self.last_hard_bounce > last_hard_bounce_allowed:
return BounceStatus(True, "hard")
self.last_hard_bounce = None
self.save()
if self.last_soft_bounce:
last_soft_bounce_allowed = datetime.now(timezone.utc) - timedelta(
days=settings.SOFT_BOUNCE_ALLOWED_DAYS
)
if self.last_soft_bounce > last_soft_bounce_allowed:
return BounceStatus(True, "soft")
self.last_soft_bounce = None
self.save()
return BounceStatus(False, "")
def bounce_status(self):
return self.check_bounce_pause()
def next_email_try(self):
bounce_pause, bounce_type = self.check_bounce_pause()
if not bounce_pause:
return datetime.now(timezone.utc)
if bounce_type == "soft":
assert self.last_soft_bounce
return self.last_soft_bounce + timedelta(
days=settings.SOFT_BOUNCE_ALLOWED_DAYS
)
assert bounce_type == "hard"
assert self.last_hard_bounce
return self.last_hard_bounce + timedelta(days=settings.HARD_BOUNCE_ALLOWED_DAYS)
def last_bounce_date(self):
if self.last_hard_bounce:
return self.last_hard_bounce
if self.last_soft_bounce:
return self.last_soft_bounce
return None
def at_max_free_aliases(self) -> bool:
relay_addresses_count: int = self.relay_addresses.count()
return relay_addresses_count >= settings.MAX_NUM_FREE_ALIASES
def fxa(self) -> SocialAccount | None:
# Note: we are NOT using .filter() here because it invalidates
# any profile instances that were queried with prefetch_related, which
# we use in at least the profile view to minimize queries
assert hasattr(self.user, "socialaccount_set")
for sa in self.user.socialaccount_set.all():
if sa.provider == "fxa":
return sa
return None
def display_name(self) -> str | None:
# if display name is not set on FxA the
# displayName key will not exist on the extra_data
if fxa := self.fxa:
name = fxa.extra_data.get("displayName")
return name if name is None else str(name)
return None
def custom_domain(self) -> str:
assert self.subdomain
return f"@{self.subdomain}.{settings.MOZMAIL_DOMAIN}"
def has_premium(self) -> bool:
# FIXME: as we don't have all the tiers defined we are over-defining
# this to mark the user as a premium user as well
if not self.fxa:
return False
for premium_domain in PREMIUM_DOMAINS:
if self.user.email.endswith(f"@{premium_domain}"):
return True
user_subscriptions = self.fxa.extra_data.get("subscriptions", [])
for sub in settings.SUBSCRIPTIONS_WITH_UNLIMITED:
if sub in user_subscriptions:
return True
return False
def has_phone(self) -> bool:
if not self.fxa:
return False
if settings.RELAY_CHANNEL != "prod" and not settings.IN_PYTEST:
if not flag_is_active_in_task("phones", self.user):
return False
if flag_is_active_in_task("free_phones", self.user):
return True
user_subscriptions = self.fxa.extra_data.get("subscriptions", [])
for sub in settings.SUBSCRIPTIONS_WITH_PHONE:
if sub in user_subscriptions:
return True
return False
def has_vpn(self):
if not self.fxa:
return False
user_subscriptions = self.fxa.extra_data.get("subscriptions", [])
for sub in settings.SUBSCRIPTIONS_WITH_VPN:
if sub in user_subscriptions:
return True
return False
def emails_forwarded(self):
return (
sum(ra.num_forwarded for ra in self.relay_addresses)
+ sum(da.num_forwarded for da in self.domain_addresses)
+ self.num_email_forwarded_in_deleted_address
)
def emails_blocked(self):
return (
sum(ra.num_blocked for ra in self.relay_addresses)
+ sum(da.num_blocked for da in self.domain_addresses)
+ self.num_email_blocked_in_deleted_address
)
def emails_replied(self):
# Once Django is on version 4.0 and above, we can set the default=0
# and return a int instead of None
# https://docs.djangoproject.com/en/4.0/ref/models/querysets/#default
totals = [self.relay_addresses.aggregate(models.Sum("num_replied"))]
totals.append(self.domain_addresses.aggregate(models.Sum("num_replied")))
total_num_replied = 0
for num in totals:
total_num_replied += (
num.get("num_replied__sum") if num.get("num_replied__sum") else 0
)
return total_num_replied + self.num_email_replied_in_deleted_address
def level_one_trackers_blocked(self):
return (
sum(ra.num_level_one_trackers_blocked or 0 for ra in self.relay_addresses)
+ sum(
da.num_level_one_trackers_blocked or 0 for da in self.domain_addresses
)
+ (self.num_level_one_trackers_blocked_in_deleted_address or 0)
)
def joined_before_premium_release(self):
date_created = self.user.date_joined
return date_created < datetime.fromisoformat("2021-10-22 17:00:00+00:00")
def date_phone_registered(self) -> datetime | None:
if not settings.PHONES_ENABLED:
return None
try:
real_phone = RealPhone.objects.get(user=self.user)
relay_number = RelayNumber.objects.get(user=self.user)
except RealPhone.DoesNotExist:
return None
except RelayNumber.DoesNotExist:
return real_phone.verified_date
return relay_number.created_at or real_phone.verified_date
def add_subdomain(self, subdomain):
# Handles if the subdomain is "" or None
if not subdomain:
raise CannotMakeSubdomainException(
"error-subdomain-cannot-be-empty-or-null"
)
# subdomain must be all lowercase
subdomain = subdomain.lower()
if not self.has_premium:
raise CannotMakeSubdomainException("error-premium-set-subdomain")
if self.subdomain is not None:
raise CannotMakeSubdomainException("error-premium-cannot-change-subdomain")
self.subdomain = subdomain
# The validator defined in the subdomain field does not get run in full_clean()
# when self.subdomain is "" or None, so we need to run the validator again to
# catch these cases.
valid_available_subdomain(subdomain)
self.full_clean()
self.save()
RegisteredSubdomain.objects.create(subdomain_hash=hash_subdomain(subdomain))
return subdomain
def update_abuse_metric(
    self,
    address_created=False,
    replied=False,
    email_forwarded=False,
    forwarded_email_size=0,
) -> datetime | None:
    """Record one unit of activity and flag the account if a daily cap is hit.

    Increments today's AbuseMetrics counters for the given activity flags,
    then compares them against the configured per-day maximums. When any cap
    is reached, sets ``last_account_flagged`` to now and logs for secops.
    Returns ``self.last_account_flagged`` (its prior value — possibly None —
    when no cap was hit on this call).
    """
    # TODO MPP-3720: This should be wrapped in atomic or select_for_update to ensure
    # race conditions are properly handled.
    # look for abuse metrics created on the same UTC date, regardless of time.
    midnight_utc_today = datetime.combine(
        datetime.now(timezone.utc).date(), datetime.min.time()
    ).astimezone(timezone.utc)
    midnight_utc_tomorrow = midnight_utc_today + timedelta(days=1)
    abuse_metric = self.user.abusemetrics_set.filter(
        first_recorded__gte=midnight_utc_today,
        first_recorded__lt=midnight_utc_tomorrow,
    ).first()
    if not abuse_metric:
        abuse_metric = AbuseMetrics.objects.create(user=self.user)
        # Only today's row is needed going forward; prune older days.
        AbuseMetrics.objects.filter(first_recorded__lt=midnight_utc_today).delete()

    # increment the abuse metric for the activity being recorded
    if address_created:
        abuse_metric.num_address_created_per_day += 1
    if replied:
        abuse_metric.num_replies_per_day += 1
    if email_forwarded:
        abuse_metric.num_email_forwarded_per_day += 1
    if forwarded_email_size > 0:
        abuse_metric.forwarded_email_size_per_day += forwarded_email_size
    abuse_metric.last_recorded = datetime.now(timezone.utc)
    abuse_metric.save()

    # check whether the user should be flagged for abuse
    # (previously these four flags were first initialized to False and then
    # immediately reassigned — the dead assignments are removed)
    hit_max_create = (
        abuse_metric.num_address_created_per_day
        >= settings.MAX_ADDRESS_CREATION_PER_DAY
    )
    hit_max_replies = (
        abuse_metric.num_replies_per_day >= settings.MAX_REPLIES_PER_DAY
    )
    hit_max_forwarded = (
        abuse_metric.num_email_forwarded_per_day >= settings.MAX_FORWARDED_PER_DAY
    )
    hit_max_forwarded_email_size = (
        abuse_metric.forwarded_email_size_per_day
        >= settings.MAX_FORWARDED_EMAIL_SIZE_PER_DAY
    )
    if (
        hit_max_create
        or hit_max_replies
        or hit_max_forwarded
        or hit_max_forwarded_email_size
    ):
        self.last_account_flagged = datetime.now(timezone.utc)
        self.save()
        data = {
            "uid": self.fxa.uid if self.fxa else None,
            "flagged": self.last_account_flagged.timestamp(),
            "replies": abuse_metric.num_replies_per_day,
            "addresses": abuse_metric.num_address_created_per_day,
            "forwarded": abuse_metric.num_email_forwarded_per_day,
            "forwarded_size_in_bytes": abuse_metric.forwarded_email_size_per_day,
        }
        # log for further secops review
        abuse_logger.info("Abuse flagged", extra=data)
    return self.last_account_flagged
def is_flagged(self):
    """True while the account is still inside the abuse-pause window."""
    if not self.last_account_flagged:
        # never flagged
        return False
    pause_ends = self.last_account_flagged + timedelta(
        days=settings.PREMIUM_FEATURE_PAUSED_DAYS
    )
    # Still paused unless the pause window has fully elapsed.
    return datetime.now(timezone.utc) <= pause_ends
def metrics_enabled(self) -> bool:
    """
    Does the user allow us to record technical and interaction data?

    Based on the Mozilla accounts "metricsEnabled" opt-out (added around
    2022, under Data Collection and Use). The setting defaults to On, and
    older Relay accounts that lack the key are also treated as On.
    """
    if not self.fxa:
        return True
    return bool(self.fxa.extra_data.get("metricsEnabled", True))
def plan(self) -> Literal["free", "email", "phone", "bundle"]:
    """The user's Relay plan as a string."""
    if not self.has_premium:
        return "free"
    if not self.has_phone:
        return "email"
    return "bundle" if self.has_vpn else "phone"
def plan_term(self) -> Literal[None, "unknown", "1_month", "1_year"]:
    """The user's Relay plan term as a string."""
    current_plan = self.plan
    if current_plan == "free":
        return None
    if current_plan == "phone":
        start = self.date_phone_subscription_start
        end = self.date_phone_subscription_end
        if start and end:
            # A span longer than a month (with slack) means a yearly term.
            return "1_year" if (end - start).days > 32 else "1_month"
    return "unknown"
def metrics_premium_status(self) -> str:
    """Plan label for metrics: "free" or "<plan>_<term>"."""
    current_plan = self.plan
    if current_plan == "free":
        return "free"
    return f"{current_plan}_{self.plan_term}"
class AccountIsPausedException(CannotMakeAddressException):
    """Raised when a flagged account tries to create an address while paused."""

    status_code = 403
    default_code = "account_is_paused"
    default_detail = "Your account is on pause."
class DomainAddrFreeTierException(CannotMakeAddressException):
    """Raised when a free-tier user tries to create a custom-subdomain mask."""

    status_code = 403
    default_code = "free_tier_no_subdomain_masks"
    default_detail = (
        "Your free account does not include custom subdomains for masks."
        " To create custom masks, upgrade to Relay Premium."
    )
class DomainAddrNeedSubdomainException(CannotMakeAddressException):
    """Raised when a premium user has not yet chosen a subdomain."""

    status_code = 400
    default_code = "need_subdomain"
    default_detail = "Please select a subdomain before creating a custom email address."
def check_user_can_make_domain_address(user_profile: Profile) -> None:
    """Raise if *user_profile* may not create a domain (custom) address.

    Checks in order: premium subscription, a chosen subdomain, and the
    abuse-pause flag; raises the matching CannotMakeAddressException
    subclass for the first failed check. Returns None when all pass.
    """
    if not user_profile.has_premium:
        raise DomainAddrFreeTierException()
    if not user_profile.subdomain:
        raise DomainAddrNeedSubdomainException()
    if user_profile.is_flagged:
        raise AccountIsPausedException()
154,456 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of 0 for num_deleted_relay_addresses and num_deleted_domain_addresses, for PostgreSQL and SQLite3. Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add a database default of 0 for num_deleted_relay_addresses and
    num_deleted_domain_addresses, for PostgreSQL and SQLite3

    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    if schema_editor.connection.vendor.startswith("postgres"):
        # PostgreSQL can change column defaults in place.
        schema_editor.execute(
            'ALTER TABLE "emails_profile"'
            ' ALTER COLUMN "num_deleted_relay_addresses" SET DEFAULT 0,'
            ' ALTER COLUMN "num_deleted_domain_addresses" SET DEFAULT 0;'
        )
    elif schema_editor.connection.vendor.startswith("sqlite"):
        # SQLite has no ALTER COLUMN; rebuild the table with the new
        # defaults, copy the rows, drop the old table, rename the new one,
        # and recreate the indexes.
        schema_editor.execute(
            """
            CREATE TABLE "new__emails_profile" (
                "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
                "num_deleted_relay_addresses" integer unsigned NOT NULL CHECK (
                    "num_deleted_relay_addresses" >= 0
                ) DEFAULT 0,
                "num_deleted_domain_addresses" integer unsigned NOT NULL CHECK (
                    "num_deleted_domain_addresses" >= 0
                ) DEFAULT 0,
                "api_token" char(32) NOT NULL,
                "user_id" integer NOT NULL UNIQUE REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,
                "address_last_deleted" datetime NULL,
                "num_address_deleted" integer unsigned NOT NULL CHECK ("num_address_deleted" >= 0),
                "last_hard_bounce" datetime NULL,
                "last_soft_bounce" datetime NULL,
                "subdomain" varchar(63) NULL UNIQUE,
                "server_storage" bool NOT NULL,
                "num_email_blocked_in_deleted_address" integer unsigned NOT NULL CHECK (
                    "num_email_blocked_in_deleted_address" >= 0
                ),
                "num_email_forwarded_in_deleted_address" integer unsigned NOT NULL CHECK (
                    "num_email_forwarded_in_deleted_address" >= 0
                ),
                "num_email_spam_in_deleted_address" integer unsigned NOT NULL CHECK (
                    "num_email_spam_in_deleted_address" >= 0
                ),
                "onboarding_state" integer unsigned NOT NULL CHECK ("onboarding_state" >= 0),
                "last_account_flagged" datetime NULL,
                "date_subscribed" datetime NULL,
                "auto_block_spam" bool NOT NULL,
                "num_email_replied_in_deleted_address" integer unsigned NOT NULL CHECK (
                    "num_email_replied_in_deleted_address" >= 0
                ),
                "remove_level_one_email_trackers" bool NULL,
                "num_level_one_trackers_blocked_in_deleted_address" integer unsigned NULL CHECK (
                    "num_level_one_trackers_blocked_in_deleted_address" >= 0
                ),
                "store_phone_log" bool NOT NULL,
                "date_phone_subscription_checked" datetime NULL,
                "date_subscribed_phone" datetime NULL,
                "forwarded_first_reply" bool NOT NULL,
                "date_phone_subscription_end" datetime NULL,
                "date_phone_subscription_reset" datetime NULL,
                "date_phone_subscription_start" datetime NULL,
                "created_by" varchar(63) NULL,
                "sent_welcome_email" bool NOT NULL,
                "onboarding_free_state" integer unsigned NOT NULL CHECK ("onboarding_free_state" >= 0),
                "last_engagement" datetime NULL
            );
            """
        )
        # Copy every row; the two new defaulted columns are backfilled to 0.
        schema_editor.execute(
            """
            INSERT INTO "new__emails_profile" (
                "id", "api_token", "user_id", "address_last_deleted",
                "num_address_deleted", "last_hard_bounce",
                "last_soft_bounce", "subdomain",
                "server_storage", "num_email_blocked_in_deleted_address",
                "num_email_forwarded_in_deleted_address",
                "num_email_spam_in_deleted_address",
                "onboarding_state", "last_account_flagged",
                "date_subscribed", "auto_block_spam",
                "num_email_replied_in_deleted_address",
                "remove_level_one_email_trackers",
                "num_level_one_trackers_blocked_in_deleted_address",
                "store_phone_log", "date_phone_subscription_checked",
                "date_subscribed_phone", "forwarded_first_reply",
                "date_phone_subscription_end",
                "date_phone_subscription_reset",
                "date_phone_subscription_start",
                "created_by", "sent_welcome_email",
                "onboarding_free_state", "last_engagement",
                "num_deleted_domain_addresses",
                "num_deleted_relay_addresses"
            )
            SELECT
                "id",
                "api_token",
                "user_id",
                "address_last_deleted",
                "num_address_deleted",
                "last_hard_bounce",
                "last_soft_bounce",
                "subdomain",
                "server_storage",
                "num_email_blocked_in_deleted_address",
                "num_email_forwarded_in_deleted_address",
                "num_email_spam_in_deleted_address",
                "onboarding_state",
                "last_account_flagged",
                "date_subscribed",
                "auto_block_spam",
                "num_email_replied_in_deleted_address",
                "remove_level_one_email_trackers",
                "num_level_one_trackers_blocked_in_deleted_address",
                "store_phone_log",
                "date_phone_subscription_checked",
                "date_subscribed_phone",
                "forwarded_first_reply",
                "date_phone_subscription_end",
                "date_phone_subscription_reset",
                "date_phone_subscription_start",
                "created_by",
                "sent_welcome_email",
                "onboarding_free_state",
                "last_engagement",
                0,
                0
            FROM
                "emails_profile";
            """
        )
        schema_editor.execute('DROP TABLE "emails_profile";')
        schema_editor.execute(
            'ALTER TABLE "new__emails_profile" RENAME TO "emails_profile";'
        )
        # Recreate the secondary indexes lost with the dropped table.
        schema_editor.execute(
            'CREATE INDEX "emails_profile_address_last_deleted_188d9e79" ON "emails_profile" ("address_last_deleted");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_hard_bounce_fefe494f" ON "emails_profile" ("last_hard_bounce");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_soft_bounce_642ab37d" ON "emails_profile" ("last_soft_bounce");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_account_flagged_f40cbf85" ON "emails_profile" ("last_account_flagged");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_engagement_0c398b6a" ON "emails_profile" ("last_engagement");'
        )
    else:
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
154,457 | from django.db import migrations
from emails.models import hash_subdomain
def hash_subdomain(subdomain, domain=settings.MOZMAIL_DOMAIN):
    """Return the SHA-256 hex digest of "<subdomain>.<domain>"."""
    full_domain = f"{subdomain}.{domain}"
    return sha256(full_domain.encode("utf-8")).hexdigest()
def delete_all_later_duplicate_subdomains(apps, schema_editor):
    """Data migration: resolve case-insensitive duplicate subdomains.

    For each subdomain registered by more than one profile (ignoring case),
    keep it on the earliest-joined user and clear it from all later ones.
    Then lowercase every remaining subdomain and record its hash in
    RegisteredSubdomain so it can never be re-registered.
    """
    Profile = apps.get_model("emails", "Profile")
    RegisteredSubdomain = apps.get_model("emails", "RegisteredSubdomain")
    profiles_with_subdomain = (
        Profile.objects.all().exclude(subdomain=None).order_by("user__date_joined")
    )
    # find all duplicate subdomains
    duplicate_subdomains = set()
    for profile in profiles_with_subdomain:
        num_later_subdomain_registrations = Profile.objects.filter(
            subdomain__iexact=profile.subdomain,  # case-insensitive; TODO confirm iexact is intended
            user__date_joined__gt=profile.user.date_joined,
        ).count()
        if num_later_subdomain_registrations > 0:
            duplicate_subdomains.add(profile.subdomain.lower())
    # remove duplicate subdomains
    for dupe in duplicate_subdomains:
        # the earliest-joined registrant keeps the subdomain
        profile = (
            Profile.objects.filter(subdomain__iexact=dupe)
            .order_by("user__date_joined")
            .first()
        )
        later_subdomain_registrations = Profile.objects.filter(
            subdomain__iexact=profile.subdomain,
            user__date_joined__gt=profile.user.date_joined,
        )
        print(
            "found case-insensitive duplicate subdomains of "
            + f"{profile.user.username}"
        )
        for dupe_subdomain_profile in later_subdomain_registrations:
            # empty out the subdomain of any new profiles that were
            # erroneously allowed to register a duplicate subdomain
            print(
                "clearing subdomain for: " + f"{dupe_subdomain_profile.user.username}"
            )
            dupe_subdomain_profile.subdomain = None
            dupe_subdomain_profile.save()
    # lowercase all subdomains and
    # create RegisteredSubdomain for the lower cased subdomain
    reduced_profiles_with_subdomain = (
        Profile.objects.all().exclude(subdomain=None).order_by("user__date_joined")
    )
    for oldest_profile in reduced_profiles_with_subdomain:
        # lowercase subdomain of every profile
        oldest_profile.subdomain = oldest_profile.subdomain.lower()
        oldest_profile.save()
        registered_subdomain_exists = (
            RegisteredSubdomain.objects.filter(
                subdomain_hash=hash_subdomain(oldest_profile.subdomain)
            ).count()
            > 0
        )
        if not registered_subdomain_exists:
            RegisteredSubdomain.objects.create(
                subdomain_hash=hash_subdomain(oldest_profile.subdomain)
            )
154,458 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of TRUE for store_phone_log Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add a database default of TRUE for store_phone_log
    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    if schema_editor.connection.vendor.startswith("postgres"):
        # PostgreSQL can change a column default in place.
        schema_editor.execute(
            'ALTER TABLE "emails_profile"'
            ' ALTER COLUMN "store_phone_log" SET DEFAULT true;'
        )
    elif schema_editor.connection.vendor.startswith("sqlite"):
        # SQLite has no ALTER COLUMN; rebuild the table with the default,
        # copy the rows (store_phone_log backfilled to 1/TRUE in the SELECT
        # below), then swap tables and recreate the indexes.
        schema_editor.execute(
            'CREATE TABLE "new__emails_profile"'
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "api_token" char(32) NOT NULL,'
            ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'
            ' "address_last_deleted" datetime NULL,'
            ' "num_address_deleted" integer unsigned NOT NULL CHECK ("num_address_deleted" >= 0),'
            ' "last_hard_bounce" datetime NULL,'
            ' "last_soft_bounce" datetime NULL,'
            ' "subdomain" varchar(63) NULL UNIQUE,'
            ' "server_storage" bool NOT NULL,'
            ' "store_phone_log" bool NOT NULL DEFAULT 1,'
            ' "num_email_blocked_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_blocked_in_deleted_address" >= 0),'
            ' "num_email_forwarded_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_forwarded_in_deleted_address" >= 0),'
            ' "num_email_spam_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_spam_in_deleted_address" >= 0),'
            ' "onboarding_state" integer unsigned NOT NULL CHECK ("onboarding_state" >= 0),'
            ' "last_account_flagged" datetime NULL,'
            ' "date_subscribed" datetime NULL,'
            ' "auto_block_spam" bool NOT NULL,'
            ' "num_email_replied_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_replied_in_deleted_address" >= 0),'
            ' "remove_level_one_email_trackers" bool NULL,'
            ' "num_level_one_trackers_blocked_in_deleted_address" integer unsigned NULL CHECK ("num_level_one_trackers_blocked_in_deleted_address" >= 0));'
        )
        schema_editor.execute(
            'INSERT INTO "new__emails_profile"'
            ' ("id",'
            ' "api_token",'
            ' "user_id",'
            ' "address_last_deleted",'
            ' "num_address_deleted",'
            ' "last_hard_bounce",'
            ' "last_soft_bounce",'
            ' "subdomain",'
            ' "server_storage",'
            ' "store_phone_log",'
            ' "num_email_blocked_in_deleted_address",'
            ' "num_email_forwarded_in_deleted_address",'
            ' "num_email_spam_in_deleted_address",'
            ' "onboarding_state",'
            ' "last_account_flagged",'
            ' "date_subscribed",'
            ' "auto_block_spam",'
            ' "num_email_replied_in_deleted_address",'
            ' "remove_level_one_email_trackers",'
            ' "num_level_one_trackers_blocked_in_deleted_address")'
            "SELECT "
            ' "id",'
            ' "api_token",'
            ' "user_id",'
            ' "address_last_deleted",'
            ' "num_address_deleted",'
            ' "last_hard_bounce",'
            ' "last_soft_bounce",'
            ' "subdomain",'
            ' "server_storage",'
            " 1,"
            ' "num_email_blocked_in_deleted_address",'
            ' "num_email_forwarded_in_deleted_address",'
            ' "num_email_spam_in_deleted_address",'
            ' "onboarding_state",'
            ' "last_account_flagged",'
            ' "date_subscribed",'
            ' "auto_block_spam",'
            ' "num_email_replied_in_deleted_address",'
            ' "remove_level_one_email_trackers",'
            ' "num_level_one_trackers_blocked_in_deleted_address"'
            ' FROM "emails_profile";'
        )
        schema_editor.execute('DROP TABLE "emails_profile";')
        schema_editor.execute(
            'ALTER TABLE "new__emails_profile" RENAME TO "emails_profile";'
        )
        # Recreate the secondary indexes lost with the dropped table.
        schema_editor.execute(
            'CREATE INDEX "emails_profile_user_id_4780e024" ON "emails_profile" ("user_id");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_address_last_deleted_188d9e79" ON "emails_profile" ("address_last_deleted");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_hard_bounce_fefe494f" ON "emails_profile" ("last_hard_bounce")'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_soft_bounce_642ab37d" ON "emails_profile" ("last_soft_bounce")'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_account_flagged_f40cbf85" ON "emails_profile" ("last_account_flagged");'
        )
    else:
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
154,459 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of false for forwarded_first_reply, for PostgreSQL and SQLite3. Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add a database default of false for forwarded_first_reply,
    for PostgreSQL and SQLite3

    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    if schema_editor.connection.vendor.startswith("postgres"):
        # PostgreSQL can change a column default in place.
        schema_editor.execute(
            'ALTER TABLE "emails_profile"'
            ' ALTER COLUMN "forwarded_first_reply" SET DEFAULT false;'
        )
    elif schema_editor.connection.vendor.startswith("sqlite"):
        # SQLite has no ALTER COLUMN; rebuild the table, copy the rows with
        # forwarded_first_reply backfilled to false, then swap tables and
        # recreate the indexes.
        # NOTE(review): the rebuilt column is declared NULL-able here —
        # confirm a later migration restores NOT NULL.
        schema_editor.execute(
            'CREATE TABLE "new__emails_profile" '
            '("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "api_token" char(32) NOT NULL,'
            ' "user_id" integer NOT NULL UNIQUE REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'
            ' "address_last_deleted" datetime NULL,'
            ' "num_address_deleted" integer unsigned NOT NULL CHECK ("num_address_deleted" >= 0),'
            ' "last_hard_bounce" datetime NULL,'
            ' "last_soft_bounce" datetime NULL,'
            ' "subdomain" varchar(63) NULL UNIQUE,'
            ' "server_storage" bool NOT NULL,'
            ' "num_email_blocked_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_blocked_in_deleted_address" >= 0),'
            ' "num_email_forwarded_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_forwarded_in_deleted_address" >= 0),'
            ' "num_email_spam_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_spam_in_deleted_address" >= 0),'
            ' "onboarding_state" integer unsigned NOT NULL CHECK ("onboarding_state" >= 0),'
            ' "last_account_flagged" datetime NULL,'
            ' "date_subscribed" datetime NULL,'
            ' "auto_block_spam" bool NOT NULL,'
            ' "num_email_replied_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_replied_in_deleted_address" >= 0),'
            ' "remove_level_one_email_trackers" bool NULL,'
            ' "num_level_one_trackers_blocked_in_deleted_address" integer unsigned NULL CHECK ("num_level_one_trackers_blocked_in_deleted_address" >= 0),'
            ' "store_phone_log" bool NOT NULL,'
            ' "date_phone_subscription_checked" datetime NULL,'
            ' "date_subscribed_phone" datetime NULL,'
            ' "forwarded_first_reply" bool NULL);'
        )
        schema_editor.execute(
            'INSERT INTO "new__emails_profile"'
            ' ("id",'
            ' "api_token",'
            ' "user_id",'
            ' "address_last_deleted",'
            ' "num_address_deleted",'
            ' "last_hard_bounce",'
            ' "last_soft_bounce",'
            ' "subdomain",'
            ' "server_storage",'
            ' "num_email_blocked_in_deleted_address",'
            ' "num_email_forwarded_in_deleted_address",'
            ' "num_email_spam_in_deleted_address",'
            ' "onboarding_state",'
            ' "last_account_flagged",'
            ' "date_subscribed",'
            ' "auto_block_spam",'
            ' "num_email_replied_in_deleted_address",'
            ' "remove_level_one_email_trackers",'
            ' "num_level_one_trackers_blocked_in_deleted_address",'
            ' "store_phone_log",'
            ' "date_phone_subscription_checked",'
            ' "date_subscribed_phone",'
            ' "forwarded_first_reply")'
            " SELECT "
            ' "id",'
            ' "api_token",'
            ' "user_id",'
            ' "address_last_deleted",'
            ' "num_address_deleted",'
            ' "last_hard_bounce",'
            ' "last_soft_bounce",'
            ' "subdomain",'
            ' "server_storage",'
            ' "num_email_blocked_in_deleted_address",'
            ' "num_email_forwarded_in_deleted_address",'
            ' "num_email_spam_in_deleted_address",'
            ' "onboarding_state",'
            ' "last_account_flagged",'
            ' "date_subscribed",'
            ' "auto_block_spam",'
            ' "num_email_replied_in_deleted_address",'
            ' "remove_level_one_email_trackers",'
            ' "num_level_one_trackers_blocked_in_deleted_address",'
            ' "store_phone_log",'
            ' "date_phone_subscription_checked",'
            ' "date_subscribed_phone",'
            " false"
            ' FROM "emails_profile";'
        )
        schema_editor.execute('DROP TABLE "emails_profile";')
        schema_editor.execute(
            'ALTER TABLE "new__emails_profile" RENAME TO "emails_profile";'
        )
        # Recreate the secondary indexes lost with the dropped table.
        schema_editor.execute(
            'CREATE INDEX "emails_profile_address_last_deleted_188d9e79" ON "emails_profile" ("address_last_deleted");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_hard_bounce_fefe494f" ON "emails_profile" ("last_hard_bounce");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_soft_bounce_642ab37d" ON "emails_profile" ("last_soft_bounce");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_account_flagged_f40cbf85" ON "emails_profile" ("last_account_flagged");'
        )
    else:
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
154,460 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of false for sent_welcome_email, for PostgreSQL and SQLite3 Note: set sent_welcome_email = true for existing users Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add a database default of false for sent_welcome_email, for PostgreSQL and SQLite3
    Note: set sent_welcome_email = true for existing users
    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    if schema_editor.connection.vendor.startswith("postgres"):
        schema_editor.execute(
            'ALTER TABLE "emails_profile"'
            ' ALTER COLUMN "sent_welcome_email" SET DEFAULT false;'
        )
        # Set all existing profiles to true so we don't send them another welcome email
        schema_editor.execute(
            'UPDATE "emails_profile"' ' SET "sent_welcome_email" = true;'
        )
    elif schema_editor.connection.vendor.startswith("sqlite"):
        # SQLite has no ALTER COLUMN; rebuild the table and copy the rows.
        # The SELECT below backfills sent_welcome_email = true so existing
        # users are not re-sent the welcome email.
        # NOTE(review): the rebuilt column is declared NULL-able with no
        # DEFAULT — confirm a later migration tightens it.
        schema_editor.execute(
            """
            CREATE TABLE "new__emails_profile" (
                "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
                "api_token" char(32) NOT NULL,
                "user_id" integer NOT NULL UNIQUE REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,
                "address_last_deleted" datetime NULL,
                "num_address_deleted" integer unsigned NOT NULL CHECK ("num_address_deleted" >= 0),
                "last_hard_bounce" datetime NULL,
                "last_soft_bounce" datetime NULL,
                "subdomain" varchar(63) NULL UNIQUE,
                "server_storage" bool NOT NULL,
                "num_email_blocked_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_blocked_in_deleted_address" >= 0),
                "num_email_forwarded_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_forwarded_in_deleted_address" >= 0),
                "num_email_spam_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_spam_in_deleted_address" >= 0),
                "onboarding_state" integer unsigned NOT NULL CHECK ("onboarding_state" >= 0),
                "last_account_flagged" datetime NULL,
                "date_subscribed" datetime NULL,
                "auto_block_spam" bool NOT NULL,
                "num_email_replied_in_deleted_address" integer unsigned NOT NULL CHECK ("num_email_replied_in_deleted_address" >= 0),
                "remove_level_one_email_trackers" bool NULL,
                "num_level_one_trackers_blocked_in_deleted_address" integer unsigned NULL CHECK ("num_level_one_trackers_blocked_in_deleted_address" >= 0),
                "store_phone_log" bool NOT NULL,
                "date_phone_subscription_checked" datetime NULL,
                "date_subscribed_phone" datetime NULL,
                "forwarded_first_reply" bool NOT NULL,
                "date_phone_subscription_end" datetime NULL,
                "date_phone_subscription_reset" datetime NULL,
                "date_phone_subscription_start" datetime NULL,
                "created_by" varchar(63) NULL,
                "sent_welcome_email" bool NULL
            );
            """
        )
        schema_editor.execute(
            'INSERT INTO "new__emails_profile"'
            ' ("id",'
            ' "api_token",'
            ' "user_id",'
            ' "address_last_deleted",'
            ' "num_address_deleted",'
            ' "last_hard_bounce",'
            ' "last_soft_bounce",'
            ' "subdomain",'
            ' "server_storage",'
            ' "num_email_blocked_in_deleted_address",'
            ' "num_email_forwarded_in_deleted_address",'
            ' "num_email_spam_in_deleted_address",'
            ' "onboarding_state",'
            ' "last_account_flagged",'
            ' "date_subscribed",'
            ' "auto_block_spam",'
            ' "num_email_replied_in_deleted_address",'
            ' "remove_level_one_email_trackers",'
            ' "num_level_one_trackers_blocked_in_deleted_address",'
            ' "store_phone_log",'
            ' "date_phone_subscription_checked",'
            ' "date_subscribed_phone",'
            ' "forwarded_first_reply",'
            ' "date_phone_subscription_end",'
            ' "date_phone_subscription_reset",'
            ' "date_phone_subscription_start",'
            ' "created_by",'
            ' "sent_welcome_email" '
            ")"
            " SELECT "
            ' "id",'
            ' "api_token",'
            ' "user_id",'
            ' "address_last_deleted",'
            ' "num_address_deleted",'
            ' "last_hard_bounce",'
            ' "last_soft_bounce",'
            ' "subdomain",'
            ' "server_storage",'
            ' "num_email_blocked_in_deleted_address",'
            ' "num_email_forwarded_in_deleted_address",'
            ' "num_email_spam_in_deleted_address",'
            ' "onboarding_state",'
            ' "last_account_flagged",'
            ' "date_subscribed",'
            ' "auto_block_spam",'
            ' "num_email_replied_in_deleted_address",'
            ' "remove_level_one_email_trackers",'
            ' "num_level_one_trackers_blocked_in_deleted_address",'
            ' "store_phone_log",'
            ' "date_phone_subscription_checked",'
            ' "date_subscribed_phone",'
            ' "forwarded_first_reply",'
            ' "date_phone_subscription_end",'
            ' "date_phone_subscription_reset",'
            ' "date_phone_subscription_start",'
            ' "created_by",'
            " true "
            ' FROM "emails_profile";'
        )
        schema_editor.execute('DROP TABLE "emails_profile";')
        schema_editor.execute(
            'ALTER TABLE "new__emails_profile" RENAME TO "emails_profile";'
        )
        # Recreate the secondary indexes lost with the dropped table.
        schema_editor.execute(
            'CREATE INDEX "emails_profile_address_last_deleted_188d9e79" ON "emails_profile" ("address_last_deleted");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_hard_bounce_fefe494f" ON "emails_profile" ("last_hard_bounce");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_soft_bounce_642ab37d" ON "emails_profile" ("last_soft_bounce");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_profile_last_account_flagged_f40cbf85" ON "emails_profile" ("last_account_flagged");'
        )
    else:
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
154,461 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add field forwarded_email_size_per_day and num_email_forwarded_per_day to abusemetrics Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add field forwarded_email_size_per_day and num_email_forwarded_per_day to abusemetrics
    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    if schema_editor.connection.vendor.startswith("postgres"):
        # PostgreSQL can set the new columns' defaults in place.
        schema_editor.execute(
            'ALTER TABLE "emails_abusemetrics"'
            ' ALTER COLUMN "forwarded_email_size_per_day" SET DEFAULT 0,'
            ' ALTER COLUMN "num_email_forwarded_per_day" SET DEFAULT 0'
        )
    elif schema_editor.connection.vendor.startswith("sqlite"):
        # SQLite has no ALTER COLUMN; rebuild the table with the defaulted
        # columns (backfilled to 0), then swap tables and recreate the
        # unique constraint and indexes.
        schema_editor.execute(
            'CREATE TABLE "new__emails_abusemetrics" ('
            ' "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "first_recorded" datetime NOT NULL,'
            ' "last_recorded" datetime NOT NULL,'
            ' "num_address_created_per_day" smallint unsigned NOT NULL CHECK ("num_address_created_per_day" >= 0),'
            ' "num_replies_per_day" smallint unsigned NOT NULL CHECK ("num_replies_per_day" >= 0),'
            ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'
            ' "forwarded_email_size_per_day" bigint unsigned NOT NULL DEFAULT 0 CHECK ("forwarded_email_size_per_day" >= 0),'
            ' "num_email_forwarded_per_day" smallint unsigned NOT NULL DEFAULT 0 CHECK ("num_email_forwarded_per_day" >= 0));'
        )
        schema_editor.execute(
            'INSERT INTO "new__emails_abusemetrics" ('
            ' "id",'
            ' "first_recorded",'
            ' "last_recorded",'
            ' "num_address_created_per_day",'
            ' "num_replies_per_day",'
            ' "user_id",'
            ' "forwarded_email_size_per_day",'
            ' "num_email_forwarded_per_day") SELECT "id",'
            ' "first_recorded",'
            ' "last_recorded",'
            ' "num_address_created_per_day",'
            ' "num_replies_per_day",'
            ' "user_id",'
            " 0,"
            ' 0 FROM "emails_abusemetrics";'
        )
        schema_editor.execute('DROP TABLE "emails_abusemetrics";')
        schema_editor.execute(
            'ALTER TABLE "new__emails_abusemetrics" RENAME TO "emails_abusemetrics";'
        )
        schema_editor.execute(
            'CREATE UNIQUE INDEX "emails_abusemetrics_user_id_first_recorded_e22d0d1c_uniq" ON "emails_abusemetrics" ("user_id", "first_recorded");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_abusemetrics_first_recorded_640bb4fd" ON "emails_abusemetrics" ("first_recorded");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_abusemetrics_last_recorded_d592fe63" ON "emails_abusemetrics" ("last_recorded");'
        )
        schema_editor.execute(
            'CREATE INDEX "emails_abusemetrics_user_id_cf2800c3" ON "emails_abusemetrics" ("user_id");'
        )
    else:
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
154,462 | import uuid
from django.db import migrations
def populate_api_tokens(apps, schema_editor):
    """Backfill: create a Profile for every User that lacks one.

    Profile's api_token field is populated by its model default on create.
    """
    User = apps.get_model("auth", "User")
    Profile = apps.get_model("emails", "Profile")
    for user in User.objects.all():
        if not Profile.objects.filter(user=user):
            Profile.objects.create(user=user)
154,463 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of 0 for onboarding_free_state, for PostgreSQL and SQLite3. Note: set onboarding_free_state = 0 for existing users so they also see the free onboarding. Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
"""
Add a database default of false for sent_welcome_email, for PostgreSQL and SQLite3
Note: set sent_welcome_email = true for existing users
Using `./manage.py sqlmigrate` for the SQL, and the technique from:
https://stackoverflow.com/a/45232678/10612
"""
if schema_editor.connection.vendor.startswith("postgres"):
schema_editor.execute(
'ALTER TABLE "emails_profile"'
' ALTER COLUMN "onboarding_free_state" SET DEFAULT 0;'
)
# Set all existing profiles to 0 so existing users also see the free onboarding
schema_editor.execute(
'UPDATE "emails_profile"' ' SET "onboarding_free_state" = 0;'
)
elif schema_editor.connection.vendor.startswith("sqlite"):
schema_editor.execute(
"""
CREATE TABLE "new__emails_profile" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"onboarding_free_state" integer unsigned NOT NULL CHECK ("onboarding_free_state" >= 0) DEFAULT 0,
"api_token" char(32) NOT NULL,
"user_id" integer NOT NULL UNIQUE REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,
"address_last_deleted" datetime NULL,
"num_address_deleted" integer unsigned NOT NULL CHECK ("num_address_deleted" >= 0),
"last_hard_bounce" datetime NULL,
"last_soft_bounce" datetime NULL,
"subdomain" varchar(63) NULL UNIQUE,
"server_storage" bool NOT NULL,
"num_email_blocked_in_deleted_address" integer unsigned NOT NULL CHECK (
"num_email_blocked_in_deleted_address" >= 0
),
"num_email_forwarded_in_deleted_address" integer unsigned NOT NULL CHECK (
"num_email_forwarded_in_deleted_address" >= 0
),
"num_email_spam_in_deleted_address" integer unsigned NOT NULL CHECK (
"num_email_spam_in_deleted_address" >= 0
),
"onboarding_state" integer unsigned NOT NULL CHECK ("onboarding_state" >= 0),
"last_account_flagged" datetime NULL,
"date_subscribed" datetime NULL,
"auto_block_spam" bool NOT NULL,
"num_email_replied_in_deleted_address" integer unsigned NOT NULL CHECK (
"num_email_replied_in_deleted_address" >= 0
),
"remove_level_one_email_trackers" bool NULL,
"num_level_one_trackers_blocked_in_deleted_address" integer unsigned NULL CHECK (
"num_level_one_trackers_blocked_in_deleted_address" >= 0
),
"store_phone_log" bool NOT NULL,
"date_phone_subscription_checked" datetime NULL,
"date_subscribed_phone" datetime NULL,
"forwarded_first_reply" bool NOT NULL,
"date_phone_subscription_end" datetime NULL,
"date_phone_subscription_reset" datetime NULL,
"date_phone_subscription_start" datetime NULL,
"created_by" varchar(63) NULL,
"sent_welcome_email" bool NOT NULL
);
"""
)
schema_editor.execute(
"""
INSERT INTO "new__emails_profile" (
"id", "api_token", "user_id", "address_last_deleted",
"num_address_deleted", "last_hard_bounce",
"last_soft_bounce", "subdomain",
"server_storage", "num_email_blocked_in_deleted_address",
"num_email_forwarded_in_deleted_address",
"num_email_spam_in_deleted_address",
"onboarding_state", "last_account_flagged",
"date_subscribed", "auto_block_spam",
"num_email_replied_in_deleted_address",
"remove_level_one_email_trackers",
"num_level_one_trackers_blocked_in_deleted_address",
"store_phone_log", "date_phone_subscription_checked",
"date_subscribed_phone", "forwarded_first_reply",
"date_phone_subscription_end",
"date_phone_subscription_reset",
"date_phone_subscription_start",
"created_by", "sent_welcome_email",
"onboarding_free_state"
)
SELECT
"id",
"api_token",
"user_id",
"address_last_deleted",
"num_address_deleted",
"last_hard_bounce",
"last_soft_bounce",
"subdomain",
"server_storage",
"num_email_blocked_in_deleted_address",
"num_email_forwarded_in_deleted_address",
"num_email_spam_in_deleted_address",
"onboarding_state",
"last_account_flagged",
"date_subscribed",
"auto_block_spam",
"num_email_replied_in_deleted_address",
"remove_level_one_email_trackers",
"num_level_one_trackers_blocked_in_deleted_address",
"store_phone_log",
"date_phone_subscription_checked",
"date_subscribed_phone",
"forwarded_first_reply",
"date_phone_subscription_end",
"date_phone_subscription_reset",
"date_phone_subscription_start",
"created_by",
"sent_welcome_email",
0
FROM
"emails_profile";
"""
)
schema_editor.execute('DROP TABLE "emails_profile";')
schema_editor.execute(
'ALTER TABLE "new__emails_profile" RENAME TO "emails_profile";'
)
schema_editor.execute(
'CREATE INDEX "emails_profile_address_last_deleted_188d9e79" ON "emails_profile" ("address_last_deleted");'
)
schema_editor.execute(
'CREATE INDEX "emails_profile_last_hard_bounce_fefe494f" ON "emails_profile" ("last_hard_bounce");'
)
schema_editor.execute(
'CREATE INDEX "emails_profile_last_soft_bounce_642ab37d" ON "emails_profile" ("last_soft_bounce");'
)
schema_editor.execute(
'CREATE INDEX "emails_profile_last_account_flagged_f40cbf85" ON "emails_profile" ("last_account_flagged");'
)
else:
raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"') | Add a database default of false for sent_welcome_email, for PostgreSQL and SQLite3 Note: set sent_welcome_email = true for existing users Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612 |
154,464 | from django.db import migrations
def copy_profile_tokens_to_rest_framework_authtoken(apps, schema_editor):
Profile = apps.get_model("emails", "Profile")
Token = apps.get_model("authtoken", "Token")
for profile in Profile.objects.all():
Token.objects.create(user=profile.user, key=profile.api_token) | null |
154,465 | from django.db import migrations, models
from emails.models import hash_subdomain
def hash_subdomain(subdomain, domain=settings.MOZMAIL_DOMAIN):
return sha256(f"{subdomain}.{domain}".encode("utf-8")).hexdigest()
def copy_already_registered_subdomains_from_profile(apps, schema_editor):
Profile = apps.get_model("emails", "Profile")
RegisteredSubdomain = apps.get_model("emails", "RegisteredSubdomain")
for profile in Profile.objects.exclude(subdomain=None):
RegisteredSubdomain.objects.create(
subdomain_hash=hash_subdomain(profile.subdomain)
) | null |
154,466 | from hashlib import sha256
import logging
from django.contrib.auth.models import User
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from emails.models import Profile
from emails.utils import incr_if_enabled, set_user_group
class Profile(models.Model):
def __str__(self):
def save(
self,
force_insert: bool = False,
force_update: bool = False,
using: str | None = None,
update_fields: Iterable[str] | None = None,
) -> None:
def language(self):
def fxa_locale_in_premium_country(self) -> bool:
def avatar(self) -> str | None:
def relay_addresses(self):
def domain_addresses(self):
def total_masks(self) -> int:
def at_mask_limit(self) -> bool:
def check_bounce_pause(self):
def bounce_status(self):
def next_email_try(self):
def last_bounce_date(self):
def at_max_free_aliases(self) -> bool:
def fxa(self) -> SocialAccount | None:
def display_name(self) -> str | None:
def custom_domain(self) -> str:
def has_premium(self) -> bool:
def has_phone(self) -> bool:
def has_vpn(self):
def emails_forwarded(self):
def emails_blocked(self):
def emails_replied(self):
def level_one_trackers_blocked(self):
def joined_before_premium_release(self):
def date_phone_registered(self) -> datetime | None:
def add_subdomain(self, subdomain):
def update_abuse_metric(
self,
address_created=False,
replied=False,
email_forwarded=False,
forwarded_email_size=0,
) -> datetime | None:
def is_flagged(self):
def metrics_enabled(self) -> bool:
def plan(self) -> Literal["free", "email", "phone", "bundle"]:
def plan_term(self) -> Literal[None, "unknown", "1_month", "1_year"]:
def metrics_premium_status(self) -> str:
def set_user_group(user):
def create_user_profile(sender, instance, created, **kwargs):
if created:
set_user_group(instance)
Profile.objects.create(user=instance) | null |
154,467 | from hashlib import sha256
import logging
from django.contrib.auth.models import User
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from emails.models import Profile
from emails.utils import incr_if_enabled, set_user_group
info_logger = logging.getLogger("eventsinfo")
class Profile(models.Model):
def __str__(self):
def save(
self,
force_insert: bool = False,
force_update: bool = False,
using: str | None = None,
update_fields: Iterable[str] | None = None,
) -> None:
def language(self):
def fxa_locale_in_premium_country(self) -> bool:
def avatar(self) -> str | None:
def relay_addresses(self):
def domain_addresses(self):
def total_masks(self) -> int:
def at_mask_limit(self) -> bool:
def check_bounce_pause(self):
def bounce_status(self):
def next_email_try(self):
def last_bounce_date(self):
def at_max_free_aliases(self) -> bool:
def fxa(self) -> SocialAccount | None:
def display_name(self) -> str | None:
def custom_domain(self) -> str:
def has_premium(self) -> bool:
def has_phone(self) -> bool:
def has_vpn(self):
def emails_forwarded(self):
def emails_blocked(self):
def emails_replied(self):
def level_one_trackers_blocked(self):
def joined_before_premium_release(self):
def date_phone_registered(self) -> datetime | None:
def add_subdomain(self, subdomain):
def update_abuse_metric(
self,
address_created=False,
replied=False,
email_forwarded=False,
forwarded_email_size=0,
) -> datetime | None:
def is_flagged(self):
def metrics_enabled(self) -> bool:
def plan(self) -> Literal["free", "email", "phone", "bundle"]:
def plan_term(self) -> Literal[None, "unknown", "1_month", "1_year"]:
def metrics_premium_status(self) -> str:
def incr_if_enabled(name, value=1, tags=None):
def measure_feature_usage(sender, instance, **kwargs):
if instance._state.adding:
# if newly created Profile ignore the signal
return
curr_profile = Profile.objects.get(id=instance.id)
# measure tracker removal usage
changed_tracker_removal_setting = (
instance.remove_level_one_email_trackers
!= curr_profile.remove_level_one_email_trackers
)
if changed_tracker_removal_setting:
if instance.remove_level_one_email_trackers:
incr_if_enabled("tracker_removal_enabled")
if not instance.remove_level_one_email_trackers:
incr_if_enabled("tracker_removal_disabled")
info_logger.info(
"tracker_removal_feature",
extra={
"enabled": instance.remove_level_one_email_trackers,
# TODO create a utility function or property for hashed fxa uid
"hashed_uid": sha256(instance.fxa.uid.encode("utf-8")).hexdigest(),
},
) | null |
154,468 | import logging
from mypy_boto3_ses.type_defs import ContentTypeDef
from botocore.exceptions import ClientError
from django.apps import apps
from django.conf import settings
from django.core.management.base import BaseCommand
from allauth.socialaccount.models import SocialAccount
import django_ftl
from emails.apps import EmailsConfig
from emails.models import Profile
from emails.utils import get_welcome_email, ses_message_props
from privaterelay.ftl_bundles import main as ftl_bundle
def _ses_message_props(data: str) -> ContentTypeDef:
return {"Charset": "UTF-8", "Data": data} | null |
154,469 | import logging
from mypy_boto3_ses.type_defs import ContentTypeDef
from botocore.exceptions import ClientError
from django.apps import apps
from django.conf import settings
from django.core.management.base import BaseCommand
from allauth.socialaccount.models import SocialAccount
import django_ftl
from emails.apps import EmailsConfig
from emails.models import Profile
from emails.utils import get_welcome_email, ses_message_props
from privaterelay.ftl_bundles import main as ftl_bundle
logger = logging.getLogger("eventsinfo.send_welcome_emails")
class EmailsConfig(AppConfig):
def ses_client(self) -> SESClient | None:
def s3_client(self):
def __init__(self, app_name, app_module):
def _load_terms(self, filename):
def ready(self): # noqa: F401 (imported but unused warning)
class Profile(models.Model):
def __str__(self):
def save(
self,
force_insert: bool = False,
force_update: bool = False,
using: str | None = None,
update_fields: Iterable[str] | None = None,
) -> None:
def language(self):
def fxa_locale_in_premium_country(self) -> bool:
def avatar(self) -> str | None:
def relay_addresses(self):
def domain_addresses(self):
def total_masks(self) -> int:
def at_mask_limit(self) -> bool:
def check_bounce_pause(self):
def bounce_status(self):
def next_email_try(self):
def last_bounce_date(self):
def at_max_free_aliases(self) -> bool:
def fxa(self) -> SocialAccount | None:
def display_name(self) -> str | None:
def custom_domain(self) -> str:
def has_premium(self) -> bool:
def has_phone(self) -> bool:
def has_vpn(self):
def emails_forwarded(self):
def emails_blocked(self):
def emails_replied(self):
def level_one_trackers_blocked(self):
def joined_before_premium_release(self):
def date_phone_registered(self) -> datetime | None:
def add_subdomain(self, subdomain):
def update_abuse_metric(
self,
address_created=False,
replied=False,
email_forwarded=False,
forwarded_email_size=0,
) -> datetime | None:
def is_flagged(self):
def metrics_enabled(self) -> bool:
def plan(self) -> Literal["free", "email", "phone", "bundle"]:
def plan_term(self) -> Literal[None, "unknown", "1_month", "1_year"]:
def metrics_premium_status(self) -> str:
def ses_message_props(data: str) -> ContentTypeDef:
def get_welcome_email(user: User, format: str) -> str:
def send_welcome_email(profile: Profile, **kwargs):
user = profile.user
app_config = apps.get_app_config("emails")
assert isinstance(app_config, EmailsConfig)
ses_client = app_config.ses_client
assert ses_client
assert settings.RELAY_FROM_ADDRESS
with django_ftl.override(profile.language):
translated_subject = ftl_bundle.format("first-time-user-email-welcome")
try:
ses_client.send_email(
Destination={
"ToAddresses": [user.email],
},
Source=settings.RELAY_FROM_ADDRESS,
Message={
"Subject": ses_message_props(translated_subject),
"Body": {
"Html": ses_message_props(get_welcome_email(user, "html")),
"Text": ses_message_props(get_welcome_email(user, "txt")),
},
},
)
logger.info(f"Sent welcome email to user ID: {profile.user.id}")
profile.sent_welcome_email = True
profile.save()
# Don't send welcome emails to users with no social account.
# E.g., users created thru admin tools.
# TODO?: move this check deeper into get_welcome_email ?
except SocialAccount.DoesNotExist:
profile.sent_welcome_email = True
profile.save()
except ClientError:
logger.error(
f"ClientError while sending welcome email to user ID: {profile.user.id}."
) | null |
154,470 | import json
import logging
import shlex
import sys
import time
import boto3
from botocore.exceptions import ClientError
from django.conf import settings
from django.core.management.base import BaseCommand
from emails.sns import verify_from_sns
from emails.views import _sns_inbound_logic, validate_sns_arn_and_type
from emails.utils import incr_if_enabled
logger = logging.getLogger("events")
info_logger = logging.getLogger("eventsinfo")
def verify_from_sns(json_body):
def validate_sns_arn_and_type(
topic_arn: str | None, message_type: str | None
) -> dict[str, Any] | None:
def _sns_inbound_logic(topic_arn, message_type, json_body):
def incr_if_enabled(name, value=1, tags=None):
def _verify_and_run_sns_inbound_on_message(message):
incr_if_enabled("rerun_message_from_sqs", 1)
json_body = json.loads(message.body)
verified_json_body = verify_from_sns(json_body)
topic_arn = verified_json_body["TopicArn"]
message_type = verified_json_body["Type"]
validate_sns_arn_and_type(topic_arn, message_type)
try:
_sns_inbound_logic(topic_arn, message_type, verified_json_body)
info_logger.info(f"processed sqs message ID: {message.message_id}")
except ClientError as e:
incr_if_enabled("rerun_message_from_sqs_error", 1)
logger.error("sqs_client_error: ", extra=e.response["Error"])
temp_errors = ["throttling", "pause"]
lower_error_code = e.response["Error"]["Code"].lower()
if any(temp_error in lower_error_code for temp_error in temp_errors):
incr_if_enabled("rerun_message_from_sqs_temp_error", 1)
logger.error(
'"temporary" error, sleeping for 1s: ', extra=e.response["Error"]
)
time.sleep(1)
try:
_sns_inbound_logic(topic_arn, message_type, verified_json_body)
info_logger.info(f"processed sqs message ID: {message.message_id}")
except ClientError as e:
incr_if_enabled("rerun_message_from_sqs_error", 1)
logger.error("sqs_client_error: ", extra=e.response["Error"]) | null |
154,471 | from collections import defaultdict
from copy import deepcopy
from datetime import datetime, timezone
from email import message_from_bytes
from email.iterators import _structure
from email.message import EmailMessage
from email.utils import parseaddr
import html
from io import StringIO
import json
from json import JSONDecodeError
import logging
import re
import shlex
from textwrap import dedent
from typing import Any, Literal
from urllib.parse import urlencode
from botocore.exceptions import ClientError
from codetiming import Timer
from decouple import strtobool
from django.shortcuts import render
from sentry_sdk import capture_message
from markus.utils import generate_tag
from waffle import sample_is_active
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from django.db.models import prefetch_related_objects
from django.http import HttpRequest, HttpResponse
from django.template.loader import render_to_string
from django.utils.html import escape
from django.views.decorators.csrf import csrf_exempt
from privaterelay.utils import get_subplat_upgrade_link_by_language, glean_logger
from .models import (
CannotMakeAddressException,
DeletedAddress,
DomainAddress,
Profile,
RelayAddress,
Reply,
address_hash,
get_domain_numerical,
)
from .policy import relay_policy
from .types import (
AWS_MailJSON,
AWS_SNSMessageJSON,
OutgoingHeaders,
EmailForwardingIssues,
EmailHeaderIssues,
)
from .utils import (
_get_bucket_and_key_from_s3_json,
b64_lookup_key,
count_all_trackers,
decrypt_reply_metadata,
derive_reply_keys,
encrypt_reply_metadata,
generate_from_header,
get_domains_from_settings,
get_message_content_from_s3,
get_message_id_bytes,
get_reply_to_address,
histogram_if_enabled,
incr_if_enabled,
remove_message_from_s3,
remove_trackers,
ses_send_raw_email,
urlize_and_linebreaks,
InvalidFromHeader,
parse_email_header,
)
from .sns import verify_from_sns, SUPPORTED_SNS_TYPES
from privaterelay.ftl_bundles import main as ftl_bundle
from privaterelay.utils import flag_is_active_in_task
def strtobool(value: str) -> bool: ...
The provided code snippet includes necessary dependencies for implementing the `first_time_user_test` function. Write a Python function `def first_time_user_test(request)` to solve the following problem:
Demonstrate rendering of the "First time Relay user" email. Settings like language can be given in the querystring, otherwise settings come from a random free profile.
Here is the function:
def first_time_user_test(request):
"""
Demonstrate rendering of the "First time Relay user" email.
Settings like language can be given in the querystring, otherwise settings
come from a random free profile.
"""
in_bundle_country = strtobool(request.GET.get("in_bundle_country", "yes"))
email_context = {
"in_bundle_country": in_bundle_country,
"SITE_ORIGIN": settings.SITE_ORIGIN,
}
if request.GET.get("format", "html") == "text":
return render(
request,
"emails/first_time_user.txt",
email_context,
"text/plain; charset=utf-8",
)
return render(request, "emails/first_time_user.html", email_context) | Demonstrate rendering of the "First time Relay user" email. Settings like language can be given in the querystring, otherwise settings come from a random free profile. |
154,472 | from collections import defaultdict
from copy import deepcopy
from datetime import datetime, timezone
from email import message_from_bytes
from email.iterators import _structure
from email.message import EmailMessage
from email.utils import parseaddr
import html
from io import StringIO
import json
from json import JSONDecodeError
import logging
import re
import shlex
from textwrap import dedent
from typing import Any, Literal
from urllib.parse import urlencode
from botocore.exceptions import ClientError
from codetiming import Timer
from decouple import strtobool
from django.shortcuts import render
from sentry_sdk import capture_message
from markus.utils import generate_tag
from waffle import sample_is_active
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from django.db.models import prefetch_related_objects
from django.http import HttpRequest, HttpResponse
from django.template.loader import render_to_string
from django.utils.html import escape
from django.views.decorators.csrf import csrf_exempt
from privaterelay.utils import get_subplat_upgrade_link_by_language, glean_logger
from .models import (
CannotMakeAddressException,
DeletedAddress,
DomainAddress,
Profile,
RelayAddress,
Reply,
address_hash,
get_domain_numerical,
)
from .policy import relay_policy
from .types import (
AWS_MailJSON,
AWS_SNSMessageJSON,
OutgoingHeaders,
EmailForwardingIssues,
EmailHeaderIssues,
)
from .utils import (
_get_bucket_and_key_from_s3_json,
b64_lookup_key,
count_all_trackers,
decrypt_reply_metadata,
derive_reply_keys,
encrypt_reply_metadata,
generate_from_header,
get_domains_from_settings,
get_message_content_from_s3,
get_message_id_bytes,
get_reply_to_address,
histogram_if_enabled,
incr_if_enabled,
remove_message_from_s3,
remove_trackers,
ses_send_raw_email,
urlize_and_linebreaks,
InvalidFromHeader,
parse_email_header,
)
from .sns import verify_from_sns, SUPPORTED_SNS_TYPES
from privaterelay.ftl_bundles import main as ftl_bundle
from privaterelay.utils import flag_is_active_in_task
The provided code snippet includes necessary dependencies for implementing the `reply_requires_premium_test` function. Write a Python function `def reply_requires_premium_test(request)` to solve the following problem:
Demonstrate rendering of the "Reply requires premium" email. Settings like language can be given in the querystring, otherwise settings come from a random free profile.
Here is the function:
def reply_requires_premium_test(request):
"""
Demonstrate rendering of the "Reply requires premium" email.
Settings like language can be given in the querystring, otherwise settings
come from a random free profile.
"""
email_context = {
"sender": "test@example.com",
"forwarded": True,
"SITE_ORIGIN": settings.SITE_ORIGIN,
}
for param in request.GET:
email_context[param] = request.GET.get(param)
if param == "forwarded" and request.GET[param] == "True":
email_context[param] = True
for param in request.GET:
if param == "content-type" and request.GET[param] == "text/plain":
return render(
request,
"emails/reply_requires_premium.txt",
email_context,
"text/plain; charset=utf-8",
)
return render(request, "emails/reply_requires_premium.html", email_context) | Demonstrate rendering of the "Reply requires premium" email. Settings like language can be given in the querystring, otherwise settings come from a random free profile. |
154,473 | from collections import defaultdict
from copy import deepcopy
from datetime import datetime, timezone
from email import message_from_bytes
from email.iterators import _structure
from email.message import EmailMessage
from email.utils import parseaddr
import html
from io import StringIO
import json
from json import JSONDecodeError
import logging
import re
import shlex
from textwrap import dedent
from typing import Any, Literal
from urllib.parse import urlencode
from botocore.exceptions import ClientError
from codetiming import Timer
from decouple import strtobool
from django.shortcuts import render
from sentry_sdk import capture_message
from markus.utils import generate_tag
from waffle import sample_is_active
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from django.db.models import prefetch_related_objects
from django.http import HttpRequest, HttpResponse
from django.template.loader import render_to_string
from django.utils.html import escape
from django.views.decorators.csrf import csrf_exempt
from privaterelay.utils import get_subplat_upgrade_link_by_language, glean_logger
from .models import (
CannotMakeAddressException,
DeletedAddress,
DomainAddress,
Profile,
RelayAddress,
Reply,
address_hash,
get_domain_numerical,
)
from .policy import relay_policy
from .types import (
AWS_MailJSON,
AWS_SNSMessageJSON,
OutgoingHeaders,
EmailForwardingIssues,
EmailHeaderIssues,
)
from .utils import (
_get_bucket_and_key_from_s3_json,
b64_lookup_key,
count_all_trackers,
decrypt_reply_metadata,
derive_reply_keys,
encrypt_reply_metadata,
generate_from_header,
get_domains_from_settings,
get_message_content_from_s3,
get_message_id_bytes,
get_reply_to_address,
histogram_if_enabled,
incr_if_enabled,
remove_message_from_s3,
remove_trackers,
ses_send_raw_email,
urlize_and_linebreaks,
InvalidFromHeader,
parse_email_header,
)
from .sns import verify_from_sns, SUPPORTED_SNS_TYPES
from privaterelay.ftl_bundles import main as ftl_bundle
from privaterelay.utils import flag_is_active_in_task
def wrap_html_email(
original_html: str,
language: str,
has_premium: bool,
display_email: str,
num_level_one_email_trackers_removed: int | None = None,
tracker_report_link: str | None = None,
) -> str:
def first_forwarded_email_test(request: HttpRequest) -> HttpResponse:
# TO DO: Update with correct context when trigger is created
first_forwarded_email_html = render_to_string(
"emails/first_forwarded_email.html",
{
"SITE_ORIGIN": settings.SITE_ORIGIN,
},
)
wrapped_email = wrap_html_email(
first_forwarded_email_html,
"en-us",
True,
"test@example.com",
0,
)
return HttpResponse(wrapped_email) | null |
154,474 | from collections import defaultdict
from copy import deepcopy
from datetime import datetime, timezone
from email import message_from_bytes
from email.iterators import _structure
from email.message import EmailMessage
from email.utils import parseaddr
import html
from io import StringIO
import json
from json import JSONDecodeError
import logging
import re
import shlex
from textwrap import dedent
from typing import Any, Literal
from urllib.parse import urlencode
from botocore.exceptions import ClientError
from codetiming import Timer
from decouple import strtobool
from django.shortcuts import render
from sentry_sdk import capture_message
from markus.utils import generate_tag
from waffle import sample_is_active
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from django.db.models import prefetch_related_objects
from django.http import HttpRequest, HttpResponse
from django.template.loader import render_to_string
from django.utils.html import escape
from django.views.decorators.csrf import csrf_exempt
from privaterelay.utils import get_subplat_upgrade_link_by_language, glean_logger
from .models import (
CannotMakeAddressException,
DeletedAddress,
DomainAddress,
Profile,
RelayAddress,
Reply,
address_hash,
get_domain_numerical,
)
from .policy import relay_policy
from .types import (
AWS_MailJSON,
AWS_SNSMessageJSON,
OutgoingHeaders,
EmailForwardingIssues,
EmailHeaderIssues,
)
from .utils import (
_get_bucket_and_key_from_s3_json,
b64_lookup_key,
count_all_trackers,
decrypt_reply_metadata,
derive_reply_keys,
encrypt_reply_metadata,
generate_from_header,
get_domains_from_settings,
get_message_content_from_s3,
get_message_id_bytes,
get_reply_to_address,
histogram_if_enabled,
incr_if_enabled,
remove_message_from_s3,
remove_trackers,
ses_send_raw_email,
urlize_and_linebreaks,
InvalidFromHeader,
parse_email_header,
)
from .sns import verify_from_sns, SUPPORTED_SNS_TYPES
from privaterelay.ftl_bundles import main as ftl_bundle
from privaterelay.utils import flag_is_active_in_task
def wrap_html_email(
original_html: str,
language: str,
has_premium: bool,
display_email: str,
num_level_one_email_trackers_removed: int | None = None,
tracker_report_link: str | None = None,
) -> str:
"""Add Relay banners, surveys, etc. to an HTML email"""
subplat_upgrade_link = get_subplat_upgrade_link_by_language(language)
email_context = {
"original_html": original_html,
"language": language,
"has_premium": has_premium,
"subplat_upgrade_link": subplat_upgrade_link,
"display_email": display_email,
"tracker_report_link": tracker_report_link,
"num_level_one_email_trackers_removed": num_level_one_email_trackers_removed,
"SITE_ORIGIN": settings.SITE_ORIGIN,
}
content = render_to_string("emails/wrapped_email.html", email_context)
# Remove empty lines
content_lines = [line for line in content.splitlines() if line.strip()]
return "\n".join(content_lines) + "\n"
def strtobool(value: str) -> bool: ...
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
api_token = models.UUIDField(default=uuid.uuid4)
num_address_deleted = models.PositiveIntegerField(default=0)
date_subscribed = models.DateTimeField(blank=True, null=True)
date_subscribed_phone = models.DateTimeField(blank=True, null=True)
# TODO MPP-2972: delete date_phone_subscription_checked in favor of
# date_phone_subscription_next_reset
date_phone_subscription_checked = models.DateTimeField(blank=True, null=True)
date_phone_subscription_start = models.DateTimeField(blank=True, null=True)
date_phone_subscription_reset = models.DateTimeField(blank=True, null=True)
date_phone_subscription_end = models.DateTimeField(blank=True, null=True)
address_last_deleted = models.DateTimeField(blank=True, null=True, db_index=True)
last_soft_bounce = models.DateTimeField(blank=True, null=True, db_index=True)
last_hard_bounce = models.DateTimeField(blank=True, null=True, db_index=True)
last_account_flagged = models.DateTimeField(blank=True, null=True, db_index=True)
num_deleted_relay_addresses = models.PositiveIntegerField(default=0)
num_deleted_domain_addresses = models.PositiveIntegerField(default=0)
num_email_forwarded_in_deleted_address = models.PositiveIntegerField(default=0)
num_email_blocked_in_deleted_address = models.PositiveIntegerField(default=0)
num_level_one_trackers_blocked_in_deleted_address = models.PositiveIntegerField(
default=0, null=True
)
num_email_replied_in_deleted_address = models.PositiveIntegerField(default=0)
num_email_spam_in_deleted_address = models.PositiveIntegerField(default=0)
subdomain = models.CharField(
blank=True,
null=True,
unique=True,
max_length=63,
db_index=True,
validators=[valid_available_subdomain],
)
# Whether we store the user's alias labels in the server
server_storage = models.BooleanField(default=True)
# Whether we store the caller/sender log for the user's relay number
store_phone_log = models.BooleanField(default=True)
# TODO: Data migration to set null to false
# TODO: Schema migration to remove null=True
remove_level_one_email_trackers = models.BooleanField(null=True, default=False)
onboarding_state = models.PositiveIntegerField(default=0)
onboarding_free_state = models.PositiveIntegerField(default=0)
auto_block_spam = models.BooleanField(default=False)
forwarded_first_reply = models.BooleanField(default=False)
# Empty string means the profile was created through relying party flow
created_by = models.CharField(blank=True, null=True, max_length=63)
sent_welcome_email = models.BooleanField(default=False)
last_engagement = models.DateTimeField(blank=True, null=True, db_index=True)
def __str__(self):
return "%s Profile" % self.user
def save(
self,
force_insert: bool = False,
force_update: bool = False,
using: str | None = None,
update_fields: Iterable[str] | None = None,
) -> None:
# always lower-case the subdomain before saving it
# TODO: change subdomain field as a custom field inheriting from
# CharField to validate constraints on the field update too
if self.subdomain and not self.subdomain.islower():
self.subdomain = self.subdomain.lower()
if update_fields is not None:
update_fields = {"subdomain"}.union(update_fields)
super().save(
force_insert=force_insert,
force_update=force_update,
using=using,
update_fields=update_fields,
)
# any time a profile is saved with server_storage False, delete the
# appropriate server-stored Relay address data.
if not self.server_storage:
relay_addresses = RelayAddress.objects.filter(user=self.user)
relay_addresses.update(description="", generated_for="", used_on="")
domain_addresses = DomainAddress.objects.filter(user=self.user)
domain_addresses.update(description="", used_on="")
if settings.PHONES_ENABLED:
# any time a profile is saved with store_phone_log False, delete the
# appropriate server-stored InboundContact records
from phones.models import InboundContact, RelayNumber
if not self.store_phone_log:
try:
relay_number = RelayNumber.objects.get(user=self.user)
InboundContact.objects.filter(relay_number=relay_number).delete()
except RelayNumber.DoesNotExist:
pass
def language(self):
if self.fxa and self.fxa.extra_data.get("locale"):
for accept_lang, _ in parse_accept_lang_header(
self.fxa.extra_data.get("locale")
):
try:
return get_supported_language_variant(accept_lang)
except LookupError:
continue
return "en"
# This method returns whether the locale associated with the user's Mozilla account
# includes a country code from a Premium country. This is less accurate than using
# get_countries_info_from_request_and_mapping(), which can use a GeoIP lookup, so
# prefer using that if a request context is available. In other contexts, for
# example when sending an email, this method can be useful.
def fxa_locale_in_premium_country(self) -> bool:
if self.fxa and self.fxa.extra_data.get("locale"):
try:
country = guess_country_from_accept_lang(self.fxa.extra_data["locale"])
except AcceptLanguageError:
return False
premium_countries = get_premium_countries()
if country in premium_countries:
return True
return False
def avatar(self) -> str | None:
if fxa := self.fxa:
return str(fxa.extra_data.get("avatar"))
return None
def relay_addresses(self):
return RelayAddress.objects.filter(user=self.user)
def domain_addresses(self):
return DomainAddress.objects.filter(user=self.user)
def total_masks(self) -> int:
ra_count: int = self.relay_addresses.count()
da_count: int = self.domain_addresses.count()
return ra_count + da_count
def at_mask_limit(self) -> bool:
if self.has_premium:
return False
ra_count: int = self.relay_addresses.count()
return ra_count >= settings.MAX_NUM_FREE_ALIASES
def check_bounce_pause(self):
if self.last_hard_bounce:
last_hard_bounce_allowed = datetime.now(timezone.utc) - timedelta(
days=settings.HARD_BOUNCE_ALLOWED_DAYS
)
if self.last_hard_bounce > last_hard_bounce_allowed:
return BounceStatus(True, "hard")
self.last_hard_bounce = None
self.save()
if self.last_soft_bounce:
last_soft_bounce_allowed = datetime.now(timezone.utc) - timedelta(
days=settings.SOFT_BOUNCE_ALLOWED_DAYS
)
if self.last_soft_bounce > last_soft_bounce_allowed:
return BounceStatus(True, "soft")
self.last_soft_bounce = None
self.save()
return BounceStatus(False, "")
def bounce_status(self):
return self.check_bounce_pause()
def next_email_try(self):
bounce_pause, bounce_type = self.check_bounce_pause()
if not bounce_pause:
return datetime.now(timezone.utc)
if bounce_type == "soft":
assert self.last_soft_bounce
return self.last_soft_bounce + timedelta(
days=settings.SOFT_BOUNCE_ALLOWED_DAYS
)
assert bounce_type == "hard"
assert self.last_hard_bounce
return self.last_hard_bounce + timedelta(days=settings.HARD_BOUNCE_ALLOWED_DAYS)
def last_bounce_date(self):
if self.last_hard_bounce:
return self.last_hard_bounce
if self.last_soft_bounce:
return self.last_soft_bounce
return None
def at_max_free_aliases(self) -> bool:
relay_addresses_count: int = self.relay_addresses.count()
return relay_addresses_count >= settings.MAX_NUM_FREE_ALIASES
def fxa(self) -> SocialAccount | None:
# Note: we are NOT using .filter() here because it invalidates
# any profile instances that were queried with prefetch_related, which
# we use in at least the profile view to minimize queries
assert hasattr(self.user, "socialaccount_set")
for sa in self.user.socialaccount_set.all():
if sa.provider == "fxa":
return sa
return None
def display_name(self) -> str | None:
# if display name is not set on FxA the
# displayName key will not exist on the extra_data
if fxa := self.fxa:
name = fxa.extra_data.get("displayName")
return name if name is None else str(name)
return None
def custom_domain(self) -> str:
assert self.subdomain
return f"@{self.subdomain}.{settings.MOZMAIL_DOMAIN}"
def has_premium(self) -> bool:
# FIXME: as we don't have all the tiers defined we are over-defining
# this to mark the user as a premium user as well
if not self.fxa:
return False
for premium_domain in PREMIUM_DOMAINS:
if self.user.email.endswith(f"@{premium_domain}"):
return True
user_subscriptions = self.fxa.extra_data.get("subscriptions", [])
for sub in settings.SUBSCRIPTIONS_WITH_UNLIMITED:
if sub in user_subscriptions:
return True
return False
def has_phone(self) -> bool:
if not self.fxa:
return False
if settings.RELAY_CHANNEL != "prod" and not settings.IN_PYTEST:
if not flag_is_active_in_task("phones", self.user):
return False
if flag_is_active_in_task("free_phones", self.user):
return True
user_subscriptions = self.fxa.extra_data.get("subscriptions", [])
for sub in settings.SUBSCRIPTIONS_WITH_PHONE:
if sub in user_subscriptions:
return True
return False
def has_vpn(self):
if not self.fxa:
return False
user_subscriptions = self.fxa.extra_data.get("subscriptions", [])
for sub in settings.SUBSCRIPTIONS_WITH_VPN:
if sub in user_subscriptions:
return True
return False
def emails_forwarded(self):
return (
sum(ra.num_forwarded for ra in self.relay_addresses)
+ sum(da.num_forwarded for da in self.domain_addresses)
+ self.num_email_forwarded_in_deleted_address
)
def emails_blocked(self):
return (
sum(ra.num_blocked for ra in self.relay_addresses)
+ sum(da.num_blocked for da in self.domain_addresses)
+ self.num_email_blocked_in_deleted_address
)
def emails_replied(self):
# Once Django is on version 4.0 and above, we can set the default=0
# and return a int instead of None
# https://docs.djangoproject.com/en/4.0/ref/models/querysets/#default
totals = [self.relay_addresses.aggregate(models.Sum("num_replied"))]
totals.append(self.domain_addresses.aggregate(models.Sum("num_replied")))
total_num_replied = 0
for num in totals:
total_num_replied += (
num.get("num_replied__sum") if num.get("num_replied__sum") else 0
)
return total_num_replied + self.num_email_replied_in_deleted_address
def level_one_trackers_blocked(self):
return (
sum(ra.num_level_one_trackers_blocked or 0 for ra in self.relay_addresses)
+ sum(
da.num_level_one_trackers_blocked or 0 for da in self.domain_addresses
)
+ (self.num_level_one_trackers_blocked_in_deleted_address or 0)
)
def joined_before_premium_release(self):
date_created = self.user.date_joined
return date_created < datetime.fromisoformat("2021-10-22 17:00:00+00:00")
def date_phone_registered(self) -> datetime | None:
if not settings.PHONES_ENABLED:
return None
try:
real_phone = RealPhone.objects.get(user=self.user)
relay_number = RelayNumber.objects.get(user=self.user)
except RealPhone.DoesNotExist:
return None
except RelayNumber.DoesNotExist:
return real_phone.verified_date
return relay_number.created_at or real_phone.verified_date
def add_subdomain(self, subdomain):
# Handles if the subdomain is "" or None
if not subdomain:
raise CannotMakeSubdomainException(
"error-subdomain-cannot-be-empty-or-null"
)
# subdomain must be all lowercase
subdomain = subdomain.lower()
if not self.has_premium:
raise CannotMakeSubdomainException("error-premium-set-subdomain")
if self.subdomain is not None:
raise CannotMakeSubdomainException("error-premium-cannot-change-subdomain")
self.subdomain = subdomain
# The validator defined in the subdomain field does not get run in full_clean()
# when self.subdomain is "" or None, so we need to run the validator again to
# catch these cases.
valid_available_subdomain(subdomain)
self.full_clean()
self.save()
RegisteredSubdomain.objects.create(subdomain_hash=hash_subdomain(subdomain))
return subdomain
def update_abuse_metric(
self,
address_created=False,
replied=False,
email_forwarded=False,
forwarded_email_size=0,
) -> datetime | None:
# TODO MPP-3720: This should be wrapped in atomic or select_for_update to ensure
# race conditions are properly handled.
# look for abuse metrics created on the same UTC date, regardless of time.
midnight_utc_today = datetime.combine(
datetime.now(timezone.utc).date(), datetime.min.time()
).astimezone(timezone.utc)
midnight_utc_tomorow = midnight_utc_today + timedelta(days=1)
abuse_metric = self.user.abusemetrics_set.filter(
first_recorded__gte=midnight_utc_today,
first_recorded__lt=midnight_utc_tomorow,
).first()
if not abuse_metric:
abuse_metric = AbuseMetrics.objects.create(user=self.user)
AbuseMetrics.objects.filter(first_recorded__lt=midnight_utc_today).delete()
# increment the abuse metric
if address_created:
abuse_metric.num_address_created_per_day += 1
if replied:
abuse_metric.num_replies_per_day += 1
if email_forwarded:
abuse_metric.num_email_forwarded_per_day += 1
if forwarded_email_size > 0:
abuse_metric.forwarded_email_size_per_day += forwarded_email_size
abuse_metric.last_recorded = datetime.now(timezone.utc)
abuse_metric.save()
# check user should be flagged for abuse
hit_max_create = False
hit_max_replies = False
hit_max_forwarded = False
hit_max_forwarded_email_size = False
hit_max_create = (
abuse_metric.num_address_created_per_day
>= settings.MAX_ADDRESS_CREATION_PER_DAY
)
hit_max_replies = (
abuse_metric.num_replies_per_day >= settings.MAX_REPLIES_PER_DAY
)
hit_max_forwarded = (
abuse_metric.num_email_forwarded_per_day >= settings.MAX_FORWARDED_PER_DAY
)
hit_max_forwarded_email_size = (
abuse_metric.forwarded_email_size_per_day
>= settings.MAX_FORWARDED_EMAIL_SIZE_PER_DAY
)
if (
hit_max_create
or hit_max_replies
or hit_max_forwarded
or hit_max_forwarded_email_size
):
self.last_account_flagged = datetime.now(timezone.utc)
self.save()
data = {
"uid": self.fxa.uid if self.fxa else None,
"flagged": self.last_account_flagged.timestamp(),
"replies": abuse_metric.num_replies_per_day,
"addresses": abuse_metric.num_address_created_per_day,
"forwarded": abuse_metric.num_email_forwarded_per_day,
"forwarded_size_in_bytes": abuse_metric.forwarded_email_size_per_day,
}
# log for further secops review
abuse_logger.info("Abuse flagged", extra=data)
return self.last_account_flagged
def is_flagged(self):
if not self.last_account_flagged:
return False
account_premium_feature_resumed = self.last_account_flagged + timedelta(
days=settings.PREMIUM_FEATURE_PAUSED_DAYS
)
if datetime.now(timezone.utc) > account_premium_feature_resumed:
# premium feature has been resumed
return False
# user was flagged and the premium feature pause period is not yet over
return True
def metrics_enabled(self) -> bool:
"""
Does the user allow us to record technical and interaction data?
This is based on the Mozilla accounts opt-out option, added around 2022. A user
can go to their Mozilla account profile settings, Data Collection and Use, and
deselect "Help improve Mozilla Account". This setting defaults to On, and is
sent as "metricsEnabled". Some older Relay accounts do not have
"metricsEnabled", and we default to On.
"""
if self.fxa:
return bool(self.fxa.extra_data.get("metricsEnabled", True))
return True
def plan(self) -> Literal["free", "email", "phone", "bundle"]:
"""The user's Relay plan as a string."""
if self.has_premium:
if self.has_phone:
return "bundle" if self.has_vpn else "phone"
else:
return "email"
else:
return "free"
def plan_term(self) -> Literal[None, "unknown", "1_month", "1_year"]:
"""The user's Relay plan term as a string."""
plan = self.plan
if plan == "free":
return None
if plan == "phone":
start_date = self.date_phone_subscription_start
end_date = self.date_phone_subscription_end
if start_date and end_date:
span = end_date - start_date
return "1_year" if span.days > 32 else "1_month"
return "unknown"
def metrics_premium_status(self) -> str:
plan = self.plan
if plan == "free":
return "free"
return f"{plan}_{self.plan_term}"
The provided code snippet includes necessary dependencies for implementing the `wrapped_email_test` function. Write a Python function `def wrapped_email_test(request: HttpRequest) -> HttpResponse` to solve the following problem:
Demonstrate rendering of forwarded HTML emails. Settings like language can be given in the querystring, otherwise settings come from a randomly chosen profile.
Here is the function:
def wrapped_email_test(request: HttpRequest) -> HttpResponse:
"""
Demonstrate rendering of forwarded HTML emails.
Settings like language can be given in the querystring, otherwise settings
come from a randomly chosen profile.
"""
if all(key in request.GET for key in ("language", "has_premium")):
user_profile = None
else:
user_profile = Profile.objects.order_by("?").first()
if "language" in request.GET:
language = request.GET["language"]
else:
assert user_profile is not None
language = user_profile.language
if "has_premium" in request.GET:
has_premium = strtobool(request.GET["has_premium"])
else:
assert user_profile is not None
has_premium = user_profile.has_premium
if "num_level_one_email_trackers_removed" in request.GET:
num_level_one_email_trackers_removed = int(
request.GET["num_level_one_email_trackers_removed"]
)
else:
num_level_one_email_trackers_removed = 0
if "has_tracker_report_link" in request.GET:
has_tracker_report_link = strtobool(request.GET["has_tracker_report_link"])
else:
has_tracker_report_link = False
if has_tracker_report_link:
if num_level_one_email_trackers_removed:
trackers = {
"fake-tracker.example.com": num_level_one_email_trackers_removed
}
else:
trackers = {}
tracker_report_link = (
"/tracker-report/#{"
'"sender": "sender@example.com", '
'"received_at": 1658434657, '
f'"trackers": { json.dumps(trackers) }'
"}"
)
else:
tracker_report_link = ""
path = "/emails/wrapped_email_test"
old_query = {
"language": language,
"has_premium": "Yes" if has_premium else "No",
"has_tracker_report_link": "Yes" if has_tracker_report_link else "No",
"num_level_one_email_trackers_removed": str(
num_level_one_email_trackers_removed
),
}
def switch_link(key, value):
if old_query[key] == value:
return str(value)
new_query = old_query.copy()
new_query[key] = value
return f'<a href="{path}?{urlencode(new_query)}">{value}</a>'
html_content = dedent(
f"""\
<p>
<strong>Email rendering Test</strong>
</p>
<p>Settings: (<a href="{path}">clear all</a>)</p>
<ul>
<li>
<strong>language</strong>:
{escape(language)}
(switch to
{switch_link("language", "en-us")},
{switch_link("language", "de")},
{switch_link("language", "en-gb")},
{switch_link("language", "fr")},
{switch_link("language", "ru-ru")},
{switch_link("language", "es-es")},
{switch_link("language", "pt-br")},
{switch_link("language", "it-it")},
{switch_link("language", "en-ca")},
{switch_link("language", "de-de")},
{switch_link("language", "es-mx")})
</li>
<li>
<strong>has_premium</strong>:
{"Yes" if has_premium else "No"}
(switch to
{switch_link("has_premium", "Yes")},
{switch_link("has_premium", "No")})
</li>
<li>
<strong>has_tracker_report_link</strong>:
{"Yes" if has_tracker_report_link else "No"}
(switch to
{switch_link("has_tracker_report_link", "Yes")},
{switch_link("has_tracker_report_link", "No")})
</li>
<li>
<strong>num_level_one_email_trackers_removed</strong>:
{num_level_one_email_trackers_removed}
(switch to
{switch_link("num_level_one_email_trackers_removed", "0")},
{switch_link("num_level_one_email_trackers_removed", "1")},
{switch_link("num_level_one_email_trackers_removed", "2")})
</li>
</ul>
"""
)
wrapped_email = wrap_html_email(
original_html=html_content,
language=language,
has_premium=has_premium,
tracker_report_link=tracker_report_link,
display_email="test@relay.firefox.com",
num_level_one_email_trackers_removed=num_level_one_email_trackers_removed,
)
return HttpResponse(wrapped_email) | Demonstrate rendering of forwarded HTML emails. Settings like language can be given in the querystring, otherwise settings come from a randomly chosen profile. |
154,475 | from collections import defaultdict
from copy import deepcopy
from datetime import datetime, timezone
from email import message_from_bytes
from email.iterators import _structure
from email.message import EmailMessage
from email.utils import parseaddr
import html
from io import StringIO
import json
from json import JSONDecodeError
import logging
import re
import shlex
from textwrap import dedent
from typing import Any, Literal
from urllib.parse import urlencode
from botocore.exceptions import ClientError
from codetiming import Timer
from decouple import strtobool
from django.shortcuts import render
from sentry_sdk import capture_message
from markus.utils import generate_tag
from waffle import sample_is_active
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from django.db.models import prefetch_related_objects
from django.http import HttpRequest, HttpResponse
from django.template.loader import render_to_string
from django.utils.html import escape
from django.views.decorators.csrf import csrf_exempt
from privaterelay.utils import get_subplat_upgrade_link_by_language, glean_logger
from .models import (
CannotMakeAddressException,
DeletedAddress,
DomainAddress,
Profile,
RelayAddress,
Reply,
address_hash,
get_domain_numerical,
)
from .policy import relay_policy
from .types import (
AWS_MailJSON,
AWS_SNSMessageJSON,
OutgoingHeaders,
EmailForwardingIssues,
EmailHeaderIssues,
)
from .utils import (
_get_bucket_and_key_from_s3_json,
b64_lookup_key,
count_all_trackers,
decrypt_reply_metadata,
derive_reply_keys,
encrypt_reply_metadata,
generate_from_header,
get_domains_from_settings,
get_message_content_from_s3,
get_message_id_bytes,
get_reply_to_address,
histogram_if_enabled,
incr_if_enabled,
remove_message_from_s3,
remove_trackers,
ses_send_raw_email,
urlize_and_linebreaks,
InvalidFromHeader,
parse_email_header,
)
from .sns import verify_from_sns, SUPPORTED_SNS_TYPES
from privaterelay.ftl_bundles import main as ftl_bundle
from privaterelay.utils import flag_is_active_in_task
logger = logging.getLogger("events")
def validate_sns_arn_and_type(
topic_arn: str | None, message_type: str | None
) -> dict[str, Any] | None:
"""
Validate Topic ARN and SNS Message Type.
If an error is detected, the return is a dictionary of error details.
If no error is detected, the return is None.
"""
if not topic_arn:
error = "Received SNS request without Topic ARN."
elif topic_arn not in settings.AWS_SNS_TOPIC:
error = "Received SNS message for wrong topic."
elif not message_type:
error = "Received SNS request without Message Type."
elif message_type not in SUPPORTED_SNS_TYPES:
error = "Received SNS message for unsupported Type."
else:
error = None
if error:
return {
"error": error,
"received_topic_arn": shlex.quote(topic_arn) if topic_arn else topic_arn,
"supported_topic_arn": sorted(settings.AWS_SNS_TOPIC),
"received_sns_type": (
shlex.quote(message_type) if message_type else message_type
),
"supported_sns_types": SUPPORTED_SNS_TYPES,
}
return None
def _sns_inbound_logic(topic_arn, message_type, json_body):
if message_type == "SubscriptionConfirmation":
info_logger.info(
"SNS SubscriptionConfirmation",
extra={"SubscribeURL": json_body["SubscribeURL"]},
)
return HttpResponse("Logged SubscribeURL", status=200)
if message_type == "Notification":
incr_if_enabled("sns_inbound_Notification", 1)
return _sns_notification(json_body)
logger.error(
"SNS message type did not fall under the SNS inbound logic",
extra={"message_type": shlex.quote(message_type)},
)
capture_message(
"Received SNS message with type not handled in inbound log",
level="error",
stack=True,
)
return HttpResponse(
"Received SNS message with type not handled in inbound log", status=400
)
def incr_if_enabled(name, value=1, tags=None):
if settings.STATSD_ENABLED:
metrics.incr(name, value, tags)
def verify_from_sns(json_body):
pemfile = _grab_keyfile(json_body["SigningCertURL"])
cert = crypto.load_certificate(crypto.FILETYPE_PEM, pemfile)
signature = base64.decodebytes(json_body["Signature"].encode("utf-8"))
hash_format = _get_hash_format(json_body)
crypto.verify(
cert, signature, hash_format.format(**json_body).encode("utf-8"), "sha1"
)
return json_body
def sns_inbound(request):
incr_if_enabled("sns_inbound", 1)
# First thing we do is verify the signature
json_body = json.loads(request.body)
verified_json_body = verify_from_sns(json_body)
# Validate ARN and message type
topic_arn = verified_json_body.get("TopicArn", None)
message_type = verified_json_body.get("Type", None)
error_details = validate_sns_arn_and_type(topic_arn, message_type)
if error_details:
logger.error("validate_sns_arn_and_type_error", extra=error_details)
return HttpResponse(error_details["error"], status=400)
return _sns_inbound_logic(topic_arn, message_type, verified_json_body) | null |
154,476 | from __future__ import annotations
from decimal import Decimal
from functools import cache, wraps
from pathlib import Path
from string import ascii_uppercase
from typing import Callable, TypedDict, cast, TYPE_CHECKING
import json
import logging
import random
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.http import Http404, HttpRequest
from django.utils.translation.trans_real import parse_accept_lang_header
from waffle import get_waffle_flag_model
from waffle.models import logger as waffle_logger
from waffle.utils import (
get_cache as get_waffle_cache,
get_setting as get_waffle_setting,
)
from .plans import (
LanguageStr,
PeriodStr,
PlanCountryLangMapping,
CountryStr,
get_premium_country_language_mapping,
)
class CountryInfo(TypedDict):
country_code: str
countries: list[CountryStr]
available_in_country: bool
plan_country_lang_mapping: PlanCountryLangMapping
def _get_cc_from_request(request: HttpRequest) -> str:
"""Determine the user's region / country code."""
log_data: dict[str, str] = {}
cdn_region = None
region = None
if "X-Client-Region" in request.headers:
cdn_region = region = request.headers["X-Client-Region"].upper()
log_data["cdn_region"] = cdn_region
log_data["region_method"] = "cdn"
accept_language_region = None
if "Accept-Language" in request.headers:
log_data["accept_lang"] = request.headers["Accept-Language"]
accept_language_region = _get_cc_from_lang(request.headers["Accept-Language"])
log_data["accept_lang_region"] = accept_language_region
if region is None:
region = accept_language_region
log_data["region_method"] = "accept_lang"
if region is None:
region = "US"
log_data["region_method"] = "fallback"
log_data["region"] = region
# MPP-3284: Log details of region selection. Only log once per request, since some
# endpoints, like /api/v1/runtime_data, call this multiple times.
if not getattr(request, "_logged_region_details", False):
setattr(request, "_logged_region_details", True)
info_logger.info("region_details", extra=log_data)
return region
PlanCountryLangMapping = dict[CountryStr, PricePeriodsForLanguageDict]
def get_countries_info_from_request_and_mapping(
request: HttpRequest, mapping: PlanCountryLangMapping
) -> CountryInfo:
country_code = _get_cc_from_request(request)
countries = sorted(mapping.keys())
available_in_country = country_code in countries
return {
"country_code": country_code,
"countries": countries,
"available_in_country": available_in_country,
"plan_country_lang_mapping": mapping,
} | null |
154,477 | from __future__ import annotations
from decimal import Decimal
from functools import cache, wraps
from pathlib import Path
from string import ascii_uppercase
from typing import Callable, TypedDict, cast, TYPE_CHECKING
import json
import logging
import random
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.http import Http404, HttpRequest
from django.utils.translation.trans_real import parse_accept_lang_header
from waffle import get_waffle_flag_model
from waffle.models import logger as waffle_logger
from waffle.utils import (
get_cache as get_waffle_cache,
get_setting as get_waffle_setting,
)
from .plans import (
LanguageStr,
PeriodStr,
PlanCountryLangMapping,
CountryStr,
get_premium_country_language_mapping,
)
def enable_or_404(
check_function: Callable[[], bool],
message: str = "This conditional view is disabled.",
):
"""
Returns decorator that enables a view if a check function passes,
otherwise returns a 404.
Usage:
def percent_1():
import random
return random.randint(1, 100) == 1
def lucky_view(request):
# 1 in 100 chance of getting here
# 99 in 100 chance of 404
"""
def decorator(func):
def inner(*args, **kwargs):
if check_function():
return func(*args, **kwargs)
else:
raise Http404(message) # Display a message with DEBUG=True
return inner
return decorator
The provided code snippet includes necessary dependencies for implementing the `enable_if_setting` function. Write a Python function `def enable_if_setting( setting_name: str, message_fmt: str = "This view is disabled because {setting_name} is False", )` to solve the following problem:
Returns decorator that enables a view if a setting is truthy, otherwise returns a 404. Usage: @enable_if_setting("DEBUG") def debug_only_view(request): # DEBUG == True Or in URLS: path( "developer_info", enable_if_setting("DEBUG")(debug_only_view) ), name="developer-info", ),
Here is the function:
def enable_if_setting(
setting_name: str,
message_fmt: str = "This view is disabled because {setting_name} is False",
):
"""
Returns decorator that enables a view if a setting is truthy, otherwise
returns a 404.
Usage:
@enable_if_setting("DEBUG")
def debug_only_view(request):
# DEBUG == True
Or in URLS:
path(
"developer_info",
enable_if_setting("DEBUG")(debug_only_view)
),
name="developer-info",
),
"""
def setting_is_truthy() -> bool:
return bool(getattr(settings, setting_name))
return enable_or_404(
setting_is_truthy, message_fmt.format(setting_name=setting_name)
) | Returns decorator that enables a view if a setting is truthy, otherwise returns a 404. Usage: @enable_if_setting("DEBUG") def debug_only_view(request): # DEBUG == True Or in URLS: path( "developer_info", enable_if_setting("DEBUG")(debug_only_view) ), name="developer-info", ), |
154,478 | import base64
import json
from pathlib import Path
from typing import Any
import requests
import os
from django.apps import AppConfig
from django.conf import settings
from django.utils.functional import cached_property
def get_version_info(base_dir: str | Path | None = None) -> VersionInfo:
"""Return version information written by build process."""
if base_dir is None:
base_path = Path(settings.BASE_DIR)
else:
base_path = Path(base_dir)
version_json_path = base_path / "version.json"
info = {}
if version_json_path.exists():
with version_json_path.open() as version_file:
try:
info = json.load(version_file)
except ValueError:
pass
if not hasattr(info, "get"):
info = {}
version_info = VersionInfo(
source=info.get("source", "https://github.com/mozilla/fx-private-relay"),
version=info.get("version", "unknown"),
commit=info.get("commit", "unknown"),
build=info.get("build", "not built"),
)
return version_info
def get_profiler_startup_data() -> tuple[str | None, str | None]:
from .utils import get_version_info
if settings.RELAY_CHANNEL not in ("dev", "stage", "prod"):
return (None, None)
if settings.RELAY_CHANNEL in ("dev", "stage"):
service = f"fxprivaterelay-{settings.RELAY_CHANNEL}"
if settings.RELAY_CHANNEL == "prod":
service = "fxprivaterelay"
version_info = get_version_info()
version = version_info.get("version", "unknown")
return service, version | null |
154,479 | import base64
import json
from pathlib import Path
from typing import Any
import requests
import os
from django.apps import AppConfig
from django.conf import settings
from django.utils.functional import cached_property
def write_gcp_key_json_file(gcp_key_json_path: Path) -> None:
# create the gcp key json file from contents of GOOGLE_CLOUD_PROFILER_CREDENTIALS_B64
google_app_creds = base64.b64decode(settings.GOOGLE_CLOUD_PROFILER_CREDENTIALS_B64)
if not google_app_creds == b"":
with open(gcp_key_json_path, "w+") as gcp_key_file:
gcp_key_file.write(google_app_creds.decode("utf-8")) | null |
154,480 | from __future__ import annotations
from datetime import datetime
from logging import getLogger
from typing import Any, Literal, NamedTuple
from django.conf import settings
from django.contrib.auth.models import User
from django.http import HttpRequest
from ipware import get_client_ip
from emails.models import DomainAddress, RelayAddress
from .glean.server_events import EventsServerEventLogger, GLEAN_EVENT_MOZLOG_TYPE
from .types import RELAY_CHANNEL_NAME
The provided code snippet includes necessary dependencies for implementing the `_opt_dt_to_glean` function. Write a Python function `def _opt_dt_to_glean(value: datetime | None) -> int` to solve the following problem:
Convert an optional datetime to an integer timestamp.
Here is the function:
def _opt_dt_to_glean(value: datetime | None) -> int:
"""Convert an optional datetime to an integer timestamp."""
return -1 if value is None else int(value.timestamp()) | Convert an optional datetime to an integer timestamp. |
154,481 | from __future__ import annotations
from datetime import datetime
from logging import getLogger
from typing import Any, Literal, NamedTuple
from django.conf import settings
from django.contrib.auth.models import User
from django.http import HttpRequest
from ipware import get_client_ip
from emails.models import DomainAddress, RelayAddress
from .glean.server_events import EventsServerEventLogger, GLEAN_EVENT_MOZLOG_TYPE
from .types import RELAY_CHANNEL_NAME
The provided code snippet includes necessary dependencies for implementing the `_opt_str_to_glean` function. Write a Python function `def _opt_str_to_glean(value: str | None) -> str` to solve the following problem:
Convert an optional string to a (possibly empty) string.
Here is the function:
def _opt_str_to_glean(value: str | None) -> str:
"""Convert an optional string to a (possibly empty) string."""
return "" if value is None else value | Convert an optional string to a (possibly empty) string. |
154,482 | from copy import deepcopy
from functools import lru_cache
from typing import get_args, Literal, TypedDict
from django.conf import settings
CountryStr = Literal[
"AT", # Austria
"BE", # Belgium
"BG", # Bulgaria
"CA", # Canada
"CH", # Switzerland
"CY", # Cyprus
"CZ", # Czech Republic / Czechia
"DE", # Germany
"DK", # Denmark
"EE", # Estonia
"ES", # Spain
"FI", # Finland
"FR", # France
"GB", # United Kingdom
"GR", # Greece
"HR", # Croatia
"HU", # Hungary
"IE", # Ireland
"IT", # Italy
"LT", # Lituania
"LU", # Luxembourg
"LV", # Latvia
"MT", # Malta
"MY", # Malaysia
"NL", # Netherlands
"NZ", # New Zealand
"PL", # Poland
"PT", # Portugal
"RO", # Romania
"SE", # Sweden
"SG", # Singapore
"SI", # Slovenia
"SK", # Slovakia
"US", # United States
]
def get_premium_country_language_mapping() -> PlanCountryLangMapping:
"""Get mapping for premium countries (unlimited masks, custom subdomain)"""
return _country_language_mapping("premium")
The provided code snippet includes necessary dependencies for implementing the `get_premium_countries` function. Write a Python function `def get_premium_countries() -> set[CountryStr]` to solve the following problem:
Get the country codes where Relay premium can be sold
Here is the function:
def get_premium_countries() -> set[CountryStr]:
    """Return the set of country codes where Relay premium can be sold."""
    # Iterating the mapping yields its keys (country codes).
    return {country for country in get_premium_country_language_mapping()}
154,483 | from copy import deepcopy
from functools import lru_cache
from typing import get_args, Literal, TypedDict
from django.conf import settings
# Maps a two-letter country code to per-language price/period details
# (see PricePeriodsForLanguageDict).
PlanCountryLangMapping = dict[CountryStr, PricePeriodsForLanguageDict]
def _country_language_mapping(plan: _RelayPlanCategory) -> PlanCountryLangMapping:
    """Build the plan mapping, passing the US price IDs so the cached helper
    keys on them as well as on the plan."""
    price_ids = {
        "us_premium_monthly_price_id": settings.PREMIUM_PLAN_ID_US_MONTHLY,
        "us_premium_yearly_price_id": settings.PREMIUM_PLAN_ID_US_YEARLY,
        "us_phone_monthly_price_id": settings.PHONE_PLAN_ID_US_MONTHLY,
        "us_phone_yearly_price_id": settings.PHONE_PLAN_ID_US_YEARLY,
        "us_bundle_yearly_price_id": settings.BUNDLE_PLAN_ID_US,
    }
    return _cached_country_language_mapping(plan=plan, **price_ids)
The provided code snippet includes necessary dependencies for implementing the `get_phone_country_language_mapping` function. Write a Python function `def get_phone_country_language_mapping() -> PlanCountryLangMapping` to solve the following problem:
Get mapping for phone countries (premium + phone mask)
Here is the function:
def get_phone_country_language_mapping() -> PlanCountryLangMapping:
    """Return the country/language mapping for the phone plan
    (premium features plus a phone mask)."""
    phone_mapping = _country_language_mapping("phones")
    return phone_mapping
154,484 | from django.db import migrations
def add_flag(apps, schema_editor):
    """Create the "new_from_address" waffle flag if it does not already exist."""
    flag_model = apps.get_model("waffle", "Flag")
    flag_defaults = {"note": "MPP-1813: Try different reply address"}
    flag_model.objects.get_or_create(name="new_from_address", defaults=flag_defaults)
154,485 | from django.db import migrations
def remove_flag(apps, schema_editor):
    """Delete the "new_from_address" waffle flag."""
    flag_model = apps.get_model("waffle", "Flag")
    flag_model.objects.filter(name="new_from_address").delete()
154,486 | from django.db import migrations
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add database defaults for new fields provider_id and settings in table socialapp. This field was added by 0004_app_provider_id_settings. https://github.com/pennersr/django-allauth/blob/32c9eaf2d70cfae4f52f8e51b0ac4cd1523c5915/allauth/socialaccount/migrations/0004_app_provider_id_settings.py The database default is used by our migrations tests. In practice, it is unlikely and unwise to add a new SocialApp in the middle of a deployment. `./manage.py sqlmigrate` did not work, so the sqlite3 steps are manual.
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add database defaults for new fields provider_id and settings in table socialapp.

    This field was added by 0004_app_provider_id_settings.
    https://github.com/pennersr/django-allauth/blob/32c9eaf2d70cfae4f52f8e51b0ac4cd1523c5915/allauth/socialaccount/migrations/0004_app_provider_id_settings.py

    The database default is used by our migrations tests. In practice, it is unlikely
    and unwise to add a new SocialApp in the middle of a deployment.

    `./manage.py sqlmigrate` did not work, so the sqlite3 steps are manual.
    """
    if schema_editor.connection.vendor.startswith("postgres"):
        # PostgreSQL can change a column default in place.
        schema_editor.execute(
            """
            ALTER TABLE "socialaccount_socialapp"
            ALTER COLUMN "provider_id" SET DEFAULT '';
            """
        )
        schema_editor.execute(
            """
            ALTER TABLE "socialaccount_socialapp"
            ALTER COLUMN "settings" SET DEFAULT '{}';
            """
        )
    elif schema_editor.connection.vendor.startswith("sqlite"):
        # SQLite cannot ALTER COLUMN; recreate the table with the defaults,
        # copy the rows over, then swap the new table into place.
        schema_editor.execute(
            """
            CREATE TABLE "new__socialaccount_socialapp" (
                "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
                "provider" varchar(30) NOT NULL,
                "name" varchar(40) NOT NULL,
                "client_id" varchar(191) NOT NULL,
                "secret" varchar(191) NOT NULL,
                "key" varchar(191) NOT NULL,
                "provider_id" varchar(200) NOT NULL DEFAULT '', -- Add default
                "settings" text NOT NULL
                    DEFAULT '{}'
                    CHECK ((JSON_VALID("settings") OR "settings" IS NULL))
            );
            """
        )
        schema_editor.execute(
            """
            INSERT INTO "new__socialaccount_socialapp"
            ("id", "provider", "name", "client_id", "secret", "key", "provider_id",
             "settings")
            SELECT
            "id", "provider", "name", "client_id", "secret", "key", "provider_id",
            "settings"
            FROM "socialaccount_socialapp";
            """
        )
        schema_editor.execute('DROP TABLE "socialaccount_socialapp";')
        schema_editor.execute(
            """
            ALTER TABLE "new__socialaccount_socialapp"
            RENAME TO "socialaccount_socialapp";
            """
        )
    else:
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
154,487 | from django.apps.registry import Apps
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
# Name of the functional index on upper(email); shared by the forward and
# reverse migration steps.
INDEX_NAME = "account_emailaddress_email_upper"


def add_account_email_index(
    apps: Apps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Create a case-insensitive (upper(email)) index on account_emailaddress."""
    vendor = schema_editor.connection.vendor
    if vendor.startswith("postgres"):
        engine = "postgres"
    elif vendor.startswith("sqlite"):
        engine = "sqlite"
    else:
        raise Exception(f'Unknown database vendor "{vendor}"')
    if_not_exists = "IF NOT EXISTS" if engine == "postgres" else ""
    schema_editor.execute(
        f"CREATE INDEX {if_not_exists} {INDEX_NAME} ON account_emailaddress (upper(email));"
    )
154,488 | from django.apps.registry import Apps
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
# Name of the functional index on upper(email); shared by the forward and
# reverse migration steps.
INDEX_NAME = "account_emailaddress_email_upper"


def drop_account_email_index(
    apps: Apps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Drop the upper(email) index on account_emailaddress, if present."""
    statement = f"DROP INDEX IF EXISTS {INDEX_NAME};"
    schema_editor.execute(statement)
154,489 | from django.apps.registry import Apps
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def add_index(
    schema_editor: BaseDatabaseSchemaEditor, index_name: str, table_name: str
) -> None:
    """Create a functional index on upper(email) for the given table.

    Raises for database vendors other than PostgreSQL and SQLite.
    """
    vendor = schema_editor.connection.vendor
    if vendor.startswith("postgres"):
        engine = "postgres"
    elif vendor.startswith("sqlite"):
        engine = "sqlite"
    else:
        raise Exception(f'Unknown database vendor "{vendor}"')
    # SQLite does not support IF NOT EXISTS the same way here, so only
    # Postgres gets the guard.
    if_not_exists = "IF NOT EXISTS" if engine == "postgres" else ""
    schema_editor.execute(
        f"CREATE INDEX {if_not_exists} {index_name} ON {table_name} (upper(email));"
    )
def add_account_email_index(
    apps: Apps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Create the upper(email) index on the account_emailaddress table."""
    add_index(
        schema_editor,
        index_name="account_emailaddress_email_upper",
        table_name="account_emailaddress",
    )
154,490 | from django.apps.registry import Apps
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def drop_index(schema_editor: BaseDatabaseSchemaEditor, index_name: str) -> None:
    """Drop the named index unconditionally."""
    statement = f"DROP INDEX {index_name};"
    schema_editor.execute(statement)


def drop_account_email_index(
    apps: Apps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Remove the upper(email) index from the account_emailaddress table."""
    drop_index(schema_editor, "account_emailaddress_email_upper")
154,491 | from django.apps.registry import Apps
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def add_index(
    schema_editor: BaseDatabaseSchemaEditor, index_name: str, table_name: str
) -> None:
    """Create a functional index on upper(email) for the given table.

    Raises for database vendors other than PostgreSQL and SQLite.
    """
    vendor = schema_editor.connection.vendor
    if vendor.startswith("postgres"):
        engine = "postgres"
    elif vendor.startswith("sqlite"):
        engine = "sqlite"
    else:
        raise Exception(f'Unknown database vendor "{vendor}"')
    # Only Postgres gets the IF NOT EXISTS guard.
    if_not_exists = "IF NOT EXISTS" if engine == "postgres" else ""
    schema_editor.execute(
        f"CREATE INDEX {if_not_exists} {index_name} ON {table_name} (upper(email));"
    )


def add_auth_email_index(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None:
    """Create the upper(email) index on the auth_user table."""
    add_index(schema_editor, index_name="auth_user_email_upper", table_name="auth_user")
154,492 | from django.apps.registry import Apps
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def drop_index(schema_editor: BaseDatabaseSchemaEditor, index_name: str) -> None:
    """Drop the named index unconditionally.

    The body was missing (the def was immediately followed by the next def,
    which is a syntax error); restored to match the identical helper used by
    the account_emailaddress index migration.
    """
    schema_editor.execute(f"DROP INDEX {index_name};")


def drop_auth_email_index(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None:
    """Remove the upper(email) index from the auth_user table."""
    drop_index(schema_editor, "auth_user_email_upper")
154,493 | from datetime import datetime, timezone
import time
import markus
from django.conf import settings
from django.shortcuts import redirect
from whitenoise.middleware import WhiteNoiseMiddleware
def _get_metric_view_name(request):
if request.resolver_match:
view = request.resolver_match.func
return f"{view.__module__}.{view.__name__}"
return "<unknown_view>" | null |
154,494 | from django.dispatch import receiver
from allauth.account.signals import user_signed_up, user_logged_in
from emails.utils import incr_if_enabled
def incr_if_enabled(name, value=1, tags=None):
    """Increment the statsd counter *name*, but only when metrics are enabled."""
    if not settings.STATSD_ENABLED:
        return
    metrics.incr(name, value, tags)
def record_user_signed_up(request, user, **kwargs):
    """Count a signup and leave a session breadcrumb so the login receiver
    can tell a brand-new user apart from a returning one."""
    incr_if_enabled("user_signed_up", 1)
    # The user_signed_up signal has no response object, so the cookie is set
    # later by the user_logged_in receiver, which reads this session flag.
    request.session["user_created"] = True
    request.session.modified = True
154,495 | from django.dispatch import receiver
from allauth.account.signals import user_signed_up, user_logged_in
from emails.utils import incr_if_enabled
def incr_if_enabled(name, value=1, tags=None):
    """Send a statsd increment for *name* when STATSD_ENABLED is set."""
    enabled = settings.STATSD_ENABLED
    if enabled:
        metrics.incr(name, value, tags)
def record_user_logged_in(request, user, **kwargs):
    """Count a login and, when a response is available, set a short-lived
    cookie naming the analytics event."""
    incr_if_enabled("user_logged_in", 1)
    # The signup receiver leaves a session flag because it has no response
    # object; a fresh signup is therefore reported here instead.
    signed_up = request.session.get("user_created", False)
    event = "user_signed_up" if signed_up else "user_logged_in"
    response = kwargs.get("response")
    if response:
        response.set_cookie(f"server_ga_event:{event}", event, max_age=5)
154,496 | from datetime import datetime, timedelta, timezone
from django.conf import settings
from django.core.management.base import BaseCommand, CommandParser
from privaterelay.fxa_utils import get_phone_subscription_dates
from privaterelay.management.utils import (
get_free_phone_social_accounts,
get_phone_subscriber_social_accounts,
)
import logging
logger = logging.getLogger("events")
def get_phone_subscription_dates(social_account):
    """
    Return phone subscription dates for a SocialAccount.

    :return: a tuple (date_subscribed_phone, start_date, end_date) of
        timezone-aware datetimes, or (None, None, None) when no valid phone
        subscription data could be obtained from FxA.
    """
    subscription_data = get_subscription_data_from_fxa(social_account)
    if "refreshed" in subscription_data.keys():
        # user token refreshed for expanded scope
        social_account.refresh_from_db()
        # retry getting detailed subscription data
        subscription_data = get_subscription_data_from_fxa(social_account)
        if "refreshed" in subscription_data.keys():
            # Still no detailed data after the token refresh; give up.
            return None, None, None
    if "subscriptions" not in subscription_data.keys():
        # failed to get subscriptions data which may mean user never had subscription
        # and/or there is data mismatch with FxA
        if not flag_is_active_in_task("free_phones", social_account.user):
            # User who was flagged for having phone subscriptions
            # did not actually have phone subscriptions
            logger.error(
                "accounts_subscription_endpoint_failed",
                extra={"fxa_message": subscription_data.get("message", "")},
            )
        return None, None, None
    date_subscribed_phone = start_date = end_date = None
    product_w_phone_capabilites = [settings.PHONE_PROD_ID, settings.BUNDLE_PROD_ID]
    for sub in subscription_data.get("subscriptions", []):
        # Even if a user upgraded a subscription, e.g. from monthly to yearly
        # or from phone to the VPN bundle, use the dates of the last matching
        # subscription. Later, when the subscription details only show one
        # valid subscription, this information can be updated.
        subscription_created_timestamp = None
        subscription_start_timestamp = None
        subscription_end_timestamp = None
        if sub.get("product_id") in product_w_phone_capabilites:
            subscription_created_timestamp = sub.get("created")
            subscription_start_timestamp = sub.get("current_period_start")
            subscription_end_timestamp = sub.get("current_period_end")
        else:
            # not a product id for phone subscription, continue
            continue
        # NOTE(review): `(a and b and c) is None` is True only when the
        # last-evaluated operand is None; a falsy-but-not-None timestamp
        # (e.g. 0) would pass this check — confirm that is intended.
        subscription_date_none = (
            subscription_created_timestamp
            and subscription_start_timestamp
            and subscription_end_timestamp
        ) is None
        if subscription_date_none:
            # subscription dates are required fields according to FxA documentation:
            # https://mozilla.github.io/ecosystem-platform/api#tag/Subscriptions/operation/getOauthMozillasubscriptionsCustomerBillingandsubscriptions
            logger.error(
                "accounts_subscription_subscription_date_invalid",
                extra={"subscription": sub},
            )
            return None, None, None
        date_subscribed_phone = datetime.fromtimestamp(
            subscription_created_timestamp, tz=timezone.utc
        )
        start_date = datetime.fromtimestamp(
            subscription_start_timestamp, tz=timezone.utc
        )
        end_date = datetime.fromtimestamp(subscription_end_timestamp, tz=timezone.utc)
    return date_subscribed_phone, start_date, end_date
def get_free_phone_social_accounts() -> set[SocialAccount]:
    """Collect SocialAccounts for every user and group on the "free_phones"
    waffle flag; empty set when the flag does not exist."""
    flag = Flag.objects.filter(name="free_phones").first()
    if flag is None:
        return set()
    accounts = set(SocialAccount.objects.filter(user__in=flag.users.all()))
    for group in flag.groups.all():
        accounts.update(SocialAccount.objects.filter(user__in=group.users.all()))
    return accounts
def get_phone_subscriber_social_accounts() -> set[SocialAccount]:
    """Collect SocialAccounts whose FxA extra_data mentions a subscription
    that includes phone masking."""
    subscribers: set[SocialAccount] = set()
    for subscription_name in settings.SUBSCRIPTIONS_WITH_PHONE:
        subscribers.update(
            SocialAccount.objects.filter(extra_data__icontains=subscription_name)
        )
    return subscribers
def sync_phone_related_dates_on_profile(group: str) -> int:
    """
    Populate phone subscription dates on user profiles from FxA data.

    :param group: which accounts to sync: "subscription" (phone subscribers,
        the default set), "free" (free-phone flag users only), or "both".
    :return: the number of profiles updated.
    """
    social_accounts_with_phones = get_phone_subscriber_social_accounts()
    free_phones_social_accounts = get_free_phone_social_accounts()
    if group == "free":
        social_accounts_with_phones = free_phones_social_accounts
    if group == "both":
        social_accounts_with_phones.update(free_phones_social_accounts)
    if not settings.PHONES_ENABLED or len(social_accounts_with_phones) == 0:
        return 0
    num_updated_accounts = 0
    datetime_now = datetime.now(timezone.utc)
    for social_account in social_accounts_with_phones:
        date_subscribed_phone, start_date, end_date = get_phone_subscription_dates(
            social_account
        )
        profile = social_account.user.profile
        # NOTE(review): `(a and b and c) is None` is True only when the
        # last-evaluated operand is None; get_phone_subscription_dates
        # returns all three as None together, so this works in practice.
        if (date_subscribed_phone and start_date and end_date) is None:
            # No subscription info from FxA
            if group == "subscription":
                # Unsure if social account user should have phone subscription
                logger.error(
                    "no_subscription_data_in_fxa_for_user_with_phone_subscription",
                    extra={"fxa_uid": social_account.uid},
                )
            if (
                social_account in free_phones_social_accounts
                and profile.date_phone_subscription_reset is None
            ):
                # Free-phone users without FxA subscription data get a reset
                # date at the start of the current month.
                profile.date_phone_subscription_reset = datetime_now.replace(day=1)
                profile.save()
                num_updated_accounts += 1
            continue
        # User has/had a valid phone subscriptions, populate phone date fields
        profile.date_subscribed_phone = date_subscribed_phone
        profile.date_phone_subscription_start = start_date
        profile.date_phone_subscription_end = end_date
        if profile.date_phone_subscription_reset is None:
            # initialize the reset date for phone subscription users to the start of the subscription
            profile.date_phone_subscription_reset = start_date
        # Bring a stale reset date forward in 31-day steps until it is within
        # the last MAX_DAYS_IN_MONTH days.
        thirtyone_days_ago = datetime_now - timedelta(settings.MAX_DAYS_IN_MONTH)
        while profile.date_phone_subscription_reset < thirtyone_days_ago:
            profile.date_phone_subscription_reset += timedelta(
                settings.MAX_DAYS_IN_MONTH
            )
        profile.save()
        num_updated_accounts += 1
    return num_updated_accounts
154,497 | from datetime import datetime, timedelta, timezone
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
import logging
from emails.models import Profile
from privaterelay.management.utils import (
get_free_phone_social_accounts,
get_phone_subscriber_social_accounts,
)
if settings.PHONES_ENABLED:
from phones.models import RelayNumber
def reset_phone_remaining_stats(user: User) -> None:
    """Restore the user's RelayNumber text and minute allowances to the
    configured maximums. Does nothing when the user has no RelayNumber."""
    try:
        relay_number = RelayNumber.objects.get(user=user)
    except RelayNumber.DoesNotExist:
        # No RelayNumber set; nothing to reset.
        return
    relay_number.remaining_texts = settings.MAX_TEXTS_PER_BILLING_CYCLE
    relay_number.remaining_seconds = settings.MAX_MINUTES_PER_BILLING_CYCLE * 60
    relay_number.save()
def get_next_reset_date(profile: Profile) -> datetime:
    """Return the datetime when the profile's phone usage should next reset.

    Assumes the profile has already been checked to have a phone
    subscription or to be a free phone user. When the profile's reset date
    was never set, logs an error and returns a time slightly in the past so
    the caller resets the account immediately.
    """
    # TODO: consider moving this as a property in Profile model
    if profile.date_phone_subscription_reset is None:
        # Either sync_phone_related_dates_on_profile has a problem, or this
        # is a new foxfooder whose date_phone_subscription_reset was not set.
        if profile.fxa:
            fxa_uid = profile.fxa.uid
        else:
            fxa_uid = "None"
        logger.error(
            "phone_user_profile_dates_not_set",
            extra={
                "fxa_uid": fxa_uid,
                # Bug fix: this key previously logged
                # profile.date_phone_subscription_end (copy/paste slip).
                "date_subscribed_phone": profile.date_subscribed_phone,
                "date_phone_subscription_start": profile.date_phone_subscription_start,
                "date_phone_subscription_reset": profile.date_phone_subscription_reset,
                "date_phone_subscription_end": profile.date_phone_subscription_end,
            },
        )
        # A past datetime makes the caller treat the reset as due right now.
        return datetime.now(timezone.utc) - timedelta(minutes=15)
    calculated_next_reset_date = profile.date_phone_subscription_reset + timedelta(
        settings.MAX_DAYS_IN_MONTH
    )
    if profile.date_phone_subscription_end is None:
        return calculated_next_reset_date
    # The subscription end date caps the next reset date.
    return min(profile.date_phone_subscription_end, calculated_next_reset_date)
def get_free_phone_social_accounts() -> set[SocialAccount]:
    """Gather the SocialAccounts granted free phones via the "free_phones"
    waffle flag (both directly-listed users and group members)."""
    free_phones_flag = Flag.objects.filter(name="free_phones").first()
    if not free_phones_flag:
        return set()
    members = set(SocialAccount.objects.filter(user__in=free_phones_flag.users.all()))
    for flag_group in free_phones_flag.groups.all():
        group_accounts = SocialAccount.objects.filter(
            user__in=flag_group.users.all()
        )
        members.update(group_accounts)
    return members
def get_phone_subscriber_social_accounts() -> set[SocialAccount]:
    """Gather SocialAccounts whose FxA extra_data shows a phone-capable plan."""
    matches: set[SocialAccount] = set()
    for plan_id in settings.SUBSCRIPTIONS_WITH_PHONE:
        matches.update(SocialAccount.objects.filter(extra_data__icontains=plan_id))
    return matches
def update_phone_remaining_stats() -> tuple[int, int]:
    """
    Reset remaining texts/seconds for accounts whose reset date has arrived.

    :return: (number of phone accounts considered, number of profiles reset)
    """
    social_accounts_with_phones = get_phone_subscriber_social_accounts()
    free_phones_social_accounts = get_free_phone_social_accounts()
    social_accounts_with_phones.update(free_phones_social_accounts)
    if not settings.PHONES_ENABLED or len(social_accounts_with_phones) == 0:
        return 0, 0
    updated_profiles = []
    datetime_now = datetime.now(timezone.utc)
    for social_account in social_accounts_with_phones:
        profile = social_account.user.profile
        next_reset_date = get_next_reset_date(profile)
        if next_reset_date > datetime_now:
            # Not due yet; leave this account alone.
            continue
        # next reset day is now or in the past
        reset_phone_remaining_stats(profile.user)
        profile.date_phone_subscription_reset = datetime_now
        profile.save()
        updated_profiles.append(profile)
    return len(social_accounts_with_phones), len(updated_profiles)
154,498 | from __future__ import annotations
from pathlib import Path
from typing import Any, TYPE_CHECKING, cast, get_args
import ipaddress
import os
import sys
from decouple import config, Choices, Csv
import django_stubs_ext
import markus
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.logging import ignore_logger
from hashlib import sha256
import base64
from django.conf.global_settings import LANGUAGES as DEFAULT_LANGUAGES
import dj_database_url
from .types import RELAY_CHANNEL_NAME
# Metrics are emitted when the statsd backend is enabled or statsd debug
# mode is on.
STATSD_ENABLED: bool = DJANGO_STATSD_ENABLED or STATSD_DEBUG
def _get_initial_middleware() -> list[str]:
    """Return middleware that must run first: the response-metrics timer
    when statsd is enabled, otherwise nothing."""
    middleware: list[str] = []
    if STATSD_ENABLED:
        middleware.append("privaterelay.middleware.ResponseMetrics")
    return middleware
154,499 | from __future__ import annotations
from pathlib import Path
from typing import Any, TYPE_CHECKING, cast, get_args
import ipaddress
import os
import sys
from decouple import config, Choices, Csv
import django_stubs_ext
import markus
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.logging import ignore_logger
from hashlib import sha256
import base64
from django.conf.global_settings import LANGUAGES as DEFAULT_LANGUAGES
import dj_database_url
from .types import RELAY_CHANNEL_NAME
# Project root: two directory levels above this settings module.
BASE_DIR: str = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
DEBUG = config("DEBUG", False, cast=bool)
if DEBUG:
    # Hosts allowed to see debug tooling, from the environment.
    INTERNAL_IPS = config("DJANGO_INTERNAL_IPS", default="", cast=Csv())
if DEBUG:
    INSTALLED_APPS += [
        "debug_toolbar",
    ]
if DEBUG:
    MIDDLEWARE.append("debug_toolbar.middleware.DebugToolbarMiddleware")
if DEBUG:
    # In production, we run collectstatic to index all static files.
    # However, when running locally, we want to automatically pick up
    # all files spewed out by `npm run watch` in /frontend/out,
    # and we're fine with the performance impact of that.
    WHITENOISE_ROOT = os.path.join(BASE_DIR, "frontend/out")
if DEBUG and not IN_PYTEST:
    # The browsable API renderer interferes with tests, so keep it out of pytest runs.
    DRF_RENDERERS += [
        "rest_framework.renderers.BrowsableAPIRenderer",
    ]
def set_index_cache_control_headers(
    headers: wsgiref.headers.Headers, path: str, url: str
) -> None:
    """WhiteNoise hook: make clients revalidate index.html on every request."""
    if DEBUG:
        index_path = os.path.join(BASE_DIR, "frontend/out", "index.html")
    else:
        index_path = os.path.join(STATIC_ROOT, "index.html")
    if path == index_path:
        headers["Cache-Control"] = "no-cache, public"
154,500 | from __future__ import annotations
from datetime import datetime, timezone
from typing import Any
from uuid import uuid4
import json
class EventsServerEventLogger:
    def __init__(
        self, application_id: str, app_display_version: str, channel: str
    ) -> None:
        """
        Create EventsServerEventLogger instance.

        :param str application_id: The application ID.
        :param str app_display_version: The application display version.
        :param str channel: The channel.
        """
        # Stored for use when building ping envelopes in _record().
        self._application_id = application_id
        self._app_display_version = app_display_version
        self._channel = channel
def _record(self, user_agent: str, ip_address: str, event: dict[str, Any]) -> None:
now = datetime.now(timezone.utc)
timestamp = now.isoformat()
event["timestamp"] = int(1000.0 * now.timestamp()) # Milliseconds since epoch
event_payload = {
"metrics": {},
"events": [event],
"ping_info": {
# seq is required in the Glean schema, however is not useful in server context
"seq": 0,
"start_time": timestamp,
"end_time": timestamp,
},
# `Unknown` fields below are required in the Glean schema, however they are
# not useful in server context
"client_info": {
"telemetry_sdk_build": "glean_parser v13.0.0",
"first_run_date": "Unknown",
"os": "Unknown",
"os_version": "Unknown",
"architecture": "Unknown",
"app_build": "Unknown",
"app_display_version": self._app_display_version,
"app_channel": self._channel,
},
}
event_payload_serialized = json.dumps(event_payload)
# This is the message structure that Decoder expects:
# https://github.com/mozilla/gcp-ingestion/pull/2400
ping = {
"document_namespace": self._application_id,
"document_type": "events",
"document_version": "1",
"document_id": str(uuid4()),
"user_agent": user_agent,
"ip_address": ip_address,
"payload": event_payload_serialized,
}
self.emit_record(now, ping)
def emit_record(self, now: datetime, ping: dict[str, Any]) -> None:
"""Log the ping to STDOUT.
Applications might want to override this method to use their own logging.
If doing so, make sure to log the ping as JSON, and to include the
`Type: GLEAN_EVENT_MOZLOG_TYPE`."""
ping_envelope = {
"Timestamp": now.isoformat(),
"Logger": "glean",
"Type": GLEAN_EVENT_MOZLOG_TYPE,
"Fields": ping,
}
ping_envelope_serialized = json.dumps(ping_envelope)
print(ping_envelope_serialized)
def record_email_blocked(
self,
user_agent: str,
ip_address: str,
client_id: str,
fxa_id: str,
platform: str,
n_random_masks: int,
n_domain_masks: int,
n_deleted_random_masks: int,
n_deleted_domain_masks: int,
date_joined_relay: int,
premium_status: str,
date_joined_premium: int,
has_extension: bool,
date_got_extension: int,
mask_id: str,
is_random_mask: bool,
is_reply: bool,
reason: str,
) -> None:
"""
Record and submit a email_blocked event:
Relay receives but does not forward an email for a Relay user.
Event is logged to STDOUT via `print`.
:param str user_agent: The user agent.
:param str ip_address: The IP address. Will be used to decode Geo information
and scrubbed at ingestion.
:param str client_id: Firefox client ID
:param str fxa_id: Mozilla accounts user ID
:param str platform: Relay client platform
:param int n_random_masks: Number of random masks
:param int n_domain_masks: Number of premium subdomain masks
:param int n_deleted_random_masks: Number of deleted random masks
:param int n_deleted_domain_masks: Number of deleted domain masks
:param int date_joined_relay: Timestamp for joining Relay, seconds since epoch
:param str premium_status: Subscription type and term
:param int date_joined_premium: Timestamp for starting premium_status subscription, seconds since epoch, -1 if not subscribed
:param bool has_extension: The user has the Relay Add-on
:param int date_got_extension: Timestamp for adding Relay Add-on, seconds since epoch, -1 if not used
:param str mask_id: Mask ID, 'R' (random mask) or 'D' (domain mask) followed by a number.
:param bool is_random_mask: The mask is a random mask, instead of a domain mask
:param bool is_reply: The email is a reply from the Relay user
:param str reason: Code describing why the email was blocked
"""
event = {
"category": "email",
"name": "blocked",
"extra": {
"client_id": str(client_id),
"fxa_id": str(fxa_id),
"platform": str(platform),
"n_random_masks": str(n_random_masks),
"n_domain_masks": str(n_domain_masks),
"n_deleted_random_masks": str(n_deleted_random_masks),
"n_deleted_domain_masks": str(n_deleted_domain_masks),
"date_joined_relay": str(date_joined_relay),
"premium_status": str(premium_status),
"date_joined_premium": str(date_joined_premium),
"has_extension": str(has_extension).lower(),
"date_got_extension": str(date_got_extension),
"mask_id": str(mask_id),
"is_random_mask": str(is_random_mask).lower(),
"is_reply": str(is_reply).lower(),
"reason": str(reason),
},
}
self._record(user_agent, ip_address, event)
def record_email_forwarded(
self,
user_agent: str,
ip_address: str,
client_id: str,
fxa_id: str,
platform: str,
n_random_masks: int,
n_domain_masks: int,
n_deleted_random_masks: int,
n_deleted_domain_masks: int,
date_joined_relay: int,
premium_status: str,
date_joined_premium: int,
has_extension: bool,
date_got_extension: int,
mask_id: str,
is_random_mask: bool,
is_reply: bool,
) -> None:
"""
Record and submit a email_forwarded event:
Relay receives and forwards an email for a Relay user.
Event is logged to STDOUT via `print`.
:param str user_agent: The user agent.
:param str ip_address: The IP address. Will be used to decode Geo information
and scrubbed at ingestion.
:param str client_id: Firefox client ID
:param str fxa_id: Mozilla accounts user ID
:param str platform: Relay client platform
:param int n_random_masks: Number of random masks
:param int n_domain_masks: Number of premium subdomain masks
:param int n_deleted_random_masks: Number of deleted random masks
:param int n_deleted_domain_masks: Number of deleted domain masks
:param int date_joined_relay: Timestamp for joining Relay, seconds since epoch
:param str premium_status: Subscription type and term
:param int date_joined_premium: Timestamp for starting premium_status subscription, seconds since epoch, -1 if not subscribed
:param bool has_extension: The user has the Relay Add-on
:param int date_got_extension: Timestamp for adding Relay Add-on, seconds since epoch, -1 if not used
:param str mask_id: Mask ID, 'R' (random mask) or 'D' (domain mask) followed by a number.
:param bool is_random_mask: The mask is a random mask, instead of a domain mask
:param bool is_reply: The email is a reply from the Relay user
"""
event = {
"category": "email",
"name": "forwarded",
"extra": {
"client_id": str(client_id),
"fxa_id": str(fxa_id),
"platform": str(platform),
"n_random_masks": str(n_random_masks),
"n_domain_masks": str(n_domain_masks),
"n_deleted_random_masks": str(n_deleted_random_masks),
"n_deleted_domain_masks": str(n_deleted_domain_masks),
"date_joined_relay": str(date_joined_relay),
"premium_status": str(premium_status),
"date_joined_premium": str(date_joined_premium),
"has_extension": str(has_extension).lower(),
"date_got_extension": str(date_got_extension),
"mask_id": str(mask_id),
"is_random_mask": str(is_random_mask).lower(),
"is_reply": str(is_reply).lower(),
},
}
self._record(user_agent, ip_address, event)
def record_email_mask_created(
self,
user_agent: str,
ip_address: str,
client_id: str,
fxa_id: str,
platform: str,
n_random_masks: int,
n_domain_masks: int,
n_deleted_random_masks: int,
n_deleted_domain_masks: int,
date_joined_relay: int,
premium_status: str,
date_joined_premium: int,
has_extension: bool,
date_got_extension: int,
mask_id: str,
is_random_mask: bool,
created_by_api: bool,
has_website: bool,
) -> None:
"""
Record and submit a email_mask_created event:
A Relay user creates an email mask.
Event is logged to STDOUT via `print`.
:param str user_agent: The user agent.
:param str ip_address: The IP address. Will be used to decode Geo information
and scrubbed at ingestion.
:param str client_id: Firefox client ID
:param str fxa_id: Mozilla accounts user ID
:param str platform: Relay client platform
:param int n_random_masks: Number of random masks
:param int n_domain_masks: Number of premium subdomain masks
:param int n_deleted_random_masks: Number of deleted random masks
:param int n_deleted_domain_masks: Number of deleted domain masks
:param int date_joined_relay: Timestamp for joining Relay, seconds since epoch
:param str premium_status: Subscription type and term
:param int date_joined_premium: Timestamp for starting premium_status subscription, seconds since epoch, -1 if not subscribed
:param bool has_extension: The user has the Relay Add-on
:param int date_got_extension: Timestamp for adding Relay Add-on, seconds since epoch, -1 if not used
:param str mask_id: Mask ID, 'R' (random mask) or 'D' (domain mask) followed by a number.
:param bool is_random_mask: The mask is a random mask, instead of a domain mask
:param bool created_by_api: The mask was created via the API, rather than an incoming email
:param bool has_website: The mask was created by the Add-on or integration on a website
"""
event = {
"category": "email_mask",
"name": "created",
"extra": {
"client_id": str(client_id),
"fxa_id": str(fxa_id),
"platform": str(platform),
"n_random_masks": str(n_random_masks),
"n_domain_masks": str(n_domain_masks),
"n_deleted_random_masks": str(n_deleted_random_masks),
"n_deleted_domain_masks": str(n_deleted_domain_masks),
"date_joined_relay": str(date_joined_relay),
"premium_status": str(premium_status),
"date_joined_premium": str(date_joined_premium),
"has_extension": str(has_extension).lower(),
"date_got_extension": str(date_got_extension),
"mask_id": str(mask_id),
"is_random_mask": str(is_random_mask).lower(),
"created_by_api": str(created_by_api).lower(),
"has_website": str(has_website).lower(),
},
}
self._record(user_agent, ip_address, event)
def record_email_mask_deleted(
    self,
    user_agent: str,
    ip_address: str,
    client_id: str,
    fxa_id: str,
    platform: str,
    n_random_masks: int,
    n_domain_masks: int,
    n_deleted_random_masks: int,
    n_deleted_domain_masks: int,
    date_joined_relay: int,
    premium_status: str,
    date_joined_premium: int,
    has_extension: bool,
    date_got_extension: int,
    mask_id: str,
    is_random_mask: bool,
) -> None:
    """
    Record and submit an email_mask_deleted event: a Relay user deletes an
    email mask. The event is logged to STDOUT via `print`.

    :param str user_agent: The user agent.
    :param str ip_address: The IP address; used to decode Geo information
        and scrubbed at ingestion.
    :param str client_id: Firefox client ID
    :param str fxa_id: Mozilla accounts user ID
    :param str platform: Relay client platform
    :param int n_random_masks: Number of random masks
    :param int n_domain_masks: Number of premium subdomain masks
    :param int n_deleted_random_masks: Number of deleted random masks
    :param int n_deleted_domain_masks: Number of deleted domain masks
    :param int date_joined_relay: Timestamp for joining Relay, seconds since epoch
    :param str premium_status: Subscription type and term
    :param int date_joined_premium: Timestamp for starting the premium_status
        subscription, seconds since epoch, -1 if not subscribed
    :param bool has_extension: The user has the Relay Add-on
    :param int date_got_extension: Timestamp for adding the Relay Add-on,
        seconds since epoch, -1 if not used
    :param str mask_id: Mask ID, 'R' (random mask) or 'D' (domain mask)
        followed by a number.
    :param bool is_random_mask: The mask is a random mask, not a domain mask
    """
    # Glean string extras: booleans are lowercased ("true"/"false"),
    # everything else is stringified.
    has_extension_str = str(has_extension).lower()
    is_random_mask_str = str(is_random_mask).lower()
    extra = {
        "client_id": str(client_id),
        "fxa_id": str(fxa_id),
        "platform": str(platform),
        "n_random_masks": str(n_random_masks),
        "n_domain_masks": str(n_domain_masks),
        "n_deleted_random_masks": str(n_deleted_random_masks),
        "n_deleted_domain_masks": str(n_deleted_domain_masks),
        "date_joined_relay": str(date_joined_relay),
        "premium_status": str(premium_status),
        "date_joined_premium": str(date_joined_premium),
        "has_extension": has_extension_str,
        "date_got_extension": str(date_got_extension),
        "mask_id": str(mask_id),
        "is_random_mask": is_random_mask_str,
    }
    self._record(
        user_agent,
        ip_address,
        {"category": "email_mask", "name": "deleted", "extra": extra},
    )
def record_email_mask_label_updated(
    self,
    user_agent: str,
    ip_address: str,
    client_id: str,
    fxa_id: str,
    platform: str,
    n_random_masks: int,
    n_domain_masks: int,
    n_deleted_random_masks: int,
    n_deleted_domain_masks: int,
    date_joined_relay: int,
    premium_status: str,
    date_joined_premium: int,
    has_extension: bool,
    date_got_extension: int,
    mask_id: str,
    is_random_mask: bool,
) -> None:
    """
    Record and submit an email_mask_label_updated event: a Relay user updates
    an email mask's label. The event is logged to STDOUT via `print`.

    :param str user_agent: The user agent.
    :param str ip_address: The IP address; used to decode Geo information
        and scrubbed at ingestion.
    :param str client_id: Firefox client ID
    :param str fxa_id: Mozilla accounts user ID
    :param str platform: Relay client platform
    :param int n_random_masks: Number of random masks
    :param int n_domain_masks: Number of premium subdomain masks
    :param int n_deleted_random_masks: Number of deleted random masks
    :param int n_deleted_domain_masks: Number of deleted domain masks
    :param int date_joined_relay: Timestamp for joining Relay, seconds since epoch
    :param str premium_status: Subscription type and term
    :param int date_joined_premium: Timestamp for starting the premium_status
        subscription, seconds since epoch, -1 if not subscribed
    :param bool has_extension: The user has the Relay Add-on
    :param int date_got_extension: Timestamp for adding the Relay Add-on,
        seconds since epoch, -1 if not used
    :param str mask_id: Mask ID, 'R' (random mask) or 'D' (domain mask)
        followed by a number.
    :param bool is_random_mask: The mask is a random mask, not a domain mask
    """
    # Glean string extras: booleans are lowercased ("true"/"false"),
    # everything else is stringified.
    has_extension_str = str(has_extension).lower()
    is_random_mask_str = str(is_random_mask).lower()
    extra = {
        "client_id": str(client_id),
        "fxa_id": str(fxa_id),
        "platform": str(platform),
        "n_random_masks": str(n_random_masks),
        "n_domain_masks": str(n_domain_masks),
        "n_deleted_random_masks": str(n_deleted_random_masks),
        "n_deleted_domain_masks": str(n_deleted_domain_masks),
        "date_joined_relay": str(date_joined_relay),
        "premium_status": str(premium_status),
        "date_joined_premium": str(date_joined_premium),
        "has_extension": has_extension_str,
        "date_got_extension": str(date_got_extension),
        "mask_id": str(mask_id),
        "is_random_mask": is_random_mask_str,
    }
    self._record(
        user_agent,
        ip_address,
        {"category": "email_mask", "name": "label_updated", "extra": extra},
    )
The provided code snippet includes the necessary dependencies for implementing the `create_events_server_event_logger` function. Write a Python function `def create_events_server_event_logger( application_id: str, app_display_version: str, channel: str, ) -> EventsServerEventLogger` that solves the following problem:
Factory function that creates an instance of the Glean Server Event Logger to record `events` ping events. :param str application_id: The application ID. :param str app_display_version: The application display version. :param str channel: The channel. :return: An instance of EventsServerEventLogger. :rtype: EventsServerEventLogger
Here is the function:
def create_events_server_event_logger(
    application_id: str,
    app_display_version: str,
    channel: str,
) -> EventsServerEventLogger:
    """
    Factory for a Glean Server Event Logger that records `events` ping events.

    :param str application_id: The application ID.
    :param str app_display_version: The application display version.
    :param str channel: The channel.
    :return: An instance of EventsServerEventLogger.
    :rtype: EventsServerEventLogger
    """
    logger = EventsServerEventLogger(application_id, app_display_version, channel)
    return logger
154,501 | from datetime import datetime, timezone
from functools import lru_cache
from hashlib import sha256
from typing import Any, Iterable, Optional, TypedDict
import json
import logging
from django.apps import apps
from django.conf import settings
from django.db import IntegrityError, transaction
from django.http import HttpRequest, HttpResponse, JsonResponse
from django.shortcuts import redirect
from django.urls import reverse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from rest_framework.decorators import api_view, schema
from allauth.socialaccount.models import SocialAccount, SocialApp
from allauth.socialaccount.providers.fxa.views import FirefoxAccountsOAuth2Adapter
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
from google_measurement_protocol import event, report
from oauthlib.oauth2.rfc6749.errors import CustomOAuth2Error
import jwt
import sentry_sdk
from emails.models import (
CannotMakeSubdomainException,
DomainAddress,
RelayAddress,
valid_available_subdomain,
)
from emails.utils import incr_if_enabled
from .apps import PrivateRelayConfig
from .fxa_utils import _get_oauth2_session, NoSocialToken
def _get_fxa(request):
def update_fxa(
social_account: SocialAccount,
authentic_jwt: Optional[FxAEvent] = None,
event_key: Optional[str] = None,
) -> HttpResponse:
def incr_if_enabled(name, value=1, tags=None):
def profile_refresh(request):
    """Re-fetch the user's FxA data, counting premium purchases.

    Anonymous visitors are redirected to the FxA login flow. For logged-in
    users the FxA profile is re-fetched, and a "user_purchased_premium"
    counter is incremented when the purchase cookie is present and the
    profile now has premium.
    """
    user = request.user
    if not user or user.is_anonymous:
        return redirect(reverse("fxa_login"))
    # Grab the profile before the FxA refresh, as the original flow did.
    profile = user.profile
    update_fxa(_get_fxa(request))
    if "clicked-purchase" in request.COOKIES and profile.has_premium:
        incr_if_enabled("user_purchased_premium", 1)
    return JsonResponse({})
154,502 | from datetime import datetime, timezone
from functools import lru_cache
from hashlib import sha256
from typing import Any, Iterable, Optional, TypedDict
import json
import logging
from django.apps import apps
from django.conf import settings
from django.db import IntegrityError, transaction
from django.http import HttpRequest, HttpResponse, JsonResponse
from django.shortcuts import redirect
from django.urls import reverse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from rest_framework.decorators import api_view, schema
from allauth.socialaccount.models import SocialAccount, SocialApp
from allauth.socialaccount.providers.fxa.views import FirefoxAccountsOAuth2Adapter
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
from google_measurement_protocol import event, report
from oauthlib.oauth2.rfc6749.errors import CustomOAuth2Error
import jwt
import sentry_sdk
from emails.models import (
CannotMakeSubdomainException,
DomainAddress,
RelayAddress,
valid_available_subdomain,
)
from emails.utils import incr_if_enabled
from .apps import PrivateRelayConfig
from .fxa_utils import _get_oauth2_session, NoSocialToken
def valid_available_subdomain(subdomain, *args, **kwargs):
    """Validate that ``subdomain`` is well-formed, allowed, and unclaimed.

    Raises CannotMakeSubdomainException when the subdomain is empty or null,
    fails the pattern check, contains a bad or blocked word, or is already
    registered. Returns True otherwise.
    """
    if not subdomain:
        raise CannotMakeSubdomainException("error-subdomain-cannot-be-empty-or-null")
    candidate = subdomain.lower()
    # Well-formed: 1-63 alphanumerics and/or hyphens, and it can't start or
    # end with a hyphen.
    pattern = re.compile("^(?!-)[a-z0-9-]{1,63}(?<!-)$")
    well_formed = pattern.match(candidate) is not None
    # All checks are evaluated eagerly (no short-circuit), matching the
    # original behavior.
    contains_bad_word = has_bad_words(candidate)
    contains_blocked_word = is_blocklisted(candidate)
    already_taken = (
        RegisteredSubdomain.objects.filter(
            subdomain_hash=hash_subdomain(candidate)
        ).count()
        > 0
    )
    if (
        not well_formed
        or contains_bad_word
        or contains_blocked_word
        or already_taken
    ):
        raise CannotMakeSubdomainException("error-subdomain-not-available")
    return True
class CannotMakeSubdomainException(BadRequest):
    """Exception raised by Profile due to error on subdomain creation.

    Attributes:
        message -- optional explanation of the error (an FTL error id such
        as "error-subdomain-not-available")
    """

    def __init__(self, message=None):
        self.message = message
def profile_subdomain(request):
    """Check (GET) or register (POST) a premium user's custom subdomain.

    GET ?subdomain=foo returns {"available": true} or a 400 with an error
    message. POST subdomain=foo registers the subdomain and returns 202.
    Anonymous users are redirected to the FxA login flow. Non-premium users
    raise CannotMakeSubdomainException outside the try block, so it
    propagates to the caller.
    """
    if not request.user or request.user.is_anonymous:
        return redirect(reverse("fxa_login"))
    profile = request.user.profile
    if not profile.has_premium:
        raise CannotMakeSubdomainException("error-premium-check-subdomain")
    try:
        if request.method == "GET":
            subdomain = request.GET.get("subdomain", None)
            # Raises CannotMakeSubdomainException when unavailable.
            available = valid_available_subdomain(subdomain)
            return JsonResponse({"available": available})
        else:
            subdomain = request.POST.get("subdomain", None)
            profile.add_subdomain(subdomain)
            return JsonResponse(
                {"status": "Accepted", "message": "success-subdomain-registered"},
                status=202,
            )
    except CannotMakeSubdomainException as e:
        return JsonResponse({"message": e.message, "subdomain": subdomain}, status=400)
154,503 | from datetime import datetime, timezone
from functools import lru_cache
from hashlib import sha256
from typing import Any, Iterable, Optional, TypedDict
import json
import logging
from django.apps import apps
from django.conf import settings
from django.db import IntegrityError, transaction
from django.http import HttpRequest, HttpResponse, JsonResponse
from django.shortcuts import redirect
from django.urls import reverse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from rest_framework.decorators import api_view, schema
from allauth.socialaccount.models import SocialAccount, SocialApp
from allauth.socialaccount.providers.fxa.views import FirefoxAccountsOAuth2Adapter
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
from google_measurement_protocol import event, report
from oauthlib.oauth2.rfc6749.errors import CustomOAuth2Error
import jwt
import sentry_sdk
from emails.models import (
CannotMakeSubdomainException,
DomainAddress,
RelayAddress,
valid_available_subdomain,
)
from emails.utils import incr_if_enabled
from .apps import PrivateRelayConfig
from .fxa_utils import _get_oauth2_session, NoSocialToken
logger = logging.getLogger("events")
def metrics_event(request):
    """Forward a client-side metrics ping to Google Analytics.

    Expects a JSON body containing at least ``ga_uuid``; the optional GA
    event fields (category/action/label/value) and custom dimensions 5 and 7
    are relayed via the Measurement Protocol. Responds 415 for bad JSON,
    404 for a missing ga_uuid, 500 when reporting fails, 200 otherwise.
    """
    try:
        payload = json.loads(request.body)
    except json.JSONDecodeError:
        return JsonResponse({"msg": "Could not decode JSON"}, status=415)
    if "ga_uuid" not in payload:
        return JsonResponse({"msg": "No GA uuid found"}, status=404)
    # "dimension5" is a GA custom dimension: which browser vendor the add-on
    # is using (Firefox or Chrome). "dimension7" is a GA custom dimension:
    # where the ping comes from - website (default), add-on, or app.
    event_data = event(
        payload.get("category", None),
        payload.get("action", None),
        payload.get("label", None),
        payload.get("value", None),
        dimension5=payload.get("dimension5", None),
        dimension7=payload.get("dimension7", "website"),
    )
    try:
        report(settings.GOOGLE_ANALYTICS_ID, payload.get("ga_uuid"), event_data)
    except Exception as exc:
        logger.error("metrics_event", extra={"error": exc})
        return JsonResponse({"msg": "Unable to report metrics event."}, status=500)
    return JsonResponse({"msg": "OK"}, status=200)
154,504 | from datetime import datetime, timezone
from functools import lru_cache
from hashlib import sha256
from typing import Any, Iterable, Optional, TypedDict
import json
import logging
from django.apps import apps
from django.conf import settings
from django.db import IntegrityError, transaction
from django.http import HttpRequest, HttpResponse, JsonResponse
from django.shortcuts import redirect
from django.urls import reverse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from rest_framework.decorators import api_view, schema
from allauth.socialaccount.models import SocialAccount, SocialApp
from allauth.socialaccount.providers.fxa.views import FirefoxAccountsOAuth2Adapter
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
from google_measurement_protocol import event, report
from oauthlib.oauth2.rfc6749.errors import CustomOAuth2Error
import jwt
import sentry_sdk
from emails.models import (
CannotMakeSubdomainException,
DomainAddress,
RelayAddress,
valid_available_subdomain,
)
from emails.utils import incr_if_enabled
from .apps import PrivateRelayConfig
from .fxa_utils import _get_oauth2_session, NoSocialToken
# FxA relying-party event schema URL for account deletion.
FXA_DELETE_EVENT = "https://schemas.accounts.firefox.com/event/delete-user"
# Events that trigger a profile re-fetch. NOTE(review): the two constants
# referenced here are presumably defined earlier in this module - confirm.
PROFILE_EVENTS = [FXA_PROFILE_CHANGE_EVENT, FXA_SUBSCRIPTION_CHANGE_EVENT]
# Info-level logger for FxA relying-party event handling.
info_logger = logging.getLogger("eventsinfo")
def _parse_jwt_from_request(request: HttpRequest) -> str:
    """Extract the bearer token from the request's Authorization header.

    Raises KeyError when the header is missing and IndexError when it does
    not contain "Bearer ".
    """
    request_auth = request.headers["Authorization"]
    return request_auth.split("Bearer ")[1]
def _authenticate_fxa_jwt(req_jwt: str) -> FxAEvent:
    """Verify ``req_jwt`` against FxA's keys, re-fetching them once on failure.

    Raises Exception when the JWT still cannot be verified after the refresh.
    """
    verified = _verify_jwt_with_fxa_key(req_jwt, fxa_verifying_keys())
    if verified:
        return verified
    # The cached FxA keys may be stale - re-fetch them and retry once.
    verified = _verify_jwt_with_fxa_key(req_jwt, fxa_verifying_keys(reload=True))
    if verified:
        return verified
    raise Exception("Could not authenticate JWT with FXA key.")
def _get_account_from_jwt(authentic_jwt: FxAEvent) -> SocialAccount:
    """Look up the FxA SocialAccount whose uid matches the JWT's ``sub``."""
    return SocialAccount.objects.get(
        uid=authentic_jwt["sub"], provider="fxa"
    )
def _get_event_keys_from_jwt(authentic_jwt: FxAEvent) -> Iterable[str]:
    """Return the event-schema keys listed in the JWT's ``events`` claim."""
    events = authentic_jwt["events"]
    return events.keys()
def update_fxa(
    social_account: SocialAccount,
    authentic_jwt: Optional[FxAEvent] = None,
    event_key: Optional[str] = None,
) -> HttpResponse:
    """Re-fetch the FxA profile for ``social_account`` and store the new data.

    Returns "202 Accepted" on any fetch failure (missing token, OAuth error,
    or a profile without an email) so the FxA webhook will not retry.
    Otherwise delegates to ``_update_all_data``. When called for a
    relying-party event, the event is logged with the email as a SHA-256
    hash, never in the clear.
    """
    try:
        client = _get_oauth2_session(social_account)
    except NoSocialToken as e:
        sentry_sdk.capture_exception(e)
        return HttpResponse("202 Accepted", status=202)

    # TODO: more graceful handling of profile fetch failures
    try:
        resp = client.get(FirefoxAccountsOAuth2Adapter.profile_url)
    except CustomOAuth2Error as e:
        sentry_sdk.capture_exception(e)
        return HttpResponse("202 Accepted", status=202)

    extra_data = resp.json()

    try:
        new_email = extra_data["email"]
    except KeyError as e:
        sentry_sdk.capture_exception(e)
        return HttpResponse("202 Accepted", status=202)

    if authentic_jwt and event_key:
        info_logger.info(
            "fxa_rp_event",
            extra={
                "fxa_uid": authentic_jwt["sub"],
                "event_key": event_key,
                # Hash the real address so it is never logged in the clear.
                "real_address": sha256(new_email.encode("utf-8")).hexdigest(),
            },
        )

    return _update_all_data(social_account, extra_data, new_email)
def _handle_fxa_delete(
    authentic_jwt: FxAEvent, social_account: SocialAccount, event_key: str
) -> None:
    """Delete the user and all their masks for an FxA delete-user event."""
    # Using for loops here because QuerySet.delete() does a bulk delete which
    # does not call the model delete() methods that create DeletedAddress
    # records
    for relay_address in RelayAddress.objects.filter(user=social_account.user):
        relay_address.delete()
    for domain_address in DomainAddress.objects.filter(user=social_account.user):
        domain_address.delete()
    social_account.user.delete()
    info_logger.info(
        "fxa_rp_event",
        extra={
            "fxa_uid": authentic_jwt["sub"],
            "event_key": event_key,
        },
    )
def fxa_rp_events(request: HttpRequest) -> HttpResponse:
    """Webhook endpoint for FxA relying-party events.

    Authenticates the JWT from the Authorization header, then dispatches
    each event key: profile/subscription changes re-fetch the FxA profile,
    and delete-user events remove the user and their masks. Returns 202 for
    an unknown account so FxA will not retry.
    """
    req_jwt = _parse_jwt_from_request(request)
    authentic_jwt = _authenticate_fxa_jwt(req_jwt)
    event_keys = _get_event_keys_from_jwt(authentic_jwt)
    try:
        social_account = _get_account_from_jwt(authentic_jwt)
    except SocialAccount.DoesNotExist as e:
        # capture an exception in sentry, but don't error, or FXA will retry
        sentry_sdk.capture_exception(e)
        return HttpResponse("202 Accepted", status=202)

    for event_key in event_keys:
        if event_key in PROFILE_EVENTS:
            if settings.DEBUG:
                info_logger.info(
                    "fxa_profile_update",
                    extra={
                        "jwt": authentic_jwt,
                        "event_key": event_key,
                    },
                )
            update_fxa(social_account, authentic_jwt, event_key)
        if event_key == FXA_DELETE_EVENT:
            _handle_fxa_delete(authentic_jwt, social_account, event_key)
    return HttpResponse("200 OK", status=200)
154,505 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
def verification_code_default():
    """Return a random, zero-padded, 6-digit verification code string."""
    code = secrets.randbelow(1000000)
    return f"{code:06d}"
154,506 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
def verification_sent_date_default():
    """Return the current time as a timezone-aware UTC datetime."""
    now = datetime.now(timezone.utc)
    return now
154,507 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
MAX_MINUTES_TO_VERIFY_REAL_PHONE = 5
class RealPhone(models.Model):
def save(self, *args, **kwargs):
def mark_verified(self):
def get_expired_unverified_realphone_records(number):
    """Return unverified RealPhone records for ``number`` whose verification
    window has already expired."""
    cutoff = datetime.now(timezone.utc) - timedelta(
        0, 60 * settings.MAX_MINUTES_TO_VERIFY_REAL_PHONE
    )
    return RealPhone.objects.filter(
        number=number,
        verified=False,
        verification_sent_date__lt=cutoff,
    )
154,508 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
MAX_MINUTES_TO_VERIFY_REAL_PHONE = 5
class RealPhone(models.Model):
    """A user's real phone number and its verification state.

    A new record gets a random default verification_code; the
    realphone_post_save receiver texts the code to the number, and
    mark_verified() is called once the user echoes it back.
    """

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # E.164 numbers are at most 15 digits.
    number = models.CharField(max_length=15)
    verification_code = models.CharField(
        max_length=8, default=verification_code_default
    )
    verification_sent_date = models.DateTimeField(
        blank=True, null=True, default=verification_sent_date_default
    )
    verified = models.BooleanField(default=False)
    verified_date = models.DateTimeField(blank=True, null=True)
    country_code = models.CharField(max_length=2, default="US")

    class Meta:
        constraints = [
            # A number may be *verified* by at most one user; unverified
            # duplicates are allowed while verification is pending.
            models.UniqueConstraint(
                fields=["number", "verified"],
                condition=models.Q(verified=True),
                name="unique_verified_number",
            )
        ]

    def save(self, *args, **kwargs):
        """Save, enforcing at most one verified number per user.

        Raises BadRequest when the user already has a verified number other
        than this exact (number, verification_code) pair.
        """
        # delete any expired unverified RealPhone records for this number
        # note: it doesn't matter which user is trying to create a new
        # RealPhone record - any expired unverified record for the number
        # should be deleted
        expired_verification_records = get_expired_unverified_realphone_records(
            self.number
        )
        expired_verification_records.delete()

        # We are not ready to support multiple real phone numbers per user,
        # so raise an exception if this save() would create a second
        # RealPhone record for the user
        user_verified_number_records = get_verified_realphone_records(self.user)
        for verified_number in user_verified_number_records:
            if (
                verified_number.number == self.number
                and verified_number.verification_code == self.verification_code
            ):
                # User is verifying the same number twice
                return super().save(*args, **kwargs)
            else:
                raise BadRequest("User already has a verified number.")

        # call super save to save into the DB
        # See also: realphone_post_save receiver below
        return super().save(*args, **kwargs)

    def mark_verified(self):
        """Mark this number as verified now and persist the change."""
        incr_if_enabled("phones_RealPhone.mark_verified")
        self.verified = True
        self.verified_date = datetime.now(timezone.utc)
        self.save(force_update=True)
        return self
def get_pending_unverified_realphone_records(number):
    """Return unverified RealPhone records for ``number`` that are still
    inside the verification window."""
    window_start = datetime.now(timezone.utc) - timedelta(
        0, 60 * settings.MAX_MINUTES_TO_VERIFY_REAL_PHONE
    )
    return RealPhone.objects.filter(
        number=number,
        verified=False,
        verification_sent_date__gt=window_start,
    )
154,509 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
class RealPhone(models.Model):
    """A user's real phone number and its verification state.

    A new record gets a random default verification_code; the
    realphone_post_save receiver texts the code to the number, and
    mark_verified() is called once the user echoes it back.
    """

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # E.164 numbers are at most 15 digits.
    number = models.CharField(max_length=15)
    verification_code = models.CharField(
        max_length=8, default=verification_code_default
    )
    verification_sent_date = models.DateTimeField(
        blank=True, null=True, default=verification_sent_date_default
    )
    verified = models.BooleanField(default=False)
    verified_date = models.DateTimeField(blank=True, null=True)
    country_code = models.CharField(max_length=2, default="US")

    class Meta:
        constraints = [
            # A number may be *verified* by at most one user; unverified
            # duplicates are allowed while verification is pending.
            models.UniqueConstraint(
                fields=["number", "verified"],
                condition=models.Q(verified=True),
                name="unique_verified_number",
            )
        ]

    def save(self, *args, **kwargs):
        """Save, enforcing at most one verified number per user.

        Raises BadRequest when the user already has a verified number other
        than this exact (number, verification_code) pair.
        """
        # delete any expired unverified RealPhone records for this number
        # note: it doesn't matter which user is trying to create a new
        # RealPhone record - any expired unverified record for the number
        # should be deleted
        expired_verification_records = get_expired_unverified_realphone_records(
            self.number
        )
        expired_verification_records.delete()

        # We are not ready to support multiple real phone numbers per user,
        # so raise an exception if this save() would create a second
        # RealPhone record for the user
        user_verified_number_records = get_verified_realphone_records(self.user)
        for verified_number in user_verified_number_records:
            if (
                verified_number.number == self.number
                and verified_number.verification_code == self.verification_code
            ):
                # User is verifying the same number twice
                return super().save(*args, **kwargs)
            else:
                raise BadRequest("User already has a verified number.")

        # call super save to save into the DB
        # See also: realphone_post_save receiver below
        return super().save(*args, **kwargs)

    def mark_verified(self):
        """Mark this number as verified now and persist the change."""
        incr_if_enabled("phones_RealPhone.mark_verified")
        self.verified = True
        self.verified_date = datetime.now(timezone.utc)
        self.save(force_update=True)
        return self
def get_verified_realphone_record(number):
    """Return the verified RealPhone record for ``number``, or None."""
    matches = RealPhone.objects.filter(number=number, verified=True)
    return matches.first()
154,510 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
MAX_MINUTES_TO_VERIFY_REAL_PHONE = 5
class RealPhone(models.Model):
    """A user's real phone number and its verification state.

    A new record gets a random default verification_code; the
    realphone_post_save receiver texts the code to the number, and
    mark_verified() is called once the user echoes it back.
    """

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # E.164 numbers are at most 15 digits.
    number = models.CharField(max_length=15)
    verification_code = models.CharField(
        max_length=8, default=verification_code_default
    )
    verification_sent_date = models.DateTimeField(
        blank=True, null=True, default=verification_sent_date_default
    )
    verified = models.BooleanField(default=False)
    verified_date = models.DateTimeField(blank=True, null=True)
    country_code = models.CharField(max_length=2, default="US")

    class Meta:
        constraints = [
            # A number may be *verified* by at most one user; unverified
            # duplicates are allowed while verification is pending.
            models.UniqueConstraint(
                fields=["number", "verified"],
                condition=models.Q(verified=True),
                name="unique_verified_number",
            )
        ]

    def save(self, *args, **kwargs):
        """Save, enforcing at most one verified number per user.

        Raises BadRequest when the user already has a verified number other
        than this exact (number, verification_code) pair.
        """
        # delete any expired unverified RealPhone records for this number
        # note: it doesn't matter which user is trying to create a new
        # RealPhone record - any expired unverified record for the number
        # should be deleted
        expired_verification_records = get_expired_unverified_realphone_records(
            self.number
        )
        expired_verification_records.delete()

        # We are not ready to support multiple real phone numbers per user,
        # so raise an exception if this save() would create a second
        # RealPhone record for the user
        user_verified_number_records = get_verified_realphone_records(self.user)
        for verified_number in user_verified_number_records:
            if (
                verified_number.number == self.number
                and verified_number.verification_code == self.verification_code
            ):
                # User is verifying the same number twice
                return super().save(*args, **kwargs)
            else:
                raise BadRequest("User already has a verified number.")

        # call super save to save into the DB
        # See also: realphone_post_save receiver below
        return super().save(*args, **kwargs)

    def mark_verified(self):
        """Mark this number as verified now and persist the change."""
        incr_if_enabled("phones_RealPhone.mark_verified")
        self.verified = True
        self.verified_date = datetime.now(timezone.utc)
        self.save(force_update=True)
        return self
def get_valid_realphone_verification_record(user, number, verification_code):
    """Return the user's matching, still-fresh verification record, or None."""
    sent_after = datetime.now(timezone.utc) - timedelta(
        0, 60 * settings.MAX_MINUTES_TO_VERIFY_REAL_PHONE
    )
    return RealPhone.objects.filter(
        user=user,
        number=number,
        verification_code=verification_code,
        verification_sent_date__gt=sent_after,
    ).first()
154,511 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
def iq_fmt(e164_number: str) -> str:
    """Convert an E.164 number ("+15551234567") to iQ's "1"-prefixed
    national format ("15551234567").

    NOTE(review): "E164" is not an ISO region code; phonenumbers.parse()
    ignores the region hint when the input starts with "+", so this works
    for E.164 input - confirm inputs always carry the leading "+".
    """
    return "1" + str(phonenumbers.parse(e164_number, "E164").national_number)
154,512 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
def incr_if_enabled(name, value=1, tags=None):
    """Increment the ``name`` statsd counter when STATSD_ENABLED is set;
    otherwise a no-op.

    NOTE(review): ``metrics`` is presumably a module-level statsd client
    defined elsewhere in this module - confirm.
    """
    if settings.STATSD_ENABLED:
        metrics.incr(name, value, tags)
def twilio_client() -> Client:
    """Return the app-wide Twilio client from the phones app config.

    NOTE(review): the assert guards against real Twilio calls under test
    settings, but asserts are stripped under ``python -O`` - consider
    raising an explicit exception instead.
    """
    assert not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
    return phones_config().twilio_client
def send_iq_sms(to_num: str, from_num: str, text: str) -> None:
    """Send an SMS through the iQ publish-message HTTP API.

    Raises a ValidationError carrying the decoded response body when the
    API answers with a non-2xx status.
    """
    # iQ takes numbers without the E.164 leading "+"
    iq_formatted_to_num = to_num.replace("+", "")
    iq_formatted_from_num = from_num.replace("+", "")
    json_body = {
        "from": iq_formatted_from_num,
        "to": [iq_formatted_to_num],
        "text": text,
    }
    # NOTE(review): `requests`, `json`, and `exceptions` are not in this
    # module's visible imports — confirm they are in scope.
    resp = requests.post(
        settings.IQ_PUBLISH_MESSAGE_URL,
        headers={"Authorization": f"Bearer {settings.IQ_OUTBOUND_API_KEY}"},
        json=json_body,
    )
    if resp.status_code < 200 or resp.status_code > 299:
        raise exceptions.ValidationError(json.loads(resp.content.decode()))
def realphone_post_save(sender, instance, created, **kwargs):
    """post_save receiver: text a verification code when a RealPhone is created."""
    # Skip entirely while migrations run (the recorder saves Migration rows).
    if type(instance) == MigrationRecorder.Migration:
        return
    # Only the initial create sends a verification code; updates do not.
    if not created:
        return
    incr_if_enabled("phones_RealPhone.post_save_created_send_verification")
    text_body = f"Your Firefox Relay verification code is {instance.verification_code}"
    if settings.PHONES_NO_CLIENT_CALLS_IN_TEST:
        # Never place real client calls from tests.
        return
    if settings.IQ_FOR_VERIFICATION:
        send_iq_sms(instance.number, settings.IQ_MAIN_NUMBER, text_body)
        return
    twilio_client().messages.create(
        body=text_body,
        from_=settings.TWILIO_MAIN_NUMBER,
        to=instance.number,
    )
154,513 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
def vcard_lookup_key_default():
    """Return a random 6-character alphanumeric key for vCard lookup URLs."""
    alphabet = string.ascii_letters + string.digits
    return "".join(secrets.choice(alphabet) for _ in range(6))
154,514 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
logger = logging.getLogger("eventsinfo")
class CachedList:
    """A sorted list of strings persisted in the cache as one comma-joined value."""

    def __init__(self, cache_key: str) -> None:
        self.cache_key = cache_key
        raw = cache.get(self.cache_key, "")
        # An empty cached value means an empty list, not [""].
        self.data = raw.split(",") if raw else []

    def __iter__(self) -> Iterator[str]:
        yield from self.data

    def append(self, item: str) -> None:
        """Add an item, keep the list sorted, and write it back to the cache."""
        self.data.append(item)
        self.data.sort()
        cache.set(self.cache_key, ",".join(self.data))
The provided code snippet includes necessary dependencies for implementing the `register_with_messaging_service` function. Write a Python function `def register_with_messaging_service(client: Client, number_sid: str) -> None` to solve the following problem:
Register a Twilio US phone number with a Messaging Service.
Here is the function:
def register_with_messaging_service(client: Client, number_sid: str) -> None:
    """Register a Twilio US phone number with a Messaging Service.

    Tries each service SID in settings.TWILIO_MESSAGING_SERVICE_SID in order,
    skipping services already recorded as full, and returns on the first
    success (or if the number is already registered). Raises on unexpected
    Twilio errors, or when every service is full.
    """
    assert settings.TWILIO_MESSAGING_SERVICE_SID
    # Cached set of service SIDs previously seen to be at pool-size limit.
    closed_sids = CachedList("twilio_messaging_service_closed")
    for service_sid in settings.TWILIO_MESSAGING_SERVICE_SID:
        if service_sid in closed_sids:
            continue
        try:
            client.messaging.v1.services(service_sid).phone_numbers.create(
                phone_number_sid=number_sid
            )
        except TwilioRestException as err:
            log_extra = {
                "err_msg": err.msg,
                "status": err.status,
                "code": err.code,
                "service_sid": service_sid,
                "number_sid": number_sid,
            }
            if err.status == 409 and err.code == 21710:
                # Log "Phone Number is already in the Messaging Service"
                # https://www.twilio.com/docs/api/errors/21710
                logger.warning("twilio_messaging_service", extra=log_extra)
                return
            elif err.status == 412 and err.code == 21714:
                # Log "Number Pool size limit reached", continue to next service
                # https://www.twilio.com/docs/api/errors/21714
                closed_sids.append(service_sid)
                logger.warning("twilio_messaging_service", extra=log_extra)
            else:
                # Log and re-raise other Twilio errors
                logger.error("twilio_messaging_service", extra=log_extra)
                raise
        else:
            return  # Successfully registered with service
    raise Exception("All services in TWILIO_MESSAGING_SERVICE_SID are full")
154,515 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
def send_welcome_message(user, relay_number):
    """Text the user's real phone a welcome message with the Relay vCard attached."""
    real_phone = RealPhone.objects.get(user=user)
    assert settings.SITE_ORIGIN
    # Build an absolute URL for the vCard media attachment.
    vcard_path = reverse("vCard", kwargs={"lookup_key": relay_number.vcard_lookup_key})
    media_url = settings.SITE_ORIGIN + vcard_path
    welcome_text = (
        "Welcome to Relay phone masking! 🎉 Please add your number to your contacts."
        " This will help you identify your Relay messages and calls."
    )
    twilio_client().messages.create(
        body=welcome_text,
        from_=settings.TWILIO_MAIN_NUMBER,
        to=real_phone.number,
        media_url=[media_url],
    )
def incr_if_enabled(name, value=1, tags=None):
    """Increment a metrics counter, but only when STATSD_ENABLED is set."""
    # NOTE(review): `metrics` is not defined in this module's visible imports —
    # presumably provided elsewhere in the file; confirm.
    if settings.STATSD_ENABLED:
        metrics.incr(name, value, tags)
def relaynumber_post_save(sender, instance, created, **kwargs):
    """post_save receiver: send the welcome vCard when a Twilio RelayNumber is created."""
    # Skip entirely while migrations run (the recorder saves Migration rows).
    if type(instance) == MigrationRecorder.Migration:
        return
    # TODO: if IQ_FOR_NEW_NUMBERS, send welcome message via IQ
    if instance.vendor != "twilio":
        return
    if not created:
        # only send welcome vCard when creating new record
        return
    incr_if_enabled("phones_RelayNumber.post_save_created_send_welcome")
    if not settings.PHONES_NO_CLIENT_CALLS_IN_TEST:
        send_welcome_message(instance.user, instance)
154,516 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
def last_inbound_date_default():
    """Return the current time as a timezone-aware UTC datetime."""
    return datetime.now(tz=timezone.utc)
154,517 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
def get_verified_realphone_records(user):
    """Return a queryset of the user's verified RealPhone records."""
    return RealPhone.objects.filter(user=user, verified=True)
class RelayNumber(models.Model):
    """A Relay phone-mask number owned by one user.

    Saving a new instance validates ownership rules and, for the "twilio"
    vendor, provisions the number with Twilio before writing to the DB.
    """

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # E.164 number of the mask; unique across all users
    number = models.CharField(max_length=15, db_index=True, unique=True)
    vendor = models.CharField(max_length=15, default="twilio")
    location = models.CharField(max_length=255)
    country_code = models.CharField(max_length=2, default="US")
    # Random key used in the public vCard URL
    vcard_lookup_key = models.CharField(
        max_length=6, default=vcard_lookup_key_default, unique=True
    )
    enabled = models.BooleanField(default=True)
    # Remaining call time for the billing cycle, stored in seconds
    remaining_seconds = models.IntegerField(
        default=settings.MAX_MINUTES_PER_BILLING_CYCLE * 60
    )
    remaining_texts = models.IntegerField(default=settings.MAX_TEXTS_PER_BILLING_CYCLE)
    calls_forwarded = models.IntegerField(default=0)
    calls_blocked = models.IntegerField(default=0)
    texts_forwarded = models.IntegerField(default=0)
    texts_blocked = models.IntegerField(default=0)
    created_at = models.DateTimeField(null=True, auto_now_add=True)

    def remaining_minutes(self) -> int:
        """Return remaining call time in whole minutes, never negative."""
        # return a 0 or positive int for remaining minutes
        return floor(max(self.remaining_seconds, 0) / 60)

    def calls_and_texts_forwarded(self) -> int:
        """Total forwarded calls plus forwarded texts."""
        return self.calls_forwarded + self.texts_forwarded

    def calls_and_texts_blocked(self) -> int:
        """Total blocked calls plus blocked texts."""
        return self.calls_blocked + self.texts_blocked

    def storing_phone_log(self) -> bool:
        """Whether the user's profile opts into storing the phone log."""
        return bool(self.user.profile.store_phone_log)

    def save(self, *args, **kwargs):
        """Validate ownership rules and provision the number before saving.

        Raises ValidationError when the user has no verified real phone,
        already has a different relay number, or the number is claimed.
        """
        realphone = get_verified_realphone_records(self.user).first()
        if not realphone:
            raise ValidationError("User does not have a verified real phone.")
        # if this number exists for this user, this is an update call
        existing_numbers = RelayNumber.objects.filter(user=self.user)
        this_number = existing_numbers.filter(number=self.number).first()
        if this_number and this_number.id == self.id:
            return super().save(*args, **kwargs)
        elif existing_numbers.exists():
            raise ValidationError("User can have only one relay number.")
        if RelayNumber.objects.filter(number=self.number).exists():
            raise ValidationError("This number is already claimed.")
        use_twilio = (
            self.vendor == "twilio" and not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
        )
        if use_twilio:
            # Before saving into DB provision the number in Twilio
            client = twilio_client()
            # Since this will charge the Twilio account, first see if this
            # is running with TEST creds to avoid charges.
            if settings.TWILIO_TEST_ACCOUNT_SID:
                client = phones_config().twilio_test_client
            # NOTE(review): voice_application_sid is set to the SMS application
            # SID — presumably one TwiML app handles both; confirm intentional.
            twilio_incoming_number = client.incoming_phone_numbers.create(
                phone_number=self.number,
                sms_application_sid=settings.TWILIO_SMS_APPLICATION_SID,
                voice_application_sid=settings.TWILIO_SMS_APPLICATION_SID,
            )
        # Assume number was selected through suggested_numbers, so same country
        # as realphone
        self.country_code = realphone.country_code.upper()
        # Add US numbers to the Relay messaging service, so it goes into our
        # US A2P 10DLC campaign
        if use_twilio and self.country_code == "US":
            if settings.TWILIO_MESSAGING_SERVICE_SID:
                register_with_messaging_service(client, twilio_incoming_number.sid)
            else:
                logger.warning(
                    "Skipping Twilio Messaging Service registration, since"
                    " TWILIO_MESSAGING_SERVICE_SID is empty.",
                    extra={"number_sid": twilio_incoming_number.sid},
                )
        return super().save(*args, **kwargs)
def convert_twilio_numbers_to_dict(twilio_numbers):
    """
    To serialize twilio numbers to JSON for the API,
    we need to convert them into dictionaries.
    """
    # The attributes copied from each Twilio number object, in output order.
    fields = (
        "friendly_name",
        "iso_country",
        "locality",
        "phone_number",
        "postal_code",
        "region",
    )
    return [
        {field: getattr(twilio_number, field) for field in fields}
        for twilio_number in twilio_numbers
    ]
def twilio_client() -> Client:
    """Return the app-wide Twilio client from the phones app config."""
    # Guard against real Twilio calls in tests. NOTE: assert is stripped
    # under `python -O`; this is a test-environment safeguard, not validation.
    assert not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
    return phones_config().twilio_client
def suggested_numbers(user):
    """Suggest available Twilio numbers similar to the user's real number.

    Returns a dict of option lists grouped by similarity to the verified
    real phone. Raises BadRequest when the user has no verified real phone
    or already has a RelayNumber.
    """
    real_phone = get_verified_realphone_records(user).first()
    if real_phone is None:
        raise BadRequest(
            "available_numbers: This user hasn't verified a RealPhone yet."
        )
    existing_number = RelayNumber.objects.filter(user=user)
    if existing_number:
        raise BadRequest(
            "available_numbers: Another RelayNumber already exists for this user."
        )
    real_num = real_phone.number
    client = twilio_client()
    avail_nums = client.available_phone_numbers(real_phone.country_code)
    # TODO: can we make multiple pattern searches in a single Twilio API request
    # NOTE(review): the slices below assume real_num is a 12-char E.164 US
    # number like "+1AAAPPPSSSS" — confirm for other country codes.
    same_prefix_options = []
    # look for numbers with same area code and 3-number prefix
    contains = "%s****" % real_num[:8] if real_num else ""
    twilio_nums = avail_nums.local.list(contains=contains, limit=10)
    same_prefix_options.extend(convert_twilio_numbers_to_dict(twilio_nums))
    # look for numbers with same area code, 2-number prefix and suffix
    contains = "%s***%s" % (real_num[:7], real_num[10:]) if real_num else ""
    twilio_nums = avail_nums.local.list(contains=contains, limit=10)
    same_prefix_options.extend(convert_twilio_numbers_to_dict(twilio_nums))
    # look for numbers with same area code and 1-number prefix
    contains = "%s******" % real_num[:6] if real_num else ""
    twilio_nums = avail_nums.local.list(contains=contains, limit=10)
    same_prefix_options.extend(convert_twilio_numbers_to_dict(twilio_nums))
    # look for same number in other area codes
    contains = "+1***%s" % real_num[5:] if real_num else ""
    twilio_nums = avail_nums.local.list(contains=contains, limit=10)
    other_areas_options = convert_twilio_numbers_to_dict(twilio_nums)
    # look for any numbers in the area code
    contains = "%s*******" % real_num[:5] if real_num else ""
    twilio_nums = avail_nums.local.list(contains=contains, limit=10)
    same_area_options = convert_twilio_numbers_to_dict(twilio_nums)
    # look for any available numbers
    twilio_nums = avail_nums.local.list(limit=10)
    random_options = convert_twilio_numbers_to_dict(twilio_nums)
    return {
        "real_num": real_num,
        "same_prefix_options": same_prefix_options,
        "other_areas_options": other_areas_options,
        "same_area_options": same_area_options,
        "random_options": random_options,
    }
154,518 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
def convert_twilio_numbers_to_dict(twilio_numbers):
    """
    To serialize twilio numbers to JSON for the API,
    we need to convert them into dictionaries.
    """
    # The attributes copied from each Twilio number object, in output order.
    fields = (
        "friendly_name",
        "iso_country",
        "locality",
        "phone_number",
        "postal_code",
        "region",
    )
    return [
        {field: getattr(twilio_number, field) for field in fields}
        for twilio_number in twilio_numbers
    ]
def twilio_client() -> Client:
    """Return the app-wide Twilio client from the phones app config."""
    # Guard against real Twilio calls in tests. NOTE: assert is stripped
    # under `python -O`; this is a test-environment safeguard, not validation.
    assert not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
    return phones_config().twilio_client
def location_numbers(location, country_code="US"):
    """Return up to 10 available Twilio numbers in the given locality."""
    available = twilio_client().available_phone_numbers(country_code)
    matches = available.local.list(in_locality=location, limit=10)
    return convert_twilio_numbers_to_dict(matches)
154,519 | from __future__ import annotations
from datetime import datetime, timedelta, timezone
from math import floor
from typing import Iterator, Optional
import logging
import phonenumbers
import secrets
import string
from django.contrib.auth.models import User
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import BadRequest, ValidationError
from django.db.migrations.recorder import MigrationRecorder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.urls import reverse
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client
from emails.utils import incr_if_enabled
from .apps import phones_config, twilio_client
from .iq_utils import send_iq_sms
def convert_twilio_numbers_to_dict(twilio_numbers):
    """
    To serialize twilio numbers to JSON for the API,
    we need to convert them into dictionaries.
    """
    # The attributes copied from each Twilio number object, in output order.
    fields = (
        "friendly_name",
        "iso_country",
        "locality",
        "phone_number",
        "postal_code",
        "region",
    )
    return [
        {field: getattr(twilio_number, field) for field in fields}
        for twilio_number in twilio_numbers
    ]
def twilio_client() -> Client:
    """Return the app-wide Twilio client from the phones app config."""
    # Guard against real Twilio calls in tests. NOTE: assert is stripped
    # under `python -O`; this is a test-environment safeguard, not validation.
    assert not settings.PHONES_NO_CLIENT_CALLS_IN_TEST
    return phones_config().twilio_client
def area_code_numbers(area_code, country_code="US"):
    """Return up to 10 available Twilio numbers in the given area code."""
    available = twilio_client().available_phone_numbers(country_code)
    matches = available.local.list(area_code=area_code, limit=10)
    return convert_twilio_numbers_to_dict(matches)
154,520 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of 3000 for remaining_seconds, for PostgreSQL and SQLite3 Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add a database default of 50 for the (now nullable) remaining_minutes
    column, for PostgreSQL and SQLite3.

    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    # PostgreSQL can alter a column default in place.
    if schema_editor.connection.vendor.startswith("postgres"):
        schema_editor.execute(
            'ALTER TABLE "phones_relaynumber"'
            ' ALTER COLUMN "remaining_minutes" SET DEFAULT 50;'
        )
    # SQLite cannot ALTER a default: rebuild the table, copy the rows,
    # rename, and recreate the indexes.
    elif schema_editor.connection.vendor.startswith("sqlite"):
        schema_editor.execute(
            'CREATE TABLE "new__phones_relaynumber"'
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "calls_blocked" integer NOT NULL DEFAULT 0,'
            ' "calls_forwarded" integer NOT NULL DEFAULT 0,'
            ' "enabled" bool NOT NULL DEFAULT 1,'
            ' "location" varchar(255) NOT NULL,'
            ' "number" varchar(15) NOT NULL,'
            ' "remaining_minutes" integer NULL DEFAULT 50,'
            ' "remaining_seconds" integer NOT NULL DEFAULT 3000,'
            ' "remaining_texts" integer NOT NULL DEFAULT 75,'
            ' "texts_blocked" integer NOT NULL DEFAULT 0,'
            ' "texts_forwarded" integer NOT NULL DEFAULT 0,'
            ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'
            ' "vcard_lookup_key" varchar(6) NOT NULL UNIQUE);'
        )
        # Copy existing rows; remaining_minutes is reset to NULL.
        schema_editor.execute(
            'INSERT INTO "new__phones_relaynumber" ("id", "number", "location",'
            ' "user_id", "vcard_lookup_key", "enabled", "calls_blocked",'
            ' "calls_forwarded", "remaining_texts", "texts_blocked",'
            ' "texts_forwarded", "remaining_seconds", "remaining_minutes")'
            ' SELECT "id", "number", "location", "user_id", "vcard_lookup_key",'
            ' "enabled", "calls_blocked", "calls_forwarded", "remaining_texts",'
            ' "texts_blocked", "texts_forwarded", "remaining_seconds", NULL'
            ' FROM "phones_relaynumber";'
        )
        schema_editor.execute('DROP TABLE "phones_relaynumber";')
        schema_editor.execute(
            'ALTER TABLE "new__phones_relaynumber" RENAME TO "phones_relaynumber";'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_relaynumber_number_742e5d6b" ON "phones_relaynumber"'
            ' ("number");'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_relaynumber_user_id_62c65ede" ON "phones_relaynumber"'
            ' ("user_id");'
        )
    else:
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
154,521 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of TRUE for enabled, for PostgreSQL and SQLite3 Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add a database default of 'text' for last_inbound_type on
    phones_inboundcontact, for PostgreSQL and SQLite3.

    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    # PostgreSQL can alter a column default in place.
    if schema_editor.connection.vendor.startswith("postgres"):
        schema_editor.execute(
            'ALTER TABLE "phones_inboundcontact"'
            " ALTER COLUMN \"last_inbound_type\" SET DEFAULT 'text';"
        )
    # SQLite cannot ALTER a default: rebuild the table, copy the rows,
    # rename, and recreate the indexes.
    elif schema_editor.connection.vendor.startswith("sqlite"):
        schema_editor.execute(
            'CREATE TABLE IF NOT EXISTS "new__phones_inboundcontact" '
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "inbound_number" varchar(15) NOT NULL,'
            ' "last_inbound_date" datetime NOT NULL,'
            " \"last_inbound_type\" varchar(4) NOT NULL DEFAULT 'text',"
            ' "num_calls" integer unsigned NOT NULL CHECK ("num_calls" >= 0),'
            ' "num_calls_blocked" integer unsigned NOT NULL CHECK ("num_calls_blocked" >= 0),'
            ' "num_texts" integer unsigned NOT NULL CHECK ("num_texts" >= 0),'
            ' "num_texts_blocked" integer unsigned NOT NULL CHECK ("num_texts_blocked" >= 0),'
            ' "blocked" bool NOT NULL,'
            ' "relay_number_id" integer NOT NULL REFERENCES "phones_relaynumber" ("id")'
            " DEFERRABLE INITIALLY DEFERRED);"
        )
        # NOTE(review): the SELECT uses double-quoted "text" where a string
        # literal 'text' was presumably intended; SQLite's legacy quirk treats
        # an unknown double-quoted identifier as a string, so this works — but
        # it overwrites every row's last_inbound_type with 'text'. Confirm.
        schema_editor.execute(
            'INSERT INTO "new__phones_inboundcontact"'
            ' ("id", "inbound_number", "last_inbound_date", "last_inbound_type", "num_calls", "num_calls_blocked", "num_texts", "num_texts_blocked", "blocked", "relay_number_id")'
            ' SELECT "id", "inbound_number", "last_inbound_date", "text", "num_calls", "num_calls_blocked", "num_texts", "num_texts_blocked", "blocked", "relay_number_id"'
            ' FROM "phones_inboundcontact";'
        )
        schema_editor.execute('DROP TABLE "phones_inboundcontact";')
        schema_editor.execute(
            'ALTER TABLE "new__phones_inboundcontact" RENAME TO "phones_inboundcontact";'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_inboundcontact_relay_number_id_f95dbf8c" ON "phones_inboundcontact" ("relay_number_id");'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_inbo_relay_n_eaf332_idx" ON "phones_inboundcontact" ("relay_number_id", "inbound_number");'
        )
    else:
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
154,522 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of TRUE for enabled, for PostgreSQL and SQLite3 Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add a database default of TRUE for enabled, for PostgreSQL and SQLite3
    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    # PostgreSQL can alter a column default in place.
    if schema_editor.connection.vendor.startswith("postgres"):
        schema_editor.execute(
            'ALTER TABLE "phones_relaynumber"'
            ' ALTER COLUMN "enabled" SET DEFAULT true;'
        )
    # SQLite cannot ALTER a default: rebuild the table, copy the rows,
    # rename, and recreate the indexes.
    elif schema_editor.connection.vendor.startswith("sqlite"):
        schema_editor.execute(
            'CREATE TABLE "new__phones_relaynumber"'
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "enabled" bool NOT NULL DEFAULT 1,'
            ' "number" varchar(15) NOT NULL,'
            ' "location" varchar(255) NOT NULL,'
            ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id")'
            " DEFERRABLE INITIALLY DEFERRED,"
            ' "vcard_lookup_key" varchar(6) NOT NULL UNIQUE);'
        )
        # Copy existing rows; enabled is forced to 1 (TRUE) for all rows.
        schema_editor.execute(
            'INSERT INTO "new__phones_relaynumber"'
            ' ("id", "number", "location", "user_id", "vcard_lookup_key", "enabled")'
            ' SELECT "id", "number", "location", "user_id", "vcard_lookup_key", 1'
            ' FROM "phones_relaynumber";'
        )
        schema_editor.execute('DROP TABLE "phones_relaynumber";')
        schema_editor.execute(
            'ALTER TABLE "new__phones_relaynumber" RENAME TO "phones_relaynumber";'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_relaynumber_number_742e5d6b" ON "phones_relaynumber"'
            ' ("number");'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_relaynumber_user_id_62c65ede" ON "phones_relaynumber"'
            ' ("user_id");'
        )
    else:
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
154,523 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of 'twilio' for vendor, for PostgreSQL and SQLite3 Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add a database default of 'twilio' for vendor, for PostgreSQL and SQLite3
    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    # PostgreSQL can alter a column default in place.
    if schema_editor.connection.vendor.startswith("postgres"):
        schema_editor.execute(
            'ALTER TABLE "phones_relaynumber"'
            " ALTER COLUMN \"vendor\" SET DEFAULT 'twilio';"
        )
    # SQLite cannot ALTER a default: rebuild the table, copy the rows,
    # rename, and recreate the indexes.
    elif schema_editor.connection.vendor.startswith("sqlite"):
        # Add default 'twilio' to phone_relaynumber.vendor
        schema_editor.execute(
            'CREATE TABLE "new__phones_relaynumber"'
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "number" varchar(15) NOT NULL,'
            ' "location" varchar(255) NOT NULL,'
            ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'
            ' "vcard_lookup_key" varchar(6) NOT NULL UNIQUE,'
            ' "enabled" bool NOT NULL,'
            ' "calls_blocked" integer NOT NULL,'
            ' "calls_forwarded" integer NOT NULL,'
            ' "remaining_texts" integer NOT NULL,'
            ' "texts_blocked" integer NOT NULL,'
            ' "texts_forwarded" integer NOT NULL,'
            ' "remaining_seconds" integer NOT NULL DEFAULT 3000,'
            ' "remaining_minutes" integer NULL,'
            " \"country_code\" varchar(2) NOT NULL DEFAULT 'US',"
            " \"vendor\" varchar(15) NOT NULL DEFAULT 'twilio');"
        )
        # Copy existing rows; vendor is forced to 'twilio' for all rows.
        schema_editor.execute(
            'INSERT INTO "new__phones_relaynumber"'
            ' ("id", "number", "location", "user_id", "vcard_lookup_key", "enabled",'
            ' "calls_blocked", "calls_forwarded", "remaining_texts", "texts_blocked",'
            ' "texts_forwarded", "remaining_seconds", "remaining_minutes",'
            ' "country_code", "vendor")'
            ' SELECT "id", "number", "location", "user_id", "vcard_lookup_key",'
            ' "enabled", "calls_blocked", "calls_forwarded", "remaining_texts",'
            ' "texts_blocked", "texts_forwarded", "remaining_seconds",'
            ' "remaining_minutes", "country_code", \'twilio\' FROM "phones_relaynumber";'
        )
        schema_editor.execute('DROP TABLE "phones_relaynumber";')
        schema_editor.execute(
            'ALTER TABLE "new__phones_relaynumber" RENAME TO "phones_relaynumber";'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_relaynumber_number_742e5d6b" ON "phones_relaynumber"'
            ' ("number");'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_relaynumber_user_id_62c65ede" ON "phones_relaynumber"'
            ' ("user_id");'
        )
    # NOTE(review): unlike the sibling migration helpers, there is no final
    # `else: raise` for unknown database vendors — an unknown vendor silently
    # does nothing here. Confirm whether that is intentional.
154,524 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `migrate_existing_data` function. Write a Python function `def migrate_existing_data(apps, schema_editor)` to solve the following problem:
After creating the remaining_seconds column, populate it with the value of remaining_minutes*60.
Here is the function:
def migrate_existing_data(apps, schema_editor):
    """
    After creating the remaining_seconds column,
    populate it with the value of remaining_minutes*60.
    """
    # Both branches run identical SQL; kept separate to mirror the structure
    # of the sibling migration helpers and to reject unknown vendors.
    if schema_editor.connection.vendor.startswith("postgres"):
        schema_editor.execute(
            'UPDATE "phones_relaynumber" SET remaining_seconds = remaining_minutes*60;'
        )
    elif schema_editor.connection.vendor.startswith("sqlite"):
        schema_editor.execute(
            'UPDATE "phones_relaynumber" SET remaining_seconds = remaining_minutes*60;'
        )
    else:
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
154,525 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of 3000 for remaining_seconds, for PostgreSQL and SQLite3 Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add a database default of 3000 for remaining_seconds, for PostgreSQL and SQLite3
    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    vendor = schema_editor.connection.vendor
    if vendor.startswith("postgres"):
        # PostgreSQL can change a column default in place.
        statements = [
            'ALTER TABLE "phones_relaynumber"'
            ' ALTER COLUMN "remaining_seconds" SET DEFAULT 3000;',
        ]
    elif vendor.startswith("sqlite"):
        # SQLite cannot ALTER a column default, so rebuild the table:
        # create a replacement with the default, copy the rows, swap the
        # names, and recreate the indexes Django expects.
        statements = [
            'CREATE TABLE "new__phones_relaynumber"'
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "remaining_minutes" integer NOT NULL DEFAULT 50,'
            ' "remaining_seconds" integer NOT NULL DEFAULT 3000,'
            ' "remaining_texts" integer NOT NULL DEFAULT 75,'
            ' "calls_forwarded" integer NOT NULL DEFAULT 0,'
            ' "calls_blocked" integer NOT NULL DEFAULT 0,'
            ' "texts_forwarded" integer NOT NULL DEFAULT 0,'
            ' "texts_blocked" integer NOT NULL DEFAULT 0,'
            ' "enabled" bool NOT NULL DEFAULT 1,'
            ' "number" varchar(15) NOT NULL,'
            ' "location" varchar(255) NOT NULL,'
            ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id")'
            " DEFERRABLE INITIALLY DEFERRED,"
            ' "vcard_lookup_key" varchar(6) NOT NULL UNIQUE);',
            'INSERT INTO "new__phones_relaynumber"'
            ' ("id", "number", "location", "user_id", "vcard_lookup_key", "enabled",'
            ' "remaining_texts", "remaining_minutes", "calls_forwarded", "calls_blocked", '
            ' "texts_forwarded", "texts_blocked", "remaining_seconds") '
            ' SELECT "id", "number", "location", "user_id", "vcard_lookup_key", "enabled", '
            ' "remaining_texts", "remaining_minutes", "calls_forwarded", "calls_blocked", '
            ' "texts_forwarded", "texts_blocked", 3000 '
            ' FROM "phones_relaynumber";',
            'DROP TABLE "phones_relaynumber";',
            'ALTER TABLE "new__phones_relaynumber" RENAME TO "phones_relaynumber";',
            'CREATE INDEX "phones_relaynumber_number_742e5d6b" ON "phones_relaynumber"'
            ' ("number");',
            'CREATE INDEX "phones_relaynumber_user_id_62c65ede" ON "phones_relaynumber"'
            ' ("user_id");',
        ]
    else:
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
    for sql in statements:
        schema_editor.execute(sql)
154,526 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of TRUE for enabled, for PostgreSQL and SQLite3 Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add a database default of TRUE for enabled, for PostgreSQL and SQLite3
    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    # NOTE(review): the summary says "TRUE for enabled", but the PostgreSQL
    # branch below only sets defaults for the counter columns; only the
    # SQLite rebuild includes `enabled ... DEFAULT 1` — confirm intent.
    if schema_editor.connection.vendor.startswith("postgres"):
        # PostgreSQL can change column defaults in place with one ALTER.
        schema_editor.execute(
            'ALTER TABLE "phones_relaynumber"'
            ' ALTER COLUMN "remaining_minutes" SET DEFAULT 50,'
            ' ALTER COLUMN "remaining_texts" SET DEFAULT 75,'
            ' ALTER COLUMN "calls_forwarded" SET DEFAULT 0,'
            ' ALTER COLUMN "calls_blocked" SET DEFAULT 0,'
            ' ALTER COLUMN "texts_forwarded" SET DEFAULT 0,'
            ' ALTER COLUMN "texts_blocked" SET DEFAULT 0;'
        )
    elif schema_editor.connection.vendor.startswith("sqlite"):
        # SQLite cannot ALTER a column default, so rebuild the table:
        # create a replacement with the defaults, copy rows, swap names,
        # and recreate the indexes Django expects.
        schema_editor.execute(
            'CREATE TABLE "new__phones_relaynumber"'
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "remaining_minutes" integer NOT NULL DEFAULT 50,'
            ' "remaining_texts" integer NOT NULL DEFAULT 75,'
            ' "calls_forwarded" integer NOT NULL DEFAULT 0,'
            ' "calls_blocked" integer NOT NULL DEFAULT 0,'
            ' "texts_forwarded" integer NOT NULL DEFAULT 0,'
            ' "texts_blocked" integer NOT NULL DEFAULT 0,'
            ' "enabled" bool NOT NULL DEFAULT 1,'
            ' "number" varchar(15) NOT NULL,'
            ' "location" varchar(255) NOT NULL,'
            ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id")'
            " DEFERRABLE INITIALLY DEFERRED,"
            ' "vcard_lookup_key" varchar(6) NOT NULL UNIQUE);'
        )
        # Copy existing rows; the counters are re-seeded with the default
        # literals (75, 50, 0, ...) rather than the old per-row values.
        schema_editor.execute(
            'INSERT INTO "new__phones_relaynumber"'
            ' ("id", "number", "location", "user_id", "vcard_lookup_key", "enabled",'
            ' "remaining_texts", "remaining_minutes", "calls_forwarded", "calls_blocked", '
            ' "texts_forwarded", "texts_blocked") '
            ' SELECT "id", "number", "location", "user_id", "vcard_lookup_key", "enabled", '
            " 75, 50, 0, 0, 0, 0 "
            ' FROM "phones_relaynumber";'
        )
        schema_editor.execute('DROP TABLE "phones_relaynumber";')
        schema_editor.execute(
            'ALTER TABLE "new__phones_relaynumber" RENAME TO "phones_relaynumber";'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_relaynumber_number_742e5d6b" ON "phones_relaynumber"'
            ' ("number");'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_relaynumber_user_id_62c65ede" ON "phones_relaynumber"'
            ' ("user_id");'
        )
    else:
        # Any other backend is unsupported by this hand-written migration.
        raise Exception(f'Unknown database vendor "{schema_editor.connection.vendor}"')
154,527 | from django.db import migrations, models
The provided code snippet includes necessary dependencies for implementing the `add_db_default_forward_func` function. Write a Python function `def add_db_default_forward_func(apps, schema_editor)` to solve the following problem:
Add a database default of 'US' for country_code, for PostgreSQL and SQLite3 Using `./manage.py sqlmigrate` for the SQL, and the technique from: https://stackoverflow.com/a/45232678/10612
Here is the function:
def add_db_default_forward_func(apps, schema_editor):
    """
    Add a database default of 'US' for country_code, for PostgreSQL and SQLite3
    Using `./manage.py sqlmigrate` for the SQL, and the technique from:
    https://stackoverflow.com/a/45232678/10612
    """
    # NOTE(review): unlike the sibling default-adding migrations, no trailing
    # `else: raise` for unknown vendors appears here — other backends would
    # silently no-op; confirm whether that branch was dropped.
    if schema_editor.connection.vendor.startswith("postgres"):
        # PostgreSQL: set the default in place on both tables.
        schema_editor.execute(
            'ALTER TABLE "phones_realphone"'
            " ALTER COLUMN \"country_code\" SET DEFAULT 'US';"
        )
        schema_editor.execute(
            'ALTER TABLE "phones_relaynumber"'
            " ALTER COLUMN \"country_code\" SET DEFAULT 'US';"
        )
    elif schema_editor.connection.vendor.startswith("sqlite"):
        # SQLite cannot ALTER a column default, so each table is rebuilt:
        # create a copy with the default, move the rows, swap names, and
        # recreate the indexes.
        # Add default 'US' to phone_realnumber.country_code
        schema_editor.execute(
            'CREATE TABLE "new__phones_realphone"'
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            " \"country_code\" varchar(2) NOT NULL DEFAULT 'US',"
            ' "number" varchar(15) NOT NULL,'
            ' "verification_code" varchar(8) NOT NULL,'
            ' "verified" bool NOT NULL,'
            ' "verified_date" datetime NULL,'
            ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id")'
            " DEFERRABLE INITIALLY DEFERRED,"
            ' "verification_sent_date" datetime NULL);'
        )
        schema_editor.execute(
            'INSERT INTO "new__phones_realphone"'
            ' ("id", "number", "verification_code", "verified", "verified_date",'
            ' "user_id", "verification_sent_date", "country_code")'
            ' SELECT "id", "number", "verification_code", "verified", "verified_date",'
            ' "user_id", "verification_sent_date", \'US\' FROM "phones_realphone";'
        )
        schema_editor.execute('DROP TABLE "phones_realphone";')
        schema_editor.execute(
            'ALTER TABLE "new__phones_realphone" RENAME TO "phones_realphone";'
        )
        # Partial unique index: each number may appear only once among
        # verified rows.
        schema_editor.execute(
            'CREATE UNIQUE INDEX "unique_verified_number" ON "phones_realphone"'
            ' ("number", "verified") WHERE "verified";'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_realphone_user_id_194ddf70" ON "phones_realphone"'
            ' ("user_id");'
        )
        # Add default 'US' to phone_relaynumber.country_code
        schema_editor.execute(
            'CREATE TABLE "new__phones_relaynumber"'
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "number" varchar(15) NOT NULL,'
            ' "location" varchar(255) NOT NULL,'
            ' "user_id" integer NOT NULL REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED,'
            ' "vcard_lookup_key" varchar(6) NOT NULL UNIQUE,'
            ' "enabled" bool NOT NULL,'
            ' "calls_blocked" integer NOT NULL,'
            ' "calls_forwarded" integer NOT NULL,'
            ' "remaining_texts" integer NOT NULL,'
            ' "texts_blocked" integer NOT NULL,'
            ' "texts_forwarded" integer NOT NULL,'
            ' "remaining_seconds" integer NOT NULL DEFAULT 3000,'
            ' "remaining_minutes" integer NULL,'
            " \"country_code\" varchar(2) NOT NULL DEFAULT 'US');"
        )
        schema_editor.execute(
            'INSERT INTO "new__phones_relaynumber"'
            ' ("id", "number", "location", "user_id", "vcard_lookup_key", "enabled",'
            ' "calls_blocked", "calls_forwarded", "remaining_texts", "texts_blocked",'
            ' "texts_forwarded", "remaining_seconds", "remaining_minutes",'
            ' "country_code")'
            ' SELECT "id", "number", "location", "user_id", "vcard_lookup_key",'
            ' "enabled", "calls_blocked", "calls_forwarded", "remaining_texts",'
            ' "texts_blocked", "texts_forwarded", "remaining_seconds",'
            ' "remaining_minutes", \'US\' FROM "phones_relaynumber";'
        )
        schema_editor.execute('DROP TABLE "phones_relaynumber";')
        schema_editor.execute(
            'ALTER TABLE "new__phones_relaynumber" RENAME TO "phones_relaynumber";'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_relaynumber_number_742e5d6b" ON "phones_relaynumber"'
            ' ("number");'
        )
        schema_editor.execute(
            'CREATE INDEX "phones_relaynumber_user_id_62c65ede" ON "phones_relaynumber"'
            ' ("user_id");'
        )
154,528 | import logging
from flask import Flask
# Module-level logger; DEBUG level so invocation traces reach the log stream.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
def lambda_handler(event=None, context=None):
    """Minimal Lambda entry point: log the invocation and return a greeting."""
    greeting = "Flask says Hello!!"
    logger.info("Lambda function invoked index()")
    return greeting
154,529 | import json
import pprint
import re
import time
class AuthPolicy:
    """Accumulates allow/deny rules and renders an API Gateway authorizer policy.

    Register (HTTP verb, resource path) pairs via the ``allow*``/``deny*``
    methods, then call ``build()`` to produce the ``principalId`` +
    ``policyDocument`` dict a Lambda custom authorizer must return.
    """

    # The AWS account id the policy will be generated for; used to build
    # the method ARNs.
    awsAccountId = ""
    # The principal used for the policy; a unique identifier for the end user.
    principalId = ""
    # The policy version used for the evaluation. Should always be '2012-10-17'.
    version = "2012-10-17"
    # The regular expression used to validate resource paths for the policy.
    pathRegex = r"^[/.a-zA-Z0-9-\*]+$"
    # Internal lists of allowed and denied methods. Each entry is a dict with
    # a resource ARN and a (possibly empty) conditions statement; build()
    # turns them into policy statements.
    allowMethods = []
    denyMethods = []
    # The API Gateway API id. By default this is set to '*'.
    restApiId = "*"
    # The region where the API is deployed. By default this is set to '*'.
    region = "*"
    # The name of the stage used in the policy. By default this is set to '*'.
    stage = "*"

    def __init__(self, principal, awsAccountId):
        self.awsAccountId = awsAccountId
        self.principalId = principal
        # Re-bind as instance attributes so separate policies never share
        # state through the class-level lists.
        self.allowMethods = []
        self.denyMethods = []

    def _addMethod(self, effect, verb, resource, conditions):
        """Record one allowed/denied method.

        ``verb`` must be '*' or an attribute of the HttpVerb class (defined
        elsewhere in this module); ``resource`` must match ``pathRegex``.
        ``conditions`` may be None/empty for an unconditional rule.
        """
        if verb != "*" and not hasattr(HttpVerb, verb):
            raise NameError("Invalid HTTP verb " + verb + ". Allowed verbs in HttpVerb class")
        resourcePattern = re.compile(self.pathRegex)
        if not resourcePattern.match(resource):
            raise NameError("Invalid resource path: " + resource + ". Path should match " + self.pathRegex)
        # Drop a leading slash; the ARN template adds its own separator.
        if resource[:1] == "/":
            resource = resource[1:]
        resourceArn = (
            "arn:aws:execute-api:"
            + self.region
            + ":"
            + self.awsAccountId
            + ":"
            + self.restApiId
            + "/"
            + self.stage
            + "/"
            + verb
            + "/"
            + resource
        )
        if effect.lower() == "allow":
            self.allowMethods.append({"resourceArn": resourceArn, "conditions": conditions})
        elif effect.lower() == "deny":
            self.denyMethods.append({"resourceArn": resourceArn, "conditions": conditions})

    def _getEmptyStatement(self, effect):
        """Return a statement skeleton with the capitalized effect and no resources."""
        statement = {
            "Action": "execute-api:Invoke",
            "Effect": effect[:1].upper() + effect[1:].lower(),
            "Resource": [],
        }
        return statement

    def _getStatementForEffect(self, effect, methods):
        """Convert the recorded methods into policy statements for one effect.

        Unconditional methods are merged into a single shared statement; each
        conditional method gets its own statement carrying its Condition block.
        """
        statements = []
        if len(methods) > 0:
            statement = self._getEmptyStatement(effect)
            for curMethod in methods:
                if curMethod["conditions"] is None or len(curMethod["conditions"]) == 0:
                    statement["Resource"].append(curMethod["resourceArn"])
                else:
                    conditionalStatement = self._getEmptyStatement(effect)
                    conditionalStatement["Resource"].append(curMethod["resourceArn"])
                    conditionalStatement["Condition"] = curMethod["conditions"]
                    statements.append(conditionalStatement)
            # Bug fix: only emit the shared statement if it gathered at least
            # one resource. Previously an empty '"Resource": []' statement was
            # appended whenever every method carried conditions, which is not
            # a valid IAM statement.
            if statement["Resource"]:
                statements.append(statement)
        return statements

    def allowAllMethods(self):
        """Adds a '*' allow to the policy to authorize access to all methods of an API"""
        self._addMethod("Allow", HttpVerb.ALL, "*", [])

    def denyAllMethods(self):
        """Adds a '*' deny to the policy to deny access to all methods of an API"""
        self._addMethod("Deny", HttpVerb.ALL, "*", [])

    def allowMethod(self, verb, resource):
        """Adds an API Gateway method (Http verb + Resource path) to the list of allowed
        methods for the policy"""
        self._addMethod("Allow", verb, resource, [])

    def denyMethod(self, verb, resource):
        """Adds an API Gateway method (Http verb + Resource path) to the list of denied
        methods for the policy"""
        self._addMethod("Deny", verb, resource, [])

    def allowMethodWithConditions(self, verb, resource, conditions):
        """Adds an allowed method with an IAM Condition block. More on AWS policy
        conditions here: http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition"""
        self._addMethod("Allow", verb, resource, conditions)

    def denyMethodWithConditions(self, verb, resource, conditions):
        """Adds a denied method with an IAM Condition block. More on AWS policy
        conditions here: http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition"""
        self._addMethod("Deny", verb, resource, conditions)

    def build(self):
        """Render and return the authorizer response dict.

        Raises NameError when no methods were registered. Emits one merged
        statement per effect for unconditional rules plus one statement per
        conditional rule.
        """
        if (self.allowMethods is None or len(self.allowMethods) == 0) and (
            self.denyMethods is None or len(self.denyMethods) == 0
        ):
            raise NameError("No statements defined for the policy")
        policy = {
            "principalId": self.principalId,
            "policyDocument": {"Version": self.version, "Statement": []},
        }
        policy["policyDocument"]["Statement"].extend(self._getStatementForEffect("Allow", self.allowMethods))
        policy["policyDocument"]["Statement"].extend(self._getStatementForEffect("Deny", self.denyMethods))
        return policy
The provided code snippet includes necessary dependencies for implementing the `lambda_handler` function. Write a Python function `def lambda_handler(event, context)` to solve the following problem:
validate the incoming token
Here is the function:
def lambda_handler(event, context):
    """Validate the incoming token and return an IAM policy for the caller.

    AWS blueprint shape for a TOKEN custom authorizer: derive a principal id
    from the token, then build a policy scoped to the RestApi/stage that was
    invoked. NOTE: no real validation happens here — principalId is
    hard-coded and all methods are allowed.
    """
    print("Client token: " + event["authorizationToken"])
    print("Method ARN: " + event["methodArn"])
    # Validate the incoming token and produce the principal user identifier
    # associated with it. This could be accomplished in a number of ways:
    #   1. Call out to an OAuth provider
    #   2. Decode a JWT token inline
    #   3. Lookup in a self-managed DB
    # (The original used bare string literals as pseudo-comments; they were
    # no-op expression statements and have been converted to real comments.)
    principalId = "user|a1b2c3d4"
    # A 401 Unauthorized response can be sent to the client by failing:
    #     raise Exception('Unauthorized')
    # If the token is valid, a policy must be generated which will allow or
    # deny access to the client. Denied access yields a 403; allowed access
    # lets API Gateway proceed with the backend integration configured on the
    # method that was called. The policy is cached (5 minutes by default; TTL
    # configurable on the authorizer) and applies to subsequent calls to any
    # method/resource in the RestApi made with the same token.
    # methodArn format: arn:aws:execute-api:<region>:<account>:<apiId>/<stage>/<verb>/<resource>
    tmp = event["methodArn"].split(":")
    apiGatewayArnTmp = tmp[5].split("/")
    awsAccountId = tmp[4]
    policy = AuthPolicy(principalId, awsAccountId)
    policy.restApiId = apiGatewayArnTmp[0]
    policy.region = tmp[3]
    policy.stage = apiGatewayArnTmp[1]
    # Blueprint denies all methods by default
    # policy.denyAllMethods()
    # Example allows all methods
    policy.allowAllMethods()
    # policy.allowMethod(HttpVerb.GET, "/pets/*")
    # Finally, build the policy and exit the function using return.
    return policy.build()
154,530 |
def myfunc():
    """Toy task body used to demonstrate a scheduled invocation."""
    message = "Running my function in a schedule!"
    print(message)
154,531 |
def myfunc_with_events(event, context):
    """Demo scheduled handler: echo the event time and Lambda context details."""
    lines = (
        ("Event time was", event["time"]),
        ("This log is", context.log_group_name, context.log_stream_name),
        ("Time left for execution:", context.get_remaining_time_in_millis()),
    )
    for parts in lines:
        print(*parts)
154,532 | import os
import sys
def get_django_wsgi(settings_module):
    """Return a Django WSGI application configured from *settings_module*.

    Points DJANGO_SETTINGS_MODULE at *settings_module* (without clobbering
    an existing value) and returns the application Django builds.
    """
    from django.core.wsgi import get_wsgi_application

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
    application = get_wsgi_application()
    return application
154,533 | import argparse
import base64
import collections
import importlib
import inspect
import os
import pkgutil
import random
import re
import string
import sys
import tempfile
import time
import zipfile
from builtins import bytes, input
from datetime import datetime, timedelta
from typing import Optional
import argcomplete
import botocore
import click
import hjson as json
import pkg_resources
import requests
import slugify
import toml
import yaml
from click import BaseCommand, Context
from click.exceptions import ClickException
from click.globals import push_context
from dateutil import parser
from .core import API_GATEWAY_REGIONS, Zappa
from .utilities import (
check_new_version_available,
detect_django_settings,
detect_flask_apps,
get_runtime_from_python_version,
get_venv_from_python_version,
human_size,
is_valid_bucket_name,
parse_s3_url,
string_to_timestamp,
validate_name,
)
The provided code snippet includes necessary dependencies for implementing the `disable_click_colors` function. Write a Python function `def disable_click_colors()` to solve the following problem:
Set a Click context where colors are disabled. Creates a throwaway BaseCommand to play nicely with the Context constructor. The intended side-effect here is that click.echo() checks this context and will suppress colors. https://github.com/pallets/click/blob/e1aa43a3/click/globals.py#L39
Here is the function:
def disable_click_colors():
    """
    Set a Click context where colors are disabled. Creates a throwaway BaseCommand
    to play nicely with the Context constructor.
    The intended side-effect here is that click.echo() checks this context and will
    suppress colors.
    https://github.com/pallets/click/blob/e1aa43a3/click/globals.py#L39
    """
    dummy_command = BaseCommand("AllYourBaseAreBelongToUs")
    colorless_ctx = Context(dummy_command)
    colorless_ctx.color = False
    push_context(colorless_ctx)
154,534 | import argparse
import base64
import collections
import importlib
import inspect
import os
import pkgutil
import random
import re
import string
import sys
import tempfile
import time
import zipfile
from builtins import bytes, input
from datetime import datetime, timedelta
from typing import Optional
import argcomplete
import botocore
import click
import hjson as json
import pkg_resources
import requests
import slugify
import toml
import yaml
from click import BaseCommand, Context
from click.exceptions import ClickException
from click.globals import push_context
from dateutil import parser
from .core import API_GATEWAY_REGIONS, Zappa
from .utilities import (
check_new_version_available,
detect_django_settings,
detect_flask_apps,
get_runtime_from_python_version,
get_venv_from_python_version,
human_size,
is_valid_bucket_name,
parse_s3_url,
string_to_timestamp,
validate_name,
)
class ZappaCLI:
"""
ZappaCLI object is responsible for loading the settings,
handling the input arguments and executing the calls to the core library.
"""
# CLI
vargs = None
command = None
stage_env = None
# Zappa settings
zappa = None
zappa_settings = None
load_credentials = True
disable_progress = False
# Specific settings
api_stage = None
app_function = None
aws_region = None
debug = None
prebuild_script = None
project_name = None
profile_name = None
lambda_arn = None
lambda_name = None
lambda_description = None
lambda_concurrency = None
s3_bucket_name = None
settings_file = None
zip_path = None
handler_path = None
vpc_config = None
memory_size = None
ephemeral_storage = None
use_apigateway = None
lambda_handler = None
django_settings = None
manage_roles = True
exception_handler = None
environment_variables = None
authorizer = None
xray_tracing = False
aws_kms_key_arn = ""
context_header_mappings = None
additional_text_mimetypes = None
tags = [] # type: ignore[var-annotated]
layers = None
stage_name_env_pattern = re.compile("^[a-zA-Z0-9_]+$")
    def __init__(self):
        """Initialize the CLI with an empty set of per-stage config overrides."""
        self._stage_config_overrides = {}  # change using self.override_stage_config_setting(key, val)
    def stage_config(self):
        """
        A shortcut property for settings of a stage.

        Resolves the current stage's settings, following any `extends`
        chain (child values win), normalizing the legacy `delete_zip` key,
        and applying forced overrides last.
        """
        # NOTE(review): reads like a @property; confirm the decorator was not
        # lost above this view.
        def get_stage_setting(stage, extended_stages=None):
            # Recursively resolve the `extends` chain, guarding against cycles.
            if extended_stages is None:
                extended_stages = []
            if stage in extended_stages:
                raise RuntimeError(
                    stage + " has already been extended to these settings. "
                    "There is a circular extends within the settings file."
                )
            extended_stages.append(stage)
            try:
                stage_settings = dict(self.zappa_settings[stage].copy())
            except KeyError:
                raise ClickException("Cannot extend settings for undefined stage '" + stage + "'.")
            extends_stage = self.zappa_settings[stage].get("extends", None)
            if not extends_stage:
                return stage_settings
            # Merge: the extending stage's own keys win over the base stage's.
            extended_settings = get_stage_setting(stage=extends_stage, extended_stages=extended_stages)
            extended_settings.update(stage_settings)
            return extended_settings
        settings = get_stage_setting(stage=self.api_stage)
        # Backwards compatible for delete_zip setting that was more explicitly named delete_local_zip
        if "delete_zip" in settings:
            settings["delete_local_zip"] = settings.get("delete_zip")
        # Forced per-stage overrides take precedence over everything else.
        settings.update(self.stage_config_overrides)
        return settings
    def stage_config_overrides(self):
        """
        Returns zappa_settings we forcefully override for the current stage
        set by `self.override_stage_config_setting(key, value)`
        """
        # getattr guard: tolerate instances where __init__ never ran (e.g.
        # constructed via __new__) and the backing dict is absent.
        return getattr(self, "_stage_config_overrides", {}).get(self.api_stage, {})
def override_stage_config_setting(self, key, val):
"""
Forcefully override a setting set by zappa_settings (for the current stage only)
:param key: settings key
:param val: value
"""
self._stage_config_overrides = getattr(self, "_stage_config_overrides", {})
self._stage_config_overrides.setdefault(self.api_stage, {})[key] = val
    def handle(self, argv=None):
        """
        Main function.
        Parses command, load settings and dispatches accordingly.

        Builds the full argparse CLI (one subparser per zappa command,
        most of them sharing env_parser's stage/env flags), parses
        ``argv`` (or sys.argv), loads the zappa_settings file, and then
        runs ``dispatch_command`` once per selected stage.
        """
        desc = "Zappa - Deploy Python applications to AWS Lambda" " and API Gateway.\n"
        parser = argparse.ArgumentParser(description=desc)
        parser.add_argument(
            "-v",
            "--version",
            action="version",
            version=pkg_resources.get_distribution("zappa").version,
            help="Print the zappa version",
        )
        parser.add_argument("--color", default="auto", choices=["auto", "never", "always"])
        # env_parser holds the flags shared by (almost) every subcommand.
        env_parser = argparse.ArgumentParser(add_help=False)
        me_group = env_parser.add_mutually_exclusive_group()
        all_help = "Execute this command for all of our defined " "Zappa stages."
        me_group.add_argument("--all", action="store_true", help=all_help)
        me_group.add_argument("stage_env", nargs="?")
        group = env_parser.add_argument_group()
        group.add_argument("-a", "--app_function", help="The WSGI application function.")
        group.add_argument("-s", "--settings_file", help="The path to a Zappa settings file.")
        group.add_argument("-q", "--quiet", action="store_true", help="Silence all output.")
        # https://github.com/Miserlou/Zappa/issues/407
        # Moved when 'template' command added.
        # Fuck Terraform.
        group.add_argument(
            "-j",
            "--json",
            action="store_true",
            help="Make the output of this command be machine readable.",
        )
        # https://github.com/Miserlou/Zappa/issues/891
        group.add_argument("--disable_progress", action="store_true", help="Disable progress bars.")
        group.add_argument("--no_venv", action="store_true", help="Skip venv check.")
        ##
        # Certify
        ##
        subparsers = parser.add_subparsers(title="subcommands", dest="command")
        cert_parser = subparsers.add_parser("certify", parents=[env_parser], help="Create and install SSL certificate")
        cert_parser.add_argument(
            "--manual",
            action="store_true",
            help=("Gets new Let's Encrypt certificates, but prints them to console." "Does not update API Gateway domains."),
        )
        cert_parser.add_argument("-y", "--yes", action="store_true", help="Auto confirm yes.")
        ##
        # Deploy
        ##
        deploy_parser = subparsers.add_parser("deploy", parents=[env_parser], help="Deploy application.")
        deploy_parser.add_argument(
            "-z",
            "--zip",
            help="Deploy Lambda with specific local or S3 hosted zip package",
        )
        deploy_parser.add_argument(
            "-d",
            "--docker-image-uri",
            help="Deploy Lambda with a specific docker image hosted in AWS Elastic Container Registry",
        )
        ##
        # Init
        ##
        subparsers.add_parser("init", help="Initialize Zappa app.")
        ##
        # Package
        ##
        package_parser = subparsers.add_parser(
            "package",
            parents=[env_parser],
            help="Build the application zip package locally.",
        )
        package_parser.add_argument("-o", "--output", help="Name of file to output the package to.")
        ##
        # Template
        ##
        template_parser = subparsers.add_parser(
            "template",
            parents=[env_parser],
            help="Create a CloudFormation template for this API Gateway.",
        )
        template_parser.add_argument(
            "-l",
            "--lambda-arn",
            required=True,
            help="ARN of the Lambda function to template to.",
        )
        template_parser.add_argument("-r", "--role-arn", required=True, help="ARN of the Role to template with.")
        template_parser.add_argument("-o", "--output", help="Name of file to output the template to.")
        ##
        # Invocation
        ##
        invoke_parser = subparsers.add_parser("invoke", parents=[env_parser], help="Invoke remote function.")
        invoke_parser.add_argument(
            "--raw",
            action="store_true",
            help=("When invoking remotely, invoke this python as a string," " not as a modular path."),
        )
        invoke_parser.add_argument("--no-color", action="store_true", help=("Don't color the output"))
        invoke_parser.add_argument("command_rest")
        ##
        # Manage
        ##
        manage_parser = subparsers.add_parser("manage", help="Invoke remote Django manage.py commands.")
        rest_help = "Command in the form of <env> <command>. <env> is not " "required if --all is specified"
        manage_parser.add_argument("--all", action="store_true", help=all_help)
        manage_parser.add_argument("command_rest", nargs="+", help=rest_help)
        manage_parser.add_argument("--no-color", action="store_true", help=("Don't color the output"))
        # This is explicitly added here because this is the only subcommand that doesn't inherit from env_parser
        # https://github.com/Miserlou/Zappa/issues/1002
        manage_parser.add_argument("-s", "--settings_file", help="The path to a Zappa settings file.")
        ##
        # Rollback
        ##
        def positive_int(s):
            """Ensure an arg is positive"""
            i = int(s)
            if i < 0:
                msg = "This argument must be positive (got {})".format(s)
                raise argparse.ArgumentTypeError(msg)
            return i
        rollback_parser = subparsers.add_parser(
            "rollback",
            parents=[env_parser],
            help="Rollback deployed code to a previous version.",
        )
        rollback_parser.add_argument(
            "-n",
            "--num-rollback",
            type=positive_int,
            default=1,
            help="The number of versions to rollback.",
        )
        ##
        # Scheduling
        ##
        subparsers.add_parser(
            "schedule",
            parents=[env_parser],
            help="Schedule functions to occur at regular intervals.",
        )
        ##
        # Status
        ##
        subparsers.add_parser(
            "status",
            parents=[env_parser],
            help="Show deployment status and event schedules.",
        )
        ##
        # Log Tailing
        ##
        tail_parser = subparsers.add_parser("tail", parents=[env_parser], help="Tail deployment logs.")
        tail_parser.add_argument("--no-color", action="store_true", help="Don't color log tail output.")
        tail_parser.add_argument(
            "--http",
            action="store_true",
            help="Only show HTTP requests in tail output.",
        )
        tail_parser.add_argument(
            "--non-http",
            action="store_true",
            help="Only show non-HTTP requests in tail output.",
        )
        tail_parser.add_argument(
            "--since",
            type=str,
            default="100000s",
            help="Only show lines since a certain timeframe.",
        )
        tail_parser.add_argument("--filter", type=str, default="", help="Apply a filter pattern to the logs.")
        tail_parser.add_argument(
            "--force-color",
            action="store_true",
            help="Force coloring log tail output even if coloring support is not auto-detected. (example: piping)",
        )
        tail_parser.add_argument(
            "--disable-keep-open",
            action="store_true",
            help="Exit after printing the last available log, rather than keeping the log open.",
        )
        ##
        # Undeploy
        ##
        undeploy_parser = subparsers.add_parser("undeploy", parents=[env_parser], help="Undeploy application.")
        undeploy_parser.add_argument(
            "--remove-logs",
            action="store_true",
            help=("Removes log groups of api gateway and lambda task" " during the undeployment."),
        )
        undeploy_parser.add_argument("-y", "--yes", action="store_true", help="Auto confirm yes.")
        ##
        # Unschedule
        ##
        subparsers.add_parser("unschedule", parents=[env_parser], help="Unschedule functions.")
        ##
        # Updating
        ##
        update_parser = subparsers.add_parser("update", parents=[env_parser], help="Update deployed application.")
        update_parser.add_argument(
            "-z",
            "--zip",
            help="Update Lambda with specific local or S3 hosted zip package",
        )
        update_parser.add_argument(
            "-n",
            "--no-upload",
            help="Update configuration where appropriate, but don't upload new code",
        )
        update_parser.add_argument(
            "-d",
            "--docker-image-uri",
            help="Update Lambda with a specific docker image hosted in AWS Elastic Container Registry",
        )
        ##
        # Debug
        ##
        subparsers.add_parser(
            "shell",
            parents=[env_parser],
            help="A debug shell with a loaded Zappa object.",
        )
        ##
        # Python Settings File
        ##
        settings_parser = subparsers.add_parser(
            "save-python-settings-file",
            parents=[env_parser],
            help="Generate & save the Zappa settings Python file for docker deployments",
        )
        settings_parser.add_argument(
            "-o",
            "--output_path",
            help=(
                "The path to save the Zappa settings Python file. "
                "File must be named zappa_settings.py and should be saved "
                "in the same directory as the Zappa handler.py"
            ),
        )
        # Enable shell tab-completion, then parse; all flags land in
        # self.vargs for dispatch_command to read later.
        argcomplete.autocomplete(parser)
        args = parser.parse_args(argv)
        self.vargs = vars(args)
        if args.color == "never":
            disable_click_colors()
        elif args.color == "always":
            # TODO: Support aggressive coloring like "--force-color" on all commands
            pass
        elif args.color == "auto":
            pass
        # Parse the input
        # NOTE(rmoe): Special case for manage command
        # The manage command can't have both stage_env and command_rest
        # arguments. Since they are both positional arguments argparse can't
        # differentiate the two. This causes problems when used with --all.
        # (e.g. "manage --all showmigrations admin" argparse thinks --all has
        # been specified AND that stage_env='showmigrations')
        # By having command_rest collect everything but --all we can split it
        # apart here instead of relying on argparse.
        if not args.command:
            parser.print_help()
            return
        if args.command == "manage" and not self.vargs.get("all"):
            self.stage_env = self.vargs["command_rest"].pop(0)
        else:
            self.stage_env = self.vargs.get("stage_env")
        # Purely local commands don't need AWS credentials.
        if args.command in ("package", "save-python-settings-file"):
            self.load_credentials = False
        self.command = args.command
        self.disable_progress = self.vargs.get("disable_progress")
        if self.vargs.get("quiet"):
            self.silence()
        # We don't have any settings yet, so make those first!
        # (Settings-based interactions will fail
        # before a project has been initialized.)
        if self.command == "init":
            self.init()
            return
        # Make sure there isn't a new version available
        if not self.vargs.get("json"):
            self.check_for_update()
        # Load and Validate Settings File
        self.load_settings_file(self.vargs.get("settings_file"))
        # Should we execute this for all stages, or just one?
        all_stages = self.vargs.get("all")
        stages = []
        if all_stages:  # All stages!
            stages = self.zappa_settings.keys()
        else:  # Just one env.
            if not self.stage_env:
                # If there's only one stage defined in the settings,
                # use that as the default.
                if len(self.zappa_settings.keys()) == 1:
                    stages.append(list(self.zappa_settings.keys())[0])
                else:
                    parser.error("Please supply a stage to interact with.")
            else:
                stages.append(self.stage_env)
        # Run the chosen command once per selected stage.
        for stage in stages:
            try:
                self.dispatch_command(self.command, stage)
            except ClickException as e:
                # Discussion on exit codes: https://github.com/Miserlou/Zappa/issues/407
                e.show()
                sys.exit(e.exit_code)
    def dispatch_command(self, command, stage):
        """
        Given a command to execute and stage,
        execute that command.

        `command` is one of the CLI subcommand names (deploy, update, manage,
        tail, status, ...); `stage` is the zappa_settings stage name to run it
        against. Validates the stage name, loads the stage settings, fires the
        "settings" callback, then hands off to the matching method.
        """
        self.check_stage_name(stage)
        self.api_stage = stage
        # "status" and "manage" print their own output, so skip the
        # "Calling <command> for stage <stage>.." banner for them.
        # The banner is also suppressed in --json mode.
        if command not in ["status", "manage"]:
            if not self.vargs.get("json", None):
                click.echo(
                    "Calling "
                    + click.style(command, fg="green", bold=True)
                    + " for stage "
                    + click.style(self.api_stage, bold=True)
                    + ".."
                )
        # Explicitly define the app function.
        # Related: https://github.com/Miserlou/Zappa/issues/832
        if self.vargs.get("app_function", None):
            self.app_function = self.vargs["app_function"]
        # Load our settings, based on api_stage.
        try:
            self.load_settings(self.vargs.get("settings_file"))
        except ValueError as e:
            # Some exceptions carry a .message attribute; prefer it when present.
            if hasattr(e, "message"):
                print("Error: {}".format(e.message))
            else:
                print(str(e))
            sys.exit(-1)
        self.callback("settings")
        # Hand it off
        # NOTE: the first two branches use independent `if`s rather than the
        # `elif` chain below; since `command` has a single value the commands
        # are still mutually exclusive.
        if command == "deploy":  # pragma: no cover
            self.deploy(self.vargs["zip"], self.vargs["docker_image_uri"])
        if command == "package":  # pragma: no cover
            self.package(self.vargs["output"])
        if command == "template":  # pragma: no cover
            self.template(
                self.vargs["lambda_arn"],
                self.vargs["role_arn"],
                output=self.vargs["output"],
                json=self.vargs["json"],
            )
        elif command == "update":  # pragma: no cover
            self.update(
                self.vargs["zip"],
                self.vargs["no_upload"],
                self.vargs["docker_image_uri"],
            )
        elif command == "rollback":  # pragma: no cover
            self.rollback(self.vargs["num_rollback"])
        elif command == "invoke":  # pragma: no cover
            if not self.vargs.get("command_rest"):
                print("Please enter the function to invoke.")
                return
            self.invoke(
                self.vargs["command_rest"],
                raw_python=self.vargs["raw"],
                no_color=self.vargs["no_color"],
            )
        elif command == "manage":  # pragma: no cover
            if not self.vargs.get("command_rest"):
                print("Please enter the management command to invoke.")
                return
            if not self.django_settings:
                print("This command is for Django projects only!")
                print("If this is a Django project, please define django_settings in your zappa_settings.")
                return
            command_tail = self.vargs.get("command_rest")
            # Multiple tokens are re-joined into one command line for the
            # remote management handler.
            if len(command_tail) > 1:
                command = " ".join(command_tail)  # ex: zappa manage dev "shell --version"
            else:
                command = command_tail[0]  # ex: zappa manage dev showmigrations admin
            self.invoke(
                command,
                command="manage",
                no_color=self.vargs["no_color"],
            )
        elif command == "tail":  # pragma: no cover
            self.tail(
                colorize=(not self.vargs["no_color"]),
                http=self.vargs["http"],
                non_http=self.vargs["non_http"],
                since=self.vargs["since"],
                filter_pattern=self.vargs["filter"],
                force_colorize=self.vargs["force_color"] or None,
                keep_open=not self.vargs["disable_keep_open"],
            )
        elif command == "undeploy":  # pragma: no cover
            self.undeploy(no_confirm=self.vargs["yes"], remove_logs=self.vargs["remove_logs"])
        elif command == "schedule":  # pragma: no cover
            self.schedule()
        elif command == "unschedule":  # pragma: no cover
            self.unschedule()
        elif command == "status":  # pragma: no cover
            self.status(return_json=self.vargs["json"])
        elif command == "certify":  # pragma: no cover
            self.certify(no_confirm=self.vargs["yes"], manual=self.vargs["manual"])
        elif command == "shell":  # pragma: no cover
            self.shell()
        elif command == "save-python-settings-file":  # pragma: no cover
            self.save_python_settings_file(self.vargs["output_path"])
##
# The Commands
##
def save_python_settings_file(self, output_path=None):
settings_path = output_path or "zappa_settings.py"
print("Generating Zappa settings Python file and saving to {}".format(settings_path))
if not settings_path.endswith("zappa_settings.py"):
raise ValueError("Settings file must be named zappa_settings.py")
zappa_settings_s = self.get_zappa_settings_string()
with open(settings_path, "w") as f_out:
f_out.write(zappa_settings_s)
def package(self, output=None):
"""
Only build the package
"""
# Make sure we're in a venv.
self.check_venv()
# force not to delete the local zip
self.override_stage_config_setting("delete_local_zip", False)
# Execute the prebuild script
if self.prebuild_script:
self.execute_prebuild_script()
# Create the Lambda Zip
self.create_package(output)
self.callback("zip")
size = human_size(os.path.getsize(self.zip_path))
click.echo(
click.style("Package created", fg="green", bold=True)
+ ": "
+ click.style(self.zip_path, bold=True)
+ " ("
+ size
+ ")"
)
def template(self, lambda_arn, role_arn, output=None, json=False):
"""
Only build the template file.
"""
if not lambda_arn:
raise ClickException("Lambda ARN is required to template.")
if not role_arn:
raise ClickException("Role ARN is required to template.")
self.zappa.credentials_arn = role_arn
# Create the template!
template = self.zappa.create_stack_template(
lambda_arn=lambda_arn,
lambda_name=self.lambda_name,
api_key_required=self.api_key_required,
iam_authorization=self.iam_authorization,
authorizer=self.authorizer,
cors_options=self.cors,
description=self.apigateway_description,
endpoint_configuration=self.endpoint_configuration,
)
if not output:
template_file = self.lambda_name + "-template-" + str(int(time.time())) + ".json"
else:
template_file = output
with open(template_file, "wb") as out:
out.write(bytes(template.to_json(indent=None, separators=(",", ":")), "utf-8"))
if not json:
click.echo(click.style("Template created", fg="green", bold=True) + ": " + click.style(template_file, bold=True))
else:
with open(template_file, "r") as out:
print(out.read())
    def deploy(self, source_zip=None, docker_image_uri=None):
        """
        Package your project, upload it to S3, register the Lambda function
        and create the API Gateway routes.

        `source_zip` deploys from a pre-built zip (local path or s3:// URL)
        instead of packaging; `docker_image_uri` deploys a container image.
        Raises ClickException if the function already has deployed versions.
        """
        # NOTE(review): this parses as `(not source_zip) or docker_image_uri`,
        # so the IAM-role and already-deployed checks run for fresh builds and
        # docker deploys but are skipped for plain source_zip deploys —
        # confirm that skipping them for zip deploys is intended.
        if not source_zip or docker_image_uri:
            # Make sure the necessary IAM execution roles are available
            if self.manage_roles:
                try:
                    self.zappa.create_iam_roles()
                except botocore.client.ClientError as ce:
                    raise ClickException(
                        click.style("Failed", fg="red")
                        + " to "
                        + click.style("manage IAM roles", bold=True)
                        + "!\n"
                        + "You may "
                        + click.style("lack the necessary AWS permissions", bold=True)
                        + " to automatically manage a Zappa execution role.\n"
                        + click.style("Exception reported by AWS:", bold=True)
                        + format(ce)
                        + "\n"
                        + "To fix this, see here: "
                        + click.style(
                            "https://github.com/Zappa/Zappa#custom-aws-iam-roles-and-policies-for-deployment",
                            bold=True,
                        )
                        + "\n"
                    )
            # Make sure this isn't already deployed.
            deployed_versions = self.zappa.get_lambda_function_versions(self.lambda_name)
            if len(deployed_versions) > 0:
                raise ClickException(
                    "This application is "
                    + click.style("already deployed", fg="red")
                    + " - did you mean to call "
                    + click.style("update", bold=True)
                    + "?"
                )
        # Fresh deploy (no pre-built zip, no docker image): build and upload.
        if not source_zip and not docker_image_uri:
            # Make sure we're in a venv.
            self.check_venv()
            # Execute the prebuild script
            if self.prebuild_script:
                self.execute_prebuild_script()
            # Create the Lambda Zip
            self.create_package()
            self.callback("zip")
            # Upload it to S3
            success = self.zappa.upload_to_s3(
                self.zip_path,
                self.s3_bucket_name,
                disable_progress=self.disable_progress,
            )
            if not success:  # pragma: no cover
                raise ClickException("Unable to upload to S3. Quitting.")
            # If using a slim handler, upload it to S3 and tell lambda to use this slim handler zip
            if self.stage_config.get("slim_handler", False):
                # https://github.com/Miserlou/Zappa/issues/510
                success = self.zappa.upload_to_s3(
                    self.handler_path,
                    self.s3_bucket_name,
                    disable_progress=self.disable_progress,
                )
                if not success:  # pragma: no cover
                    raise ClickException("Unable to upload handler to S3. Quitting.")
                # Copy the project zip to the current project zip
                current_project_name = "{0!s}_{1!s}_current_project.tar.gz".format(self.api_stage, self.project_name)
                success = self.zappa.copy_on_s3(
                    src_file_name=self.zip_path,
                    dst_file_name=current_project_name,
                    bucket_name=self.s3_bucket_name,
                )
                if not success:  # pragma: no cover
                    raise ClickException("Unable to copy the zip to be the current project. Quitting.")
                handler_file = self.handler_path
            else:
                handler_file = self.zip_path
        # Fixes https://github.com/Miserlou/Zappa/issues/613
        try:
            self.lambda_arn = self.zappa.get_lambda_function(function_name=self.lambda_name)
        except botocore.client.ClientError:
            # Function doesn't exist yet — create it.
            # Register the Lambda function with that zip as the source
            # You'll also need to define the path to your lambda_handler code.
            kwargs = dict(
                handler=self.lambda_handler,
                description=self.lambda_description,
                vpc_config=self.vpc_config,
                dead_letter_config=self.dead_letter_config,
                timeout=self.timeout_seconds,
                memory_size=self.memory_size,
                ephemeral_storage=self.ephemeral_storage,
                runtime=self.runtime,
                aws_environment_variables=self.aws_environment_variables,
                aws_kms_key_arn=self.aws_kms_key_arn,
                use_alb=self.use_alb,
                layers=self.layers,
                concurrency=self.lambda_concurrency,
            )
            kwargs["function_name"] = self.lambda_name
            # Pick the code source: docker image, s3 zip, local zip file,
            # or the freshly uploaded package from above.
            if docker_image_uri:
                kwargs["docker_image_uri"] = docker_image_uri
            elif source_zip and source_zip.startswith("s3://"):
                bucket, key_name = parse_s3_url(source_zip)
                kwargs["bucket"] = bucket
                kwargs["s3_key"] = key_name
            elif source_zip and not source_zip.startswith("s3://"):
                with open(source_zip, mode="rb") as fh:
                    byte_stream = fh.read()
                kwargs["local_zip"] = byte_stream
            else:
                kwargs["bucket"] = self.s3_bucket_name
                kwargs["s3_key"] = handler_file
            self.lambda_arn = self.zappa.create_lambda_function(**kwargs)
        # Schedule events for this deployment
        self.schedule()
        endpoint_url = ""
        deployment_string = click.style("Deployment complete", fg="green", bold=True) + "!"
        if self.use_alb:
            kwargs = dict(
                lambda_arn=self.lambda_arn,
                lambda_name=self.lambda_name,
                alb_vpc_config=self.alb_vpc_config,
                timeout=self.timeout_seconds,
            )
            self.zappa.deploy_lambda_alb(**kwargs)
        if self.use_apigateway:
            # Create and configure the API Gateway
            self.zappa.create_stack_template(
                lambda_arn=self.lambda_arn,
                lambda_name=self.lambda_name,
                api_key_required=self.api_key_required,
                iam_authorization=self.iam_authorization,
                authorizer=self.authorizer,
                cors_options=self.cors,
                description=self.apigateway_description,
                endpoint_configuration=self.endpoint_configuration,
            )
            self.zappa.update_stack(
                self.lambda_name,
                self.s3_bucket_name,
                wait=True,
                disable_progress=self.disable_progress,
            )
            api_id = self.zappa.get_api_id(self.lambda_name)
            # Add binary support
            if self.binary_support:
                self.zappa.add_binary_support(api_id=api_id, cors=self.cors)
            # Add payload compression
            if self.stage_config.get("payload_compression", True):
                self.zappa.add_api_compression(
                    api_id=api_id,
                    min_compression_size=self.stage_config.get("payload_minimum_compression_size", 0),
                )
            # Deploy the API!
            endpoint_url = self.deploy_api_gateway(api_id)
            deployment_string = deployment_string + ": {}".format(endpoint_url)
            # Create/link API key
            if self.api_key_required:
                if self.api_key is None:
                    self.zappa.create_api_key(api_id=api_id, stage_name=self.api_stage)
                else:
                    self.zappa.add_api_stage_to_api_key(api_key=self.api_key, api_id=api_id, stage_name=self.api_stage)
            if self.stage_config.get("touch", True):
                self.zappa.wait_until_lambda_function_is_updated(function_name=self.lambda_name)
                self.touch_endpoint(endpoint_url)
        # Finally, delete the local copy our zip package
        if not source_zip and not docker_image_uri:
            if self.stage_config.get("delete_local_zip", True):
                self.remove_local_zip()
        # Remove the project zip from S3.
        if not source_zip and not docker_image_uri:
            self.remove_uploaded_zip()
        self.callback("post")
        click.echo(deployment_string)
    def update(self, source_zip=None, no_upload=False, docker_image_uri=None):
        """
        Repackage and update the function code.

        Like deploy(), but for an already-deployed function: updates the code,
        the Lambda configuration, the CloudFormation stack / API Gateway
        settings, schedules and Cognito triggers. `no_upload` skips building
        and uploading a new package (configuration-only update).
        """
        if not source_zip and not docker_image_uri:
            # Make sure we're in a venv.
            self.check_venv()
            # Execute the prebuild script
            if self.prebuild_script:
                self.execute_prebuild_script()
            # Temporary version check
            # (compares the function's LastModified against a fixed 2016
            # timestamp to warn about deployments from very old Zappa versions)
            try:
                updated_time = 1472581018
                function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
                conf = function_response["Configuration"]
                last_updated = parser.parse(conf["LastModified"])
                last_updated_unix = time.mktime(last_updated.timetuple())
            except botocore.exceptions.BotoCoreError as e:
                click.echo(click.style(type(e).__name__, fg="red") + ": " + e.args[0])
                sys.exit(-1)
            except Exception:
                # Most likely the function does not exist yet.
                click.echo(
                    click.style("Warning!", fg="red")
                    + " Couldn't get function "
                    + self.lambda_name
                    + " in "
                    + self.zappa.aws_region
                    + " - have you deployed yet?"
                )
                sys.exit(-1)
            if last_updated_unix <= updated_time:
                click.echo(
                    click.style("Warning!", fg="red")
                    + " You may have upgraded Zappa since deploying this application. You will need to "
                    + click.style("redeploy", bold=True)
                    + " for this deployment to work properly!"
                )
            # Make sure the necessary IAM execution roles are available
            if self.manage_roles:
                try:
                    self.zappa.create_iam_roles()
                except botocore.client.ClientError:
                    click.echo(click.style("Failed", fg="red") + " to " + click.style("manage IAM roles", bold=True) + "!")
                    click.echo(
                        "You may "
                        + click.style("lack the necessary AWS permissions", bold=True)
                        + " to automatically manage a Zappa execution role."
                    )
                    click.echo(
                        "To fix this, see here: "
                        + click.style(
                            "https://github.com/Zappa/Zappa#custom-aws-iam-roles-and-policies-for-deployment",
                            bold=True,
                        )
                    )
                    sys.exit(-1)
            # Create the Lambda Zip,
            if not no_upload:
                self.create_package()
                self.callback("zip")
            # Upload it to S3
            if not no_upload:
                success = self.zappa.upload_to_s3(
                    self.zip_path,
                    self.s3_bucket_name,
                    disable_progress=self.disable_progress,
                )
                if not success:  # pragma: no cover
                    raise ClickException("Unable to upload project to S3. Quitting.")
                # If using a slim handler, upload it to S3 and tell lambda to use this slim handler zip
                if self.stage_config.get("slim_handler", False):
                    # https://github.com/Miserlou/Zappa/issues/510
                    success = self.zappa.upload_to_s3(
                        self.handler_path,
                        self.s3_bucket_name,
                        disable_progress=self.disable_progress,
                    )
                    if not success:  # pragma: no cover
                        raise ClickException("Unable to upload handler to S3. Quitting.")
                    # Copy the project zip to the current project zip
                    current_project_name = "{0!s}_{1!s}_current_project.tar.gz".format(self.api_stage, self.project_name)
                    success = self.zappa.copy_on_s3(
                        src_file_name=self.zip_path,
                        dst_file_name=current_project_name,
                        bucket_name=self.s3_bucket_name,
                    )
                    if not success:  # pragma: no cover
                        raise ClickException("Unable to copy the zip to be the current project. Quitting.")
                    handler_file = self.handler_path
                else:
                    handler_file = self.zip_path
        # Register the Lambda function with that zip as the source
        # You'll also need to define the path to your lambda_handler code.
        kwargs = dict(
            bucket=self.s3_bucket_name,
            function_name=self.lambda_name,
            num_revisions=self.num_retained_versions,
            concurrency=self.lambda_concurrency,
        )
        # Choose the code source, mirroring deploy(): docker image, s3 zip,
        # local zip file, or the package uploaded above.
        if docker_image_uri:
            kwargs["docker_image_uri"] = docker_image_uri
            self.lambda_arn = self.zappa.update_lambda_function(**kwargs)
        elif source_zip and source_zip.startswith("s3://"):
            bucket, key_name = parse_s3_url(source_zip)
            kwargs.update(dict(bucket=bucket, s3_key=key_name))
            self.lambda_arn = self.zappa.update_lambda_function(**kwargs)
        elif source_zip and not source_zip.startswith("s3://"):
            with open(source_zip, mode="rb") as fh:
                byte_stream = fh.read()
                kwargs["local_zip"] = byte_stream
            self.lambda_arn = self.zappa.update_lambda_function(**kwargs)
        else:
            if not no_upload:
                kwargs["s3_key"] = handler_file
            self.lambda_arn = self.zappa.update_lambda_function(**kwargs)
        # Remove the uploaded zip from S3, because it is now registered..
        if not source_zip and not no_upload and not docker_image_uri:
            self.remove_uploaded_zip()
        # Update the configuration, in case there are changes.
        self.lambda_arn = self.zappa.update_lambda_configuration(
            lambda_arn=self.lambda_arn,
            function_name=self.lambda_name,
            handler=self.lambda_handler,
            description=self.lambda_description,
            vpc_config=self.vpc_config,
            timeout=self.timeout_seconds,
            memory_size=self.memory_size,
            ephemeral_storage=self.ephemeral_storage,
            runtime=self.runtime,
            aws_environment_variables=self.aws_environment_variables,
            aws_kms_key_arn=self.aws_kms_key_arn,
            layers=self.layers,
            wait=False,
        )
        # Finally, delete the local copy our zip package
        if not source_zip and not no_upload and not docker_image_uri:
            if self.stage_config.get("delete_local_zip", True):
                self.remove_local_zip()
        if self.use_apigateway:
            self.zappa.create_stack_template(
                lambda_arn=self.lambda_arn,
                lambda_name=self.lambda_name,
                api_key_required=self.api_key_required,
                iam_authorization=self.iam_authorization,
                authorizer=self.authorizer,
                cors_options=self.cors,
                description=self.apigateway_description,
                endpoint_configuration=self.endpoint_configuration,
            )
            self.zappa.update_stack(
                self.lambda_name,
                self.s3_bucket_name,
                wait=True,
                update_only=True,
                disable_progress=self.disable_progress,
            )
            api_id = self.zappa.get_api_id(self.lambda_name)
            # Update binary support
            if self.binary_support:
                self.zappa.add_binary_support(api_id=api_id, cors=self.cors)
            else:
                self.zappa.remove_binary_support(api_id=api_id, cors=self.cors)
            if self.stage_config.get("payload_compression", True):
                self.zappa.add_api_compression(
                    api_id=api_id,
                    min_compression_size=self.stage_config.get("payload_minimum_compression_size", 0),
                )
            else:
                self.zappa.remove_api_compression(api_id=api_id)
            # It looks a bit like we might actually be using this just to get the URL,
            # but we're also updating a few of the APIGW settings.
            endpoint_url = self.deploy_api_gateway(api_id)
            if self.stage_config.get("domain", None):
                endpoint_url = self.stage_config.get("domain")
        else:
            endpoint_url = None
        self.schedule()
        # Update any cognito pool with the lambda arn
        # do this after schedule as schedule clears the lambda policy and we need to add one
        self.update_cognito_triggers()
        self.callback("post")
        if endpoint_url and "https://" not in endpoint_url:
            endpoint_url = "https://" + endpoint_url
        if self.base_path:
            endpoint_url += "/" + self.base_path
        deployed_string = "Your updated Zappa deployment is " + click.style("live", fg="green", bold=True) + "!"
        if self.use_apigateway:
            deployed_string = deployed_string + ": " + click.style("{}".format(endpoint_url), bold=True)
            api_url = None
            # When a custom domain is in play, also show the raw APIGW URL.
            if endpoint_url and "amazonaws.com" not in endpoint_url:
                api_url = self.zappa.get_api_url(self.lambda_name, self.api_stage)
                if endpoint_url != api_url:
                    deployed_string = deployed_string + " (" + api_url + ")"
            if self.stage_config.get("touch", True):
                self.zappa.wait_until_lambda_function_is_updated(function_name=self.lambda_name)
                if api_url:
                    self.touch_endpoint(api_url)
                elif endpoint_url:
                    self.touch_endpoint(endpoint_url)
        click.echo(deployed_string)
def rollback(self, revision):
"""
Rollsback the currently deploy lambda code to a previous revision.
"""
print("Rolling back..")
self.zappa.rollback_lambda_function_version(self.lambda_name, versions_back=revision)
print("Done!")
    def tail(
        self,
        since,
        filter_pattern,
        limit=10000,
        keep_open=True,
        colorize=True,
        http=False,
        non_http=False,
        force_colorize=False,
    ):
        """
        Tail this function's logs.
        if keep_open, do so repeatedly, printing any new logs

        `since` is a human-readable offset parsed by string_to_timestamp;
        `filter_pattern` is passed through to the CloudWatch log fetch.
        """
        try:
            since_stamp = string_to_timestamp(since)
            last_since = since_stamp
            while True:
                # Always fetch from the original start time; duplicates are
                # dropped client-side via the last_since watermark below.
                new_logs = self.zappa.fetch_logs(
                    self.lambda_name,
                    start_time=since_stamp,
                    limit=limit,
                    filter_pattern=filter_pattern,
                )
                new_logs = [e for e in new_logs if e["timestamp"] > last_since]
                self.print_logs(new_logs, colorize, http, non_http, force_colorize)
                if not keep_open:
                    break
                # Advance the watermark to the newest printed entry.
                if new_logs:
                    last_since = new_logs[-1]["timestamp"]
                time.sleep(1)
        except KeyboardInterrupt:  # pragma: no cover
            # Die gracefully
            try:
                sys.exit(0)
            except SystemExit:
                # Hard-exit with 130, the conventional status for Ctrl-C.
                os._exit(130)
def undeploy(self, no_confirm=False, remove_logs=False):
"""
Tear down an existing deployment.
"""
if not no_confirm: # pragma: no cover
confirm = input("Are you sure you want to undeploy? [y/n] ")
if confirm != "y":
return
if self.use_alb:
self.zappa.undeploy_lambda_alb(self.lambda_name)
if self.use_apigateway:
if remove_logs:
self.zappa.remove_api_gateway_logs(self.lambda_name)
domain_name = self.stage_config.get("domain", None)
base_path = self.stage_config.get("base_path", None)
# Only remove the api key when not specified
if self.api_key_required and self.api_key is None:
api_id = self.zappa.get_api_id(self.lambda_name)
self.zappa.remove_api_key(api_id, self.api_stage)
self.zappa.undeploy_api_gateway(self.lambda_name, domain_name=domain_name, base_path=base_path)
self.unschedule() # removes event triggers, including warm up event.
self.zappa.delete_lambda_function(self.lambda_name)
if remove_logs:
self.zappa.remove_lambda_function_logs(self.lambda_name)
click.echo(click.style("Done", fg="green", bold=True) + "!")
def update_cognito_triggers(self):
"""
Update any cognito triggers
"""
if self.cognito:
user_pool = self.cognito.get("user_pool")
triggers = self.cognito.get("triggers", [])
lambda_configs = set()
for trigger in triggers:
lambda_configs.add(trigger["source"].split("_")[0])
self.zappa.update_cognito(self.lambda_name, user_pool, lambda_configs, self.lambda_arn)
    def schedule(self):
        """
        Given a list of functions and a schedule to execute them,
        set up regular execution.

        Also appends the keep-warm event (unless disabled) and, when
        configured, creates the async-task SNS topic and DynamoDB
        response table.
        """
        events = self.stage_config.get("events", [])
        if events:
            if not isinstance(events, list):  # pragma: no cover
                print("Events must be supplied as a list.")
                return
            for event in events:
                self.collision_warning(event.get("function"))
        # Inject the keep-warm ping unless explicitly disabled.
        if self.stage_config.get("keep_warm", True):
            if not events:
                events = []
            keep_warm_rate = self.stage_config.get("keep_warm_expression", "rate(4 minutes)")
            events.append(
                {
                    "name": "zappa-keep-warm",
                    "function": "handler.keep_warm_callback",
                    "expression": keep_warm_rate,
                    "description": "Zappa Keep Warm - {}".format(self.lambda_name),
                }
            )
        if events:
            # The function must already exist to attach event rules to it.
            try:
                function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
            except botocore.exceptions.ClientError:  # pragma: no cover
                click.echo(
                    click.style("Function does not exist", fg="yellow")
                    + ", please "
                    + click.style("deploy", bold=True)
                    + "first. Ex:"
                    + click.style("zappa deploy {}.".format(self.api_stage), bold=True)
                )
                sys.exit(-1)
            print("Scheduling..")
            self.zappa.schedule_events(
                lambda_arn=function_response["Configuration"]["FunctionArn"],
                lambda_name=self.lambda_name,
                events=events,
            )
        # Add async tasks SNS
        if self.stage_config.get("async_source", None) == "sns" and self.stage_config.get("async_resources", True):
            self.lambda_arn = self.zappa.get_lambda_function(function_name=self.lambda_name)
            topic_arn = self.zappa.create_async_sns_topic(lambda_name=self.lambda_name, lambda_arn=self.lambda_arn)
            click.echo("SNS Topic created: %s" % topic_arn)
        # Add async tasks DynamoDB
        table_name = self.stage_config.get("async_response_table", False)
        read_capacity = self.stage_config.get("async_response_table_read_capacity", 1)
        write_capacity = self.stage_config.get("async_response_table_write_capacity", 1)
        if table_name and self.stage_config.get("async_resources", True):
            created, response_table = self.zappa.create_async_dynamodb_table(table_name, read_capacity, write_capacity)
            if created:
                click.echo("DynamoDB table created: %s" % table_name)
            else:
                click.echo("DynamoDB table exists: %s" % table_name)
                # Warn (but don't modify) when the existing table's capacity
                # differs from the configured values.
                provisioned_throughput = response_table["Table"]["ProvisionedThroughput"]
                if (
                    provisioned_throughput["ReadCapacityUnits"] != read_capacity
                    or provisioned_throughput["WriteCapacityUnits"] != write_capacity
                ):
                    click.echo(
                        click.style(
                            "\nWarning! Existing DynamoDB table ({}) does not match configured capacity.\n".format(table_name),
                            fg="red",
                        )
                    )
    def unschedule(self):
        """
        Given a list of scheduled functions,
        tear down their regular execution.

        Also removes the async-task SNS topic when one was configured.
        """
        # Run even if events are not defined to remove previously existing ones (thus default to []).
        events = self.stage_config.get("events", [])
        if not isinstance(events, list):  # pragma: no cover
            print("Events must be supplied as a list.")
            return
        function_arn = None
        try:
            function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
            function_arn = function_response["Configuration"]["FunctionArn"]
        except botocore.exceptions.ClientError:  # pragma: no cover
            # NOTE(review): the message says "Proceeding to unschedule", but
            # raising here aborts instead of proceeding — confirm whether this
            # was meant to warn-and-continue with function_arn=None.
            raise ClickException(
                "Function does not exist, you should deploy first. Ex: zappa deploy {}. "
                "Proceeding to unschedule CloudWatch based events.".format(self.api_stage)
            )
        print("Unscheduling..")
        self.zappa.unschedule_events(
            lambda_name=self.lambda_name,
            lambda_arn=function_arn,
            events=events,
        )
        # Remove async task SNS
        if self.stage_config.get("async_source", None) == "sns" and self.stage_config.get("async_resources", True):
            removed_arns = self.zappa.remove_async_sns_topic(self.lambda_name)
            click.echo("SNS Topic removed: %s" % ", ".join(removed_arns))
def invoke(self, function_name, raw_python=False, command=None, no_color=False):
"""
Invoke a remote function.
"""
# There are three likely scenarios for 'command' here:
# command, which is a modular function path
# raw_command, which is a string of python to execute directly
# manage, which is a Django-specific management command invocation
key = command if command is not None else "command"
if raw_python:
command = {"raw_command": function_name}
else:
command = {key: function_name}
# Can't use hjson
import json as json
response = self.zappa.invoke_lambda_function(
self.lambda_name,
json.dumps(command),
invocation_type="RequestResponse",
)
print(self.format_lambda_response(response, not no_color))
# For a successful request FunctionError is not in response.
# https://github.com/Miserlou/Zappa/pull/1254/
if "FunctionError" in response:
raise ClickException("{} error occurred while invoking command.".format(response["FunctionError"]))
def format_lambda_response(self, response, colorize=True):
if "LogResult" in response:
logresult_bytes = base64.b64decode(response["LogResult"])
try:
decoded = logresult_bytes.decode()
except UnicodeDecodeError:
return logresult_bytes
else:
if colorize and sys.stdout.isatty():
formatted = self.format_invoke_command(decoded)
return self.colorize_invoke_command(formatted)
else:
return decoded
else:
return response
def format_invoke_command(self, string):
"""
Formats correctly the string output from the invoke() method,
replacing line breaks and tabs when necessary.
"""
string = string.replace("\\n", "\n")
formated_response = ""
for line in string.splitlines():
if line.startswith("REPORT"):
line = line.replace("\t", "\n")
if line.startswith("[DEBUG]"):
line = line.replace("\t", " ")
formated_response += line + "\n"
formated_response = formated_response.replace("\n\n", "\n")
return formated_response
    def colorize_invoke_command(self, string):
        """
        Apply various heuristics to return a colorized version the invoke
        command string. If these fail, simply return the string in plaintext.

        Inspired by colorize_log_entry(). Every heuristic is wrapped in its
        own try/except so a single bad token never breaks the output.
        """
        final_string = string
        try:
            # Line headers
            try:
                for token in ["START", "END", "REPORT", "[DEBUG]"]:
                    if token in final_string:
                        format_string = "[{}]"
                        # match whole words only
                        pattern = r"\b{}\b"
                        if token == "[DEBUG]":
                            # Brackets are regex metacharacters, so escape the
                            # literal token and drop the word-boundary anchors.
                            format_string = "{}"
                            pattern = re.escape(token)
                        repl = click.style(format_string.format(token), bold=True, fg="cyan")
                        final_string = re.sub(pattern.format(token), repl, final_string)
            except Exception:  # pragma: no cover
                pass
            # Green bold Tokens
            try:
                for token in [
                    "Zappa Event:",
                    "RequestId:",
                    "Version:",
                    "Duration:",
                    "Billed",
                    "Memory Size:",
                    "Max Memory Used:",
                ]:
                    if token in final_string:
                        final_string = final_string.replace(token, click.style(token, bold=True, fg="green"))
            except Exception:  # pragma: no cover
                pass
            # UUIDs
            # Heuristic: four dashes + alphanumerics-only is treated as a UUID
            # (e.g. a Lambda request id).
            for token in final_string.replace("\t", " ").split(" "):
                try:
                    if token.count("-") == 4 and token.replace("-", "").isalnum():
                        final_string = final_string.replace(token, click.style(token, fg="magenta"))
                except Exception:  # pragma: no cover
                    pass
            return final_string
        except Exception:
            # Any unexpected failure: fall back to the plain string.
            return string
    def status(self, return_json=False):
        """
        Describe the status of the current deployment.

        Gathers Lambda configuration, 24h invocation/error metrics, URLs,
        API keys and scheduled event rules, then prints them as a table or
        (with `return_json`) as JSON.
        """
        def tabular_print(title, value):
            """
            Convenience function for printing formatted table items.
            """
            click.echo("%-*s%s" % (32, click.style("\t" + title, fg="green") + ":", str(value)))
            return
        # Lambda Env Details
        lambda_versions = self.zappa.get_lambda_function_versions(self.lambda_name)
        if not lambda_versions:
            raise ClickException(
                click.style(
                    "No Lambda %s detected in %s - have you deployed yet?" % (self.lambda_name, self.zappa.aws_region),
                    fg="red",
                )
            )
        # OrderedDict keeps the display order stable.
        status_dict = collections.OrderedDict()
        status_dict["Lambda Versions"] = len(lambda_versions)
        function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
        conf = function_response["Configuration"]
        self.lambda_arn = conf["FunctionArn"]
        status_dict["Lambda Name"] = self.lambda_name
        status_dict["Lambda ARN"] = self.lambda_arn
        status_dict["Lambda Role ARN"] = conf["Role"]
        status_dict["Lambda Code Size"] = conf["CodeSize"]
        status_dict["Lambda Version"] = conf["Version"]
        status_dict["Lambda Last Modified"] = conf["LastModified"]
        status_dict["Lambda Memory Size"] = conf["MemorySize"]
        status_dict["Lambda Timeout"] = conf["Timeout"]
        # Handler & Runtime won't be present for lambda Docker deployments
        # https://github.com/Miserlou/Zappa/issues/2188
        status_dict["Lambda Handler"] = conf.get("Handler", "")
        status_dict["Lambda Runtime"] = conf.get("Runtime", "")
        if "VpcConfig" in conf.keys():
            status_dict["Lambda VPC ID"] = conf.get("VpcConfig", {}).get("VpcId", "Not assigned")
        else:
            status_dict["Lambda VPC ID"] = None
        # Calculated statistics
        # (CloudWatch lookups are best-effort; any failure falls back to 0.)
        try:
            function_invocations = self.zappa.cloudwatch.get_metric_statistics(
                Namespace="AWS/Lambda",
                MetricName="Invocations",
                StartTime=datetime.utcnow() - timedelta(days=1),
                EndTime=datetime.utcnow(),
                Period=1440,
                Statistics=["Sum"],
                Dimensions=[{"Name": "FunctionName", "Value": "{}".format(self.lambda_name)}],
            )["Datapoints"][0]["Sum"]
        except Exception:
            function_invocations = 0
        try:
            function_errors = self.zappa.cloudwatch.get_metric_statistics(
                Namespace="AWS/Lambda",
                MetricName="Errors",
                StartTime=datetime.utcnow() - timedelta(days=1),
                EndTime=datetime.utcnow(),
                Period=1440,
                Statistics=["Sum"],
                Dimensions=[{"Name": "FunctionName", "Value": "{}".format(self.lambda_name)}],
            )["Datapoints"][0]["Sum"]
        except Exception:
            function_errors = 0
        # Division by zero (no invocations) falls into the except branch.
        try:
            error_rate = "{0:.2f}%".format(function_errors / function_invocations * 100)
        except Exception:
            error_rate = "Error calculating"
        status_dict["Invocations (24h)"] = int(function_invocations)
        status_dict["Errors (24h)"] = int(function_errors)
        status_dict["Error Rate (24h)"] = error_rate
        # URLs
        if self.use_apigateway:
            api_url = self.zappa.get_api_url(self.lambda_name, self.api_stage)
            status_dict["API Gateway URL"] = api_url
            # Api Keys
            api_id = self.zappa.get_api_id(self.lambda_name)
            for api_key in self.zappa.get_api_keys(api_id, self.api_stage):
                status_dict["API Gateway x-api-key"] = api_key
            # There literally isn't a better way to do this.
            # AWS provides no way to tie a APIGW domain name to its Lambda function.
            domain_url = self.stage_config.get("domain", None)
            base_path = self.stage_config.get("base_path", None)
            if domain_url:
                status_dict["Domain URL"] = "https://" + domain_url
                if base_path:
                    status_dict["Domain URL"] += "/" + base_path
            else:
                status_dict["Domain URL"] = "None Supplied"
        # Scheduled Events
        event_rules = self.zappa.get_event_rules_for_lambda(lambda_arn=self.lambda_arn)
        status_dict["Num. Event Rules"] = len(event_rules)
        if len(event_rules) > 0:
            status_dict["Events"] = []
            for rule in event_rules:
                event_dict = {}
                rule_name = rule["Name"]
                event_dict["Event Rule Name"] = rule_name
                event_dict["Event Rule Schedule"] = rule.get("ScheduleExpression", None)
                event_dict["Event Rule State"] = rule.get("State", None).title()
                event_dict["Event Rule ARN"] = rule.get("Arn", None)
                status_dict["Events"].append(event_dict)
        if return_json:
            # Putting the status in machine readable format
            # https://github.com/Miserlou/Zappa/issues/407
            # NOTE(review): dumpsJSON is not stdlib json — presumably a
            # project wrapper; verify the import at the top of this file.
            print(json.dumpsJSON(status_dict))
        else:
            click.echo("Status for " + click.style(self.lambda_name, bold=True) + ": ")
            for k, v in status_dict.items():
                if k == "Events":
                    # Events are a list of dicts
                    for event in v:
                        for item_k, item_v in event.items():
                            tabular_print(item_k, item_v)
                else:
                    tabular_print(k, v)
        # TODO: S3/SQS/etc. type events?
        return True
def check_stage_name(self, stage_name):
    """
    Validate that *stage_name* matches the pattern AWS API Gateway accepts.

    API Gateway only allows stage names matching a-zA-Z0-9_; calls such as
    apigateway_client.create_deployment otherwise fail with
    "ClientError: An error occurred (BadRequestException) ... Stage name
    only allows a-zA-Z0-9_".

    Returns True when the name is valid (or when API Gateway is not in
    use, in which case the name is irrelevant); raises ValueError otherwise.
    """
    # Only API Gateway deployments care about the stage-name pattern.
    if self.use_apigateway and not self.stage_name_env_pattern.match(stage_name):
        raise ValueError("API stage names must match a-zA-Z0-9_ ; '{0!s}' does not.".format(stage_name))
    return True
def check_environment(self, environment):
    """
    Ensure every value in the *environment* mapping is a string.

    os.putenv (and hence the deployed Lambda environment) only accepts
    string values, so reject anything else up front.

    Returns True on success; raises ValueError naming the offending
    keys otherwise.
    """
    offenders = [key for key, value in environment.items() if not isinstance(value, str)]
    if offenders:
        raise ValueError("The following environment variables are not strings: {}".format(", ".join(offenders)))
    return True
def init(self, settings_file="zappa_settings.json"):
    """
    Initialize a new Zappa project by creating a new zappa_settings.json in a guided process.

    This should probably be broken up into a few separate components once it's stable.
    Testing these inputs requires monkeypatching with mock, which isn't pretty.
    """
    # Make sure we're in a venv.
    self.check_venv()

    # Ensure that we don't already have a zappa_settings file.
    if os.path.isfile(settings_file):
        raise ClickException(
            "This project already has a " + click.style("{0!s} file".format(settings_file), fg="red", bold=True) + "!"
        )

    # Explain system: print the banner and a short description of what
    # this wizard is about to do.
    click.echo(
        click.style(
            """\n███████╗ █████╗ ██████╗ ██████╗ █████╗
╚══███╔╝██╔══██╗██╔══██╗██╔══██╗██╔══██╗
███╔╝ ███████║██████╔╝██████╔╝███████║
███╔╝ ██╔══██║██╔═══╝ ██╔═══╝ ██╔══██║
███████╗██║ ██║██║ ██║ ██║ ██║
╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═╝\n""",
            fg="green",
            bold=True,
        )
    )
    click.echo(
        click.style("Welcome to ", bold=True) + click.style("Zappa", fg="green", bold=True) + click.style("!\n", bold=True)
    )
    click.echo(
        click.style("Zappa", bold=True) + " is a system for running server-less Python web applications"
        " on AWS Lambda and AWS API Gateway."
    )
    click.echo("This `init` command will help you create and configure your new Zappa deployment.")
    click.echo("Let's get started!\n")

    # Create Env: keep prompting until the stage name passes the
    # API Gateway character restrictions enforced by check_stage_name.
    while True:
        click.echo(
            "Your Zappa configuration can support multiple production stages, like '"
            + click.style("dev", bold=True)
            + "', '"
            + click.style("staging", bold=True)
            + "', and '"
            + click.style("production", bold=True)
            + "'."
        )
        env = input("What do you want to call this environment (default 'dev'): ") or "dev"
        try:
            self.check_stage_name(env)
            break
        except ValueError:
            click.echo(click.style("Stage names must match a-zA-Z0-9_", fg="red"))

    # Detect AWS profiles and regions
    # If anyone knows a more straightforward way to easily detect and
    # parse AWS profiles I'm happy to change this, feels like a hack
    session = botocore.session.Session()
    config = session.full_config
    profiles = config.get("profiles", {})
    profile_names = list(profiles.keys())
    click.echo(
        "\nAWS Lambda and API Gateway are only available in certain regions. "
        "Let's check to make sure you have a profile set up in one that will work."
    )

    # Pick a profile: none configured -> warn; exactly one -> use it;
    # several -> prompt, defaulting to "default" when present.
    if not profile_names:
        profile_name, profile = None, None
        click.echo(
            "We couldn't find an AWS profile to use. "
            "Before using Zappa, you'll need to set one up. See here for more info: {}".format(
                click.style(BOTO3_CONFIG_DOCS_URL, fg="blue", underline=True)
            )
        )
    elif len(profile_names) == 1:
        profile_name = profile_names[0]
        profile = profiles[profile_name]
        click.echo("Okay, using profile {}!".format(click.style(profile_name, bold=True)))
    else:
        if "default" in profile_names:
            default_profile = [p for p in profile_names if p == "default"][0]
        else:
            default_profile = profile_names[0]
        while True:
            profile_name = (
                input(
                    "We found the following profiles: {}, and {}. "
                    "Which would you like us to use? (default '{}'): ".format(
                        ", ".join(profile_names[:-1]),
                        profile_names[-1],
                        default_profile,
                    )
                )
                or default_profile
            )
            if profile_name in profiles:
                profile = profiles[profile_name]
                break
            else:
                click.echo("Please enter a valid name for your AWS profile.")

    profile_region = profile.get("region") if profile else None

    # Create Bucket: loop until the user supplies a valid S3 bucket name
    # (the default is a random zappa-xxxxxxxxx name).
    click.echo(
        "\nYour Zappa deployments will need to be uploaded to a " + click.style("private S3 bucket", bold=True) + "."
    )
    click.echo("If you don't have a bucket yet, we'll create one for you too.")
    default_bucket = "zappa-" + "".join(random.choice(string.ascii_lowercase + string.digits) for _ in range(9))
    while True:
        bucket = input("What do you want to call your bucket? (default '%s'): " % default_bucket) or default_bucket
        if is_valid_bucket_name(bucket):
            break
        click.echo(click.style("Invalid bucket name!", bold=True))
        click.echo("S3 buckets must be named according to the following rules:")
        click.echo(
            """* Bucket names must be unique across all existing bucket names in Amazon S3.
* Bucket names must comply with DNS naming conventions.
* Bucket names must be at least 3 and no more than 63 characters long.
* Bucket names must not contain uppercase characters or underscores.
* Bucket names must start with a lowercase letter or number.
* Bucket names must be a series of one or more labels. Adjacent labels are separated
by a single period (.). Bucket names can contain lowercase letters, numbers, and
hyphens. Each label must start and end with a lowercase letter or a number.
* Bucket names must not be formatted as an IP address (for example, 192.168.5.4).
* When you use virtual hosted–style buckets with Secure Sockets Layer (SSL), the SSL
wildcard certificate only matches buckets that don't contain periods. To work around
this, use HTTP or write your own certificate verification logic. We recommend that
you do not use periods (".") in bucket names when using virtual hosted–style buckets.
"""
        )

    # Detect Django/Flask by attempting the imports in this environment.
    try:  # pragma: no cover
        import django  # noqa: F401

        has_django = True
    except ImportError:
        has_django = False

    try:  # pragma: no cover
        import flask  # noqa: F401

        has_flask = True
    except ImportError:
        has_flask = False

    print("")

    # App-specific: ask for the Django settings module, or the modular
    # path to the WSGI app function, offering any auto-detected matches.
    if has_django:  # pragma: no cover
        click.echo("It looks like this is a " + click.style("Django", bold=True) + " application!")
        click.echo("What is the " + click.style("module path", bold=True) + " to your projects's Django settings?")
        django_settings = None

        matches = detect_django_settings()
        while django_settings in [None, ""]:
            if matches:
                click.echo(
                    "We discovered: "
                    + click.style(
                        ", ".join("{}".format(i) for v, i in enumerate(matches)),
                        bold=True,
                    )
                )
                django_settings = input("Where are your project's settings? (default '%s'): " % matches[0]) or matches[0]
            else:
                click.echo("(This will likely be something like 'your_project.settings')")
                django_settings = input("Where are your project's settings?: ")
        # Strip any quotes the user may have pasted around the path.
        django_settings = django_settings.replace("'", "")
        django_settings = django_settings.replace('"', "")
    else:
        matches = None
        if has_flask:
            click.echo("It looks like this is a " + click.style("Flask", bold=True) + " application.")
            matches = detect_flask_apps()
        click.echo("What's the " + click.style("modular path", bold=True) + " to your app's function?")
        click.echo("This will likely be something like 'your_module.app'.")
        app_function = None
        while app_function in [None, ""]:
            if matches:
                click.echo(
                    "We discovered: "
                    + click.style(
                        ", ".join("{}".format(i) for v, i in enumerate(matches)),
                        bold=True,
                    )
                )
                app_function = input("Where is your app's function? (default '%s'): " % matches[0]) or matches[0]
            else:
                app_function = input("Where is your app's function?: ")
        # Strip any quotes the user may have pasted around the path.
        app_function = app_function.replace("'", "")
        app_function = app_function.replace('"', "")

    # TODO: Create VPC?
    # Memory size? Time limit?
    # Domain? LE keys? Region?
    # 'Advanced Settings' mode?

    # Globalize: optionally create extra stages for other regions
    # ('p'/'primary' restricts to the "-1" primary region of each area).
    click.echo(
        "\nYou can optionally deploy to "
        + click.style("all available regions", bold=True)
        + " in order to provide fast global service."
    )
    click.echo("If you are using Zappa for the first time, you probably don't want to do this!")
    global_deployment = False
    while True:
        global_type = input(
            "Would you like to deploy this application "
            + click.style("globally", bold=True)
            + "? (default 'n') [y/n/(p)rimary]: "
        )
        if not global_type:
            break
        if global_type.lower() in ["y", "yes", "p", "primary"]:
            global_deployment = True
            break
        if global_type.lower() in ["n", "no"]:
            global_deployment = False
            break

    # The given environment name
    zappa_settings = {
        env: {
            "profile_name": profile_name,
            "s3_bucket": bucket,
            "runtime": get_venv_from_python_version(),
            "project_name": self.get_project_name(),
            "exclude": ["boto3", "dateutil", "botocore", "s3transfer", "concurrent"],
        }
    }

    if profile_region:
        zappa_settings[env]["aws_region"] = profile_region

    if has_django:
        zappa_settings[env]["django_settings"] = django_settings
    else:
        zappa_settings[env]["app_function"] = app_function

    # Global Region Deployment: each extra region becomes a stage that
    # extends the base env with only aws_region overridden.
    if global_deployment:
        additional_regions = [r for r in API_GATEWAY_REGIONS if r != profile_region]
        # Create additional stages
        if global_type.lower() in ["p", "primary"]:
            additional_regions = [r for r in additional_regions if "-1" in r]
        for region in additional_regions:
            env_name = env + "_" + region.replace("-", "_")
            g_env = {env_name: {"extends": env, "aws_region": region}}
            zappa_settings.update(g_env)

    import json as json  # hjson is fine for loading, not fine for writing.

    zappa_settings_json = json.dumps(zappa_settings, sort_keys=True, indent=4)

    click.echo("\nOkay, here's your " + click.style("zappa_settings.json", bold=True) + ":\n")
    click.echo(click.style(zappa_settings_json, fg="yellow", bold=False))

    confirm = input("\nDoes this look " + click.style("okay", bold=True, fg="green") + "? (default 'y') [y/n]: ") or "yes"
    if confirm[0] not in ["y", "Y", "yes", "YES"]:
        click.echo("" + click.style("Sorry", bold=True, fg="red") + " to hear that! Please init again.")
        return

    # Write the confirmed settings to disk. NOTE(review): this always
    # writes "zappa_settings.json" regardless of the settings_file
    # argument checked at the top — presumably intentional, but worth
    # confirming.
    with open("zappa_settings.json", "w") as zappa_settings_file:
        zappa_settings_file.write(zappa_settings_json)

    if global_deployment:
        click.echo(
            "\n"
            + click.style("Done", bold=True)
            + "! You can also "
            + click.style("deploy all", bold=True)
            + " by executing:\n"
        )
        click.echo(click.style("\t$ zappa deploy --all", bold=True))

        click.echo("\nAfter that, you can " + click.style("update", bold=True) + " your application code with:\n")
        click.echo(click.style("\t$ zappa update --all", bold=True))
    else:
        click.echo(
            "\n"
            + click.style("Done", bold=True)
            + "! Now you can "
            + click.style("deploy", bold=True)
            + " your Zappa application by executing:\n"
        )
        click.echo(click.style("\t$ zappa deploy %s" % env, bold=True))

        click.echo("\nAfter that, you can " + click.style("update", bold=True) + " your application code with:\n")
        click.echo(click.style("\t$ zappa update %s" % env, bold=True))

    click.echo(
        "\nTo learn more, check out our project page on "
        + click.style("GitHub", bold=True)
        + " here: "
        + click.style("https://github.com/Zappa/Zappa", fg="cyan", bold=True)
    )
    click.echo(
        "and stop by our "
        + click.style("Slack", bold=True)
        + " channel here: "
        + click.style("https://zappateam.slack.com", fg="cyan", bold=True)
    )
    click.echo("\nEnjoy!,")
    click.echo(" ~ Team " + click.style("Zappa", bold=True) + "!")
    return
def certify(self, no_confirm=True, manual=False):
    """
    Register or update a domain certificate for this env.

    Three mutually exclusive certificate sources are supported, chosen
    from the stage config:
      * Let's Encrypt  -- `lets_encrypt_key` set, no `certificate`/`certificate_arn`
      * Custom SSL     -- `certificate` + `certificate_key` + `certificate_chain`
      * ACM            -- `certificate_arn`
    Requires `domain` to be configured and the app to already be deployed.
    """
    if not self.domain:
        raise ClickException(
            "Can't certify a domain without " + click.style("domain", fg="red", bold=True) + " configured!"
        )

    if not no_confirm:  # pragma: no cover
        confirm = input("Are you sure you want to certify? [y/n] ")
        if confirm != "y":
            return

    # Make sure this isn't already deployed.
    deployed_versions = self.zappa.get_lambda_function_versions(self.lambda_name)
    if len(deployed_versions) == 0:
        raise ClickException(
            "This application "
            + click.style("isn't deployed yet", fg="red")
            + " - did you mean to call "
            + click.style("deploy", bold=True)
            + "?"
        )

    # Gather all possible certificate-related settings for this stage.
    account_key_location = self.stage_config.get("lets_encrypt_key", None)
    cert_location = self.stage_config.get("certificate", None)
    cert_key_location = self.stage_config.get("certificate_key", None)
    cert_chain_location = self.stage_config.get("certificate_chain", None)
    cert_arn = self.stage_config.get("certificate_arn", None)
    base_path = self.stage_config.get("base_path", None)

    # These are sensitive
    certificate_body = None
    certificate_private_key = None
    certificate_chain = None

    # Prepare for custom Let's Encrypt: stage the account key at a
    # well-known temp path (downloading from S3 if needed).
    if not cert_location and not cert_arn:
        if not account_key_location:
            raise ClickException(
                "Can't certify a domain without "
                + click.style("lets_encrypt_key", fg="red", bold=True)
                + " or "
                + click.style("certificate", fg="red", bold=True)
                + " or "
                + click.style("certificate_arn", fg="red", bold=True)
                + " configured!"
            )

        # Get install account_key to /tmp/account_key.pem
        from .letsencrypt import gettempdir

        if account_key_location.startswith("s3://"):
            bucket, key_name = parse_s3_url(account_key_location)
            self.zappa.s3_client.download_file(bucket, key_name, os.path.join(gettempdir(), "account.key"))
        else:
            from shutil import copyfile

            copyfile(account_key_location, os.path.join(gettempdir(), "account.key"))

    # Prepare for Custom SSL: read the user-supplied PEM files into memory.
    elif not account_key_location and not cert_arn:
        if not cert_location or not cert_key_location or not cert_chain_location:
            raise ClickException(
                "Can't certify a domain without "
                + click.style(
                    "certificate, certificate_key and certificate_chain",
                    fg="red",
                    bold=True,
                )
                + " configured!"
            )

        # Read the supplied certificates.
        with open(cert_location) as f:
            certificate_body = f.read()

        with open(cert_key_location) as f:
            certificate_private_key = f.read()

        with open(cert_chain_location) as f:
            certificate_chain = f.read()

    click.echo("Certifying domain " + click.style(self.domain, fg="green", bold=True) + "..")

    # Get cert and update domain.

    # Let's Encrypt
    if not cert_location and not cert_arn:
        from .letsencrypt import get_cert_and_update_domain

        cert_success = get_cert_and_update_domain(self.zappa, self.lambda_name, self.api_stage, self.domain, manual)

    # Custom SSL / ACM: create the API Gateway domain name if it doesn't
    # exist yet (optionally wiring Route 53), otherwise update it in place.
    else:
        route53 = self.stage_config.get("route53_enabled", True)
        if not self.zappa.get_domain_name(self.domain, route53=route53):
            dns_name = self.zappa.create_domain_name(
                domain_name=self.domain,
                certificate_name=self.domain + "-Zappa-Cert",
                certificate_body=certificate_body,
                certificate_private_key=certificate_private_key,
                certificate_chain=certificate_chain,
                certificate_arn=cert_arn,
                lambda_name=self.lambda_name,
                stage=self.api_stage,
                base_path=base_path,
            )
            if route53:
                self.zappa.update_route53_records(self.domain, dns_name)
            print(
                "Created a new domain name with supplied certificate. "
                "Please note that it can take up to 40 minutes for this domain to be "
                "created and propagated through AWS, but it requires no further work on your part."
            )
        else:
            self.zappa.update_domain_name(
                domain_name=self.domain,
                certificate_name=self.domain + "-Zappa-Cert",
                certificate_body=certificate_body,
                certificate_private_key=certificate_private_key,
                certificate_chain=certificate_chain,
                certificate_arn=cert_arn,
                lambda_name=self.lambda_name,
                stage=self.api_stage,
                route53=route53,
                base_path=base_path,
            )

        cert_success = True

    if cert_success:
        click.echo("Certificate " + click.style("updated", fg="green", bold=True) + "!")
    else:
        click.echo(click.style("Failed", fg="red", bold=True) + " to generate or install certificate! :(")
        click.echo("\n==============\n")
        shamelessly_promote()
##
# Shell
##
def shell(self):
    """
    Spawn an interactive local debug shell with this Zappa object loaded.
    """
    notice = (
        click.style("NOTICE!", fg="yellow", bold=True)
        + " This is a "
        + click.style("local", fg="green", bold=True)
        + " shell, inside a "
        + click.style("Zappa", bold=True)
        + " object!"
    )
    click.echo(notice)
    self.zappa.shell()
    return
##
# Utility
##
def callback(self, position):
    """
    Allows the execution of custom code between creation of the zip file and deployment to AWS.

    *position* is the callback slot name looked up in the stage config's
    "callbacks" mapping; the configured value is a dotted path
    "some.module.function". The working directory is searched first, then
    the active virtualenv. The resolved function is called with this CLI
    object as its single argument.

    :return: None
    """
    callbacks = self.stage_config.get("callbacks", {})
    callback = callbacks.get(position)

    if callback:
        # Split "pkg.mod.func" into module path and function name.
        (mod_path, cb_func_name) = callback.rsplit(".", 1)

        try:  # Prefer callback in working directory
            if mod_path.count(".") >= 1:  # Callback function is nested in a folder
                (mod_folder_path, mod_name) = mod_path.rsplit(".", 1)
                mod_folder_path_fragments = mod_folder_path.split(".")
                working_dir = os.path.join(os.getcwd(), *mod_folder_path_fragments)
            else:
                mod_name = mod_path
                working_dir = os.getcwd()

            # NOTE(review): pkgutil importer find_module/load_module is
            # deprecated (removed in Python 3.12) — confirm supported
            # Python versions before touching this.
            working_dir_importer = pkgutil.get_importer(working_dir)
            module_ = working_dir_importer.find_module(mod_name).load_module(mod_name)

        except (ImportError, AttributeError):
            try:  # Callback func might be in virtualenv
                module_ = importlib.import_module(mod_path)
            except ImportError:  # pragma: no cover
                raise ClickException(
                    click.style("Failed ", fg="red")
                    + "to "
                    + click.style(
                        "import {position} callback ".format(position=position),
                        bold=True,
                    )
                    + 'module: "{mod_path}"'.format(mod_path=click.style(mod_path, bold=True))
                )

        if not hasattr(module_, cb_func_name):  # pragma: no cover
            raise ClickException(
                click.style("Failed ", fg="red")
                + "to "
                + click.style("find {position} callback ".format(position=position), bold=True)
                + 'function: "{cb_func_name}" '.format(cb_func_name=click.style(cb_func_name, bold=True))
                + 'in module "{mod_path}"'.format(mod_path=mod_path)
            )

        cb_func = getattr(module_, cb_func_name)
        cb_func(self)  # Call the function passing self
def check_for_update(self):
    """
    Print a warning if there's a new Zappa version available.

    Best-effort: any exception (network, packaging metadata, etc.) is
    printed and otherwise ignored so it never blocks the CLI.
    """
    try:
        installed_version = pkg_resources.require("zappa")[0].version
        if not check_new_version_available(installed_version):
            return
        click.echo(
            click.style("Important!", fg="yellow", bold=True)
            + " A new version of "
            + click.style("Zappa", bold=True)
            + " is available!"
        )
        click.echo("Upgrade with: " + click.style("pip install zappa --upgrade", bold=True))
        click.echo(
            "Visit the project page on GitHub to see the latest changes: "
            + click.style("https://github.com/Zappa/Zappa", bold=True)
        )
    except Exception as e:  # pragma: no cover
        print(e)
    return
def load_settings(self, settings_file=None, session=None):
    """
    Load the local zappa_settings file.

    An existing boto session can be supplied, though this is likely for testing purposes.

    Returns the loaded Zappa object.
    """
    # Ensure we're passed a valid settings file.
    if not settings_file:
        settings_file = self.get_json_or_yaml_settings()
    if not os.path.isfile(settings_file):
        raise ClickException("Please configure your zappa_settings file.")

    # Load up file
    self.load_settings_file(settings_file)

    # Make sure that this stage is in our settings
    if self.api_stage not in self.zappa_settings.keys():
        raise ClickException("Please define stage '{0!s}' in your Zappa settings.".format(self.api_stage))

    # We need a working title for this project. Use one if supplied, else cwd dirname.
    if "project_name" in self.stage_config:  # pragma: no cover
        # If the name is invalid, this will throw an exception with message up stack
        self.project_name = validate_name(self.stage_config["project_name"])
    else:
        self.project_name = self.get_project_name()

    # The name of the actual AWS Lambda function, ex, 'helloworld-dev'
    # Assume that we already have have validated the name beforehand.
    # Related: https://github.com/Miserlou/Zappa/pull/664
    #          https://github.com/Miserlou/Zappa/issues/678
    #          And various others from Slack.
    self.lambda_name = slugify.slugify(self.project_name + "-" + self.api_stage)

    # Load stage-specific settings. When no bucket is configured a
    # random zappa-xxxxxxxxx name is generated per invocation.
    self.s3_bucket_name = self.stage_config.get(
        "s3_bucket",
        "zappa-" + "".join(random.choice(string.ascii_lowercase + string.digits) for _ in range(9)),
    )
    self.vpc_config = self.stage_config.get("vpc_config", {})
    self.memory_size = self.stage_config.get("memory_size", 512)
    self.ephemeral_storage = self.stage_config.get("ephemeral_storage", {"Size": 512})

    # Validate ephemeral storage structure and size (AWS allows 512-10240 MB).
    if "Size" not in self.ephemeral_storage:
        raise ClickException("Please provide a valid Size for ephemeral_storage in your Zappa settings.")
    elif not 512 <= self.ephemeral_storage["Size"] <= 10240:
        raise ClickException("Please provide a valid ephemeral_storage size between 512 - 10240 in your Zappa settings.")

    self.app_function = self.stage_config.get("app_function", None)
    self.exception_handler = self.stage_config.get("exception_handler", None)
    self.aws_region = self.stage_config.get("aws_region", None)
    self.debug = self.stage_config.get("debug", True)
    self.prebuild_script = self.stage_config.get("prebuild_script", None)
    self.profile_name = self.stage_config.get("profile_name", None)
    self.log_level = self.stage_config.get("log_level", "DEBUG")
    self.domain = self.stage_config.get("domain", None)
    self.base_path = self.stage_config.get("base_path", None)
    self.timeout_seconds = self.stage_config.get("timeout_seconds", 30)
    dead_letter_arn = self.stage_config.get("dead_letter_arn", "")
    self.dead_letter_config = {"TargetArn": dead_letter_arn} if dead_letter_arn else {}
    self.cognito = self.stage_config.get("cognito", None)
    self.num_retained_versions = self.stage_config.get("num_retained_versions", None)

    # Check for valid values of num_retained_versions: must be None or a
    # positive integer.
    if self.num_retained_versions is not None and type(self.num_retained_versions) is not int:
        raise ClickException(
            "Please supply either an integer or null for num_retained_versions in the zappa_settings.json. Found %s"
            % type(self.num_retained_versions)
        )
    elif type(self.num_retained_versions) is int and self.num_retained_versions < 1:
        raise ClickException("The value for num_retained_versions in the zappa_settings.json should be greater than 0.")

    # Provide legacy support for `use_apigateway`, now `apigateway_enabled`.
    # https://github.com/Miserlou/Zappa/issues/490
    # https://github.com/Miserlou/Zappa/issues/493
    self.use_apigateway = self.stage_config.get("use_apigateway", True)
    if self.use_apigateway:
        self.use_apigateway = self.stage_config.get("apigateway_enabled", True)
        self.apigateway_description = self.stage_config.get("apigateway_description", None)

    self.lambda_handler = self.stage_config.get("lambda_handler", "handler.lambda_handler")
    # DEPRECATED. https://github.com/Miserlou/Zappa/issues/456
    self.remote_env_bucket = self.stage_config.get("remote_env_bucket", None)
    self.remote_env_file = self.stage_config.get("remote_env_file", None)
    self.remote_env = self.stage_config.get("remote_env", None)
    self.settings_file = self.stage_config.get("settings_file", None)
    self.django_settings = self.stage_config.get("django_settings", None)
    self.manage_roles = self.stage_config.get("manage_roles", True)
    self.binary_support = self.stage_config.get("binary_support", True)
    self.api_key_required = self.stage_config.get("api_key_required", False)
    self.api_key = self.stage_config.get("api_key")
    self.endpoint_configuration = self.stage_config.get("endpoint_configuration", None)
    self.iam_authorization = self.stage_config.get("iam_authorization", False)
    self.cors = self.stage_config.get("cors", False)
    self.lambda_description = self.stage_config.get("lambda_description", "Zappa Deployment")
    self.lambda_concurrency = self.stage_config.get("lambda_concurrency", None)
    self.environment_variables = self.stage_config.get("environment_variables", {})
    self.aws_environment_variables = self.stage_config.get("aws_environment_variables", {})
    # All env values must be strings (putenv requirement) — fail fast here.
    self.check_environment(self.environment_variables)
    self.authorizer = self.stage_config.get("authorizer", {})
    self.runtime = self.stage_config.get("runtime", get_runtime_from_python_version())
    self.aws_kms_key_arn = self.stage_config.get("aws_kms_key_arn", "")
    self.context_header_mappings = self.stage_config.get("context_header_mappings", {})
    self.xray_tracing = self.stage_config.get("xray_tracing", False)
    self.desired_role_arn = self.stage_config.get("role_arn")
    self.layers = self.stage_config.get("layers", None)
    self.additional_text_mimetypes = self.stage_config.get("additional_text_mimetypes", None)

    # check that BINARY_SUPPORT is True if additional_text_mimetypes is provided
    if self.additional_text_mimetypes and not self.binary_support:
        raise ClickException("zappa_settings.json has additional_text_mimetypes defined, but binary_support is False!")

    # Load ALB-related settings
    self.use_alb = self.stage_config.get("alb_enabled", False)
    self.alb_vpc_config = self.stage_config.get("alb_vpc_config", {})

    # Additional tags
    self.tags = self.stage_config.get("tags", {})

    # Construct the core Zappa object for this stage.
    desired_role_name = self.lambda_name + "-ZappaLambdaExecutionRole"
    self.zappa = Zappa(
        boto_session=session,
        profile_name=self.profile_name,
        aws_region=self.aws_region,
        load_credentials=self.load_credentials,
        desired_role_name=desired_role_name,
        desired_role_arn=self.desired_role_arn,
        runtime=self.runtime,
        tags=self.tags,
        endpoint_urls=self.stage_config.get("aws_endpoint_urls", {}),
        xray_tracing=self.xray_tracing,
    )

    # Forward any recognized custom settings onto the Zappa object;
    # *_policy settings are file paths whose contents are inlined.
    for setting in CUSTOM_SETTINGS:
        if setting in self.stage_config:
            setting_val = self.stage_config[setting]
            # Read the policy file contents.
            if setting.endswith("policy"):
                with open(setting_val, "r") as f:
                    setting_val = f.read()
            setattr(self.zappa, setting, setting_val)

    if self.app_function:
        self.collision_warning(self.app_function)
        if self.app_function[-3:] == ".py":
            click.echo(
                click.style("Warning!", fg="red", bold=True)
                + " Your app_function is pointing to a "
                + click.style("file and not a function", bold=True)
                + "! It should probably be something like 'my_file.app', not 'my_file.py'!"
            )

    return self.zappa
def get_json_or_yaml_settings(self, settings_name="zappa_settings"):
    """
    Return zappa_settings path as JSON or YAML (or TOML), as appropriate.

    Preference order when several exist: .json, .toml, .yml, .yaml.
    Raises ClickException when none of the candidates are present.
    """
    # Candidates listed in preference order; the first that exists wins.
    candidates = [settings_name + extension for extension in (".json", ".toml", ".yml", ".yaml")]
    existing = [path for path in candidates if os.path.isfile(path)]
    if not existing:
        raise ClickException("Please configure a zappa_settings file or call `zappa init`.")
    return existing[0]
def load_settings_file(self, settings_file=None):
    """
    Load our settings file into self.zappa_settings.

    The parser is chosen by file extension: YAML for .yml/.yaml, TOML
    for .toml, JSON otherwise. Raises ClickException when the file is
    missing and ValueError when it cannot be parsed.
    """
    if not settings_file:
        settings_file = self.get_json_or_yaml_settings()
    if not os.path.isfile(settings_file):
        raise ClickException("Please configure your zappa_settings file or call `zappa init`.")

    _, extension = os.path.splitext(settings_file)
    if extension in (".yml", ".yaml"):
        loader, kind = yaml.safe_load, "YAML"
    elif extension == ".toml":
        loader, kind = toml.load, "TOML"
    else:
        loader, kind = json.load, "JSON"

    with open(settings_file) as settings_handle:
        try:
            self.zappa_settings = loader(settings_handle)
        except ValueError:  # pragma: no cover
            raise ValueError("Unable to load the Zappa settings {}. It may be malformed.".format(kind))
def create_package(self, output=None, use_zappa_release: Optional[str] = None):
    """
    Ensure that the package can be properly configured,
    and then create it.

    In `slim_handler` mode two archives are built: a tarball with the full
    application (uploaded to S3) and a small zip containing only the
    handler; otherwise a single zip holds both. The generated
    zappa_settings.py (and, for Django, the django_zappa_app shim) is
    appended to whichever archive Lambda will execute.
    """
    # Create the Lambda zip package (includes project and virtualenvironment)
    # Also define the path the handler file so it can be copied to the zip
    # root for Lambda.
    current_file = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))  # type: ignore[arg-type]
    # NOTE(review): this split/join round-trip is a no-op on the path;
    # presumably a leftover — confirm before simplifying.
    handler_file = os.sep.join(current_file.split(os.sep)[0:]) + os.sep + "handler.py"

    # Create the zip file(s)
    if self.stage_config.get("slim_handler", False):
        # Create two zips. One with the application and the other with just the handler.
        # https://github.com/Miserlou/Zappa/issues/510
        self.zip_path = self.zappa.create_lambda_zip(  # type: ignore[attr-defined]
            prefix=self.lambda_name,
            use_precompiled_packages=self.stage_config.get("use_precompiled_packages", True),
            exclude=self.stage_config.get("exclude", []),
            exclude_glob=self.stage_config.get("exclude_glob", []),
            disable_progress=self.disable_progress,
            archive_format="tarball",
        )

        # Make sure the normal venv is not included in the handler's zip
        exclude = self.stage_config.get("exclude", [])
        cur_venv = self.zappa.get_current_venv()  # type: ignore[attr-defined]
        exclude.append(cur_venv.split("/")[-1])
        self.handler_path = self.zappa.create_lambda_zip(  # type: ignore[attr-defined]
            prefix="handler_{0!s}".format(self.lambda_name),
            venv=self.zappa.create_handler_venv(use_zappa_release=use_zappa_release),  # type: ignore[attr-defined]
            handler_file=handler_file,
            slim_handler=True,
            exclude=exclude,
            exclude_glob=self.stage_config.get("exclude_glob", []),
            output=output,
            disable_progress=self.disable_progress,
        )
    else:
        exclude = self.stage_config.get("exclude", [])

        # Create a single zip that has the handler and application
        self.zip_path = self.zappa.create_lambda_zip(  # type: ignore[attr-defined]
            prefix=self.lambda_name,
            handler_file=handler_file,
            use_precompiled_packages=self.stage_config.get("use_precompiled_packages", True),
            exclude=exclude,
            exclude_glob=self.stage_config.get("exclude_glob", []),
            output=output,
            disable_progress=self.disable_progress,
        )

        # Warn if this is too large for Lambda (50 MB zipped upload limit).
        file_stats = os.stat(self.zip_path)
        if file_stats.st_size > 52428800:  # pragma: no cover
            print(
                "\n\nWarning: Application zip package is likely to be too large for AWS Lambda. "
                'Try setting "slim_handler" to true in your Zappa settings file.\n\n'
            )

    # Throw custom settings into the zip that handles requests
    if self.stage_config.get("slim_handler", False):
        handler_zip = self.handler_path
    else:
        handler_zip = self.zip_path

    with zipfile.ZipFile(handler_zip, "a") as lambda_zip:  # type: ignore[call-overload]
        settings_s = self.get_zappa_settings_string()

        # Copy our Django app into root of our package.
        # It doesn't work otherwise.
        if self.django_settings:
            base = __file__.rsplit(os.sep, 1)[0]
            django_py = "".join(os.path.join(base, "ext", "django_zappa.py"))
            lambda_zip.write(django_py, "django_zappa_app.py")

        # Lambda requires a specific chmod
        temp_settings = tempfile.NamedTemporaryFile(delete=False)
        os.chmod(temp_settings.name, 0o644)
        temp_settings.write(bytes(settings_s, "utf-8"))
        temp_settings.close()
        lambda_zip.write(temp_settings.name, "zappa_settings.py")
        os.unlink(temp_settings.name)
def get_zappa_settings_string(self):
    """
    Render the zappa_settings.py module that is embedded in the Lambda
    package: a string of KEY=value lines the handler reads at runtime.
    """
    parts = ["# Generated by Zappa\n"]

    if self.app_function:
        if "." not in self.app_function:  # pragma: no cover
            raise ClickException(
                "Your "
                + click.style("app_function", fg="red", bold=True)
                + " value is not a modular path."
                + " It needs to be in the format `"
                + click.style("your_module.your_app_object", bold=True)
                + "`."
            )
        app_module, app_function = self.app_function.rsplit(".", 1)
        parts.append("APP_MODULE='{0!s}'\nAPP_FUNCTION='{1!s}'\n".format(app_module, app_function))

    if self.exception_handler:
        parts.append("EXCEPTION_HANDLER='{0!s}'\n".format(self.exception_handler))
    else:
        parts.append("EXCEPTION_HANDLER=None\n")

    parts.append("DEBUG=True\n" if self.debug else "DEBUG=False\n")
    parts.append("LOG_LEVEL='{0!s}'\n".format(self.log_level))
    parts.append("BINARY_SUPPORT=True\n" if self.binary_support else "BINARY_SUPPORT=False\n")

    head_map_dict = dict(self.context_header_mappings)
    parts.append("CONTEXT_HEADER_MAPPINGS={0}\n".format(head_map_dict))

    # If we're on a domain, we don't need to define the /<<env>> in
    # the WSGI PATH
    parts.append("DOMAIN='{0!s}'\n".format(self.domain) if self.domain else "DOMAIN=None\n")
    parts.append("BASE_PATH='{0!s}'\n".format(self.base_path) if self.base_path else "BASE_PATH=None\n")

    # Pass through remote config bucket and path
    if self.remote_env:
        parts.append("REMOTE_ENV='{0!s}'\n".format(self.remote_env))
    # DEPRECATED. use remote_env instead
    elif self.remote_env_bucket and self.remote_env_file:
        parts.append("REMOTE_ENV='s3://{0!s}/{1!s}'\n".format(self.remote_env_bucket, self.remote_env_file))

    # Local envs
    env_dict = {}
    if self.aws_region:
        env_dict["AWS_REGION"] = self.aws_region
    env_dict.update(dict(self.environment_variables))

    # Environment variable keys must be ascii
    # https://github.com/Miserlou/Zappa/issues/604
    # https://github.com/Miserlou/Zappa/issues/998
    try:
        env_dict = {k.encode("ascii").decode("ascii"): v for k, v in env_dict.items()}
    except Exception:
        raise ValueError("Environment variable keys must be ascii.")
    parts.append("ENVIRONMENT_VARIABLES={0}\n".format(env_dict))

    # We can be environment-aware
    parts.append("API_STAGE='{0!s}'\n".format(self.api_stage))
    parts.append("PROJECT_NAME='{0!s}'\n".format(self.project_name))

    if self.settings_file:
        parts.append("SETTINGS_FILE='{0!s}'\n".format(self.settings_file))
    else:
        parts.append("SETTINGS_FILE=None\n")

    if self.django_settings:
        parts.append("DJANGO_SETTINGS='{0!s}'\n".format(self.django_settings))
    else:
        parts.append("DJANGO_SETTINGS=None\n")

    # If slim handler, path to project zip
    if self.stage_config.get("slim_handler", False):
        parts.append(
            "ARCHIVE_PATH='s3://{0!s}/{1!s}_{2!s}_current_project.tar.gz'\n".format(
                self.s3_bucket_name, self.api_stage, self.project_name
            )
        )
        # since includes are for slim handler add the setting here by joining arbitrary list from zappa_settings file
        # and tell the handler we are the slim_handler
        # https://github.com/Miserlou/Zappa/issues/776
        parts.append("SLIM_HANDLER=True\n")
        include = self.stage_config.get("include", [])
        if len(include) >= 1:
            parts.append("INCLUDE=" + str(include) + "\n")

    # AWS Events function mapping
    event_mapping = {}
    for event in self.stage_config.get("events", []):
        arn = event.get("event_source", {}).get("arn")
        function = event.get("function")
        if arn and function:
            event_mapping[arn] = function
    parts.append("AWS_EVENT_MAPPING={0!s}\n".format(event_mapping))

    # Map Lex bot events
    bot_events_mapping = {}
    for bot_event in self.stage_config.get("bot_events", []):
        event_source = bot_event.get("event_source", {})
        intent = event_source.get("intent")
        invocation_source = event_source.get("invocation_source")
        function = bot_event.get("function")
        if intent and invocation_source and function:
            bot_events_mapping[str(intent) + ":" + str(invocation_source)] = function
    parts.append("AWS_BOT_EVENT_MAPPING={0!s}\n".format(bot_events_mapping))

    # Map cognito triggers
    cognito_trigger_mapping = {}
    for trigger in self.stage_config.get("cognito", {}).get("triggers", []):
        source = trigger.get("source")
        function = trigger.get("function")
        if source and function:
            cognito_trigger_mapping[source] = function
    parts.append("COGNITO_TRIGGER_MAPPING={0!s}\n".format(cognito_trigger_mapping))

    # Authorizer config
    authorizer_function = self.authorizer.get("function", None)
    if authorizer_function:
        parts.append("AUTHORIZER_FUNCTION='{0!s}'\n".format(authorizer_function))

    # async response
    async_response_table = self.stage_config.get("async_response_table", "")
    parts.append("ASYNC_RESPONSE_TABLE='{0!s}'\n".format(async_response_table))

    # additional_text_mimetypes
    additional_text_mimetypes = self.stage_config.get("additional_text_mimetypes", [])
    parts.append(f"ADDITIONAL_TEXT_MIMETYPES={additional_text_mimetypes}\n")

    return "".join(parts)
def remove_local_zip(self):
    """
    Remove the local application zip archive (and the handler archive,
    if one was built) once it is no longer needed.

    Honors the ``delete_local_zip`` stage setting (default: True).
    Exits the process with status -1 if deletion fails; unlike before,
    the failure reason is now printed to stderr instead of exiting
    silently.
    """
    if not self.stage_config.get("delete_local_zip", True):
        return
    try:
        if os.path.isfile(self.zip_path):
            os.remove(self.zip_path)
        # handler_path is only set for slim-handler builds.
        if self.handler_path and os.path.isfile(self.handler_path):
            os.remove(self.handler_path)
    except Exception as e:  # pragma: no cover
        # Surface the failure instead of exiting with no diagnostic.
        print(
            "Error: could not remove local zip file(s): {}".format(e),
            file=sys.stderr,
        )
        sys.exit(-1)
def remove_uploaded_zip(self):
    """
    Remove the zip archive(s) from S3 after upload and registration.

    Honors the ``delete_s3_zip`` stage setting (default: True). For slim
    handler deployments only the handler archive is removed — the project
    archive must stay on S3 because the slim handler loads it at runtime.
    """
    if not self.stage_config.get("delete_s3_zip", True):
        return
    # The uploaded package is registered with Lambda now, so it can go.
    self.zappa.remove_from_s3(self.zip_path, self.s3_bucket_name)
    if self.stage_config.get("slim_handler", False):
        # Keep the project zip; remove only the handler archive.
        self.zappa.remove_from_s3(self.handler_path, self.s3_bucket_name)
def on_exit(self):
    """
    Cleanup hook run when the command finishes.

    Always called: SystemExit, KeyboardInterrupt and any other Exception
    that occurs. Removes the local zip, and the uploaded S3 zip when the
    running command loaded credentials.
    """
    if not self.zip_path:
        return
    # Only commands that loaded credentials could have uploaded anything,
    # so only then is there an S3-side artifact to clean up.
    if self.load_credentials:
        self.remove_uploaded_zip()
    self.remove_local_zip()
def print_logs(self, logs, colorize=True, http=False, non_http=False, force_colorize=None):
    """
    Parse, filter and print CloudWatch log entries to the console.

    Lambda bookkeeping lines (START/REPORT/END RequestId) are dropped.
    With ``http`` set, only HTTP-looking entries print; with ``non_http``
    set, only non-HTTP entries print (``http`` wins if both are set).
    Output is colorized unless both ``colorize`` and ``force_colorize``
    are falsy.
    """
    for entry in logs:
        timestamp = entry["timestamp"]
        message = entry["message"]
        # Drop Lambda's per-invocation bookkeeping lines.
        if any(marker in message for marker in ("START RequestId", "REPORT RequestId", "END RequestId")):
            continue
        text = message.strip()
        # Decide whether this entry passes the HTTP/non-HTTP filter;
        # `http` takes precedence when both flags are set.
        if http:
            wanted = self.is_http_log_entry(text)
        elif non_http:
            wanted = not self.is_http_log_entry(text)
        else:
            wanted = True
        if not wanted:
            continue
        if colorize or force_colorize:
            click.echo(
                click.style("[", fg="cyan")
                + click.style(str(timestamp), bold=True)
                + click.style("]", fg="cyan")
                + self.colorize_log_entry(text),
                color=force_colorize,
            )
        else:
            print("[" + str(timestamp) + "] " + text)
def is_http_log_entry(self, string):
    """
    Determine whether a log line looks like an HTTP-formatted entry.

    A line qualifies when it contains a dotted-quad token (an IPv4
    address) and is not a Zappa debug event.
    """
    # Debug events are never treated as HTTP logs.
    if "Zappa Event" in string:
        return False

    def _is_dotted_quad(token):
        try:
            return token.count(".") == 3 and token.replace(".", "").isnumeric()
        except Exception:  # pragma: no cover
            return False

    # Any IPv4-looking token marks the line as an HTTP access-log entry.
    return any(_is_dotted_quad(tok) for tok in string.replace("\t", " ").split(" "))
def get_project_name(self):
    """
    Derive a default project name from the current working directory's
    basename, slugified and truncated to 15 characters.
    """
    directory_name = os.getcwd().split(os.sep)[-1]
    return slugify.slugify(directory_name)[:15]
def colorize_log_entry(self, string):
    """
    Apply various heuristics to return a colorized version of a string.
    If these fail, simply return the string in plaintext.
    """
    final_string = string
    try:
        # First, do stuff in square brackets
        inside_squares = re.findall(r"\[([^]]*)\]", string)
        for token in inside_squares:
            if token in ["CRITICAL", "ERROR", "WARNING", "DEBUG", "INFO", "NOTSET"]:
                # Known log-level names: bold cyan token inside cyan brackets.
                final_string = final_string.replace(
                    "[" + token + "]",
                    click.style("[", fg="cyan") + click.style(token, fg="cyan", bold=True) + click.style("]", fg="cyan"),
                )
            else:
                # Any other bracketed token: bold, with cyan brackets.
                final_string = final_string.replace(
                    "[" + token + "]",
                    click.style("[", fg="cyan") + click.style(token, bold=True) + click.style("]", fg="cyan"),
                )
        # Then do quoted strings
        quotes = re.findall(r'"[^"]*"', string)
        for token in quotes:
            final_string = final_string.replace(token, click.style(token, fg="yellow"))
        # And UUIDs
        for token in final_string.replace("\t", " ").split(" "):
            try:
                # Heuristic: four dashes + alphanumerics looks like a UUID.
                if token.count("-") == 4 and token.replace("-", "").isalnum():
                    final_string = final_string.replace(token, click.style(token, fg="magenta"))
            except Exception:  # pragma: no cover
                pass
            # And IP addresses
            try:
                # Heuristic: dotted quad of digits looks like an IPv4 address.
                if token.count(".") == 3 and token.replace(".", "").isnumeric():
                    final_string = final_string.replace(token, click.style(token, fg="red"))
            except Exception:  # pragma: no cover
                pass
            # And status codes
            try:
                if token in ["200"]:
                    final_string = final_string.replace(token, click.style(token, fg="green"))
                if token in ["400", "401", "403", "404", "405", "500"]:
                    final_string = final_string.replace(token, click.style(token, fg="red"))
            except Exception:  # pragma: no cover
                pass
        # And Zappa Events
        try:
            if "Zappa Event:" in final_string:
                final_string = final_string.replace(
                    "Zappa Event:",
                    click.style("Zappa Event:", bold=True, fg="green"),
                )
        except Exception:  # pragma: no cover
            pass
        # And dates
        # NOTE(review): this colors every tab-separated field green, not
        # just dates — presumably dates are the first tab field; confirm.
        for token in final_string.split("\t"):
            try:
                final_string = final_string.replace(token, click.style(token, fg="green"))
            except Exception:  # pragma: no cover
                pass
        # NOTE(review): the second replace below is a no-op (" " -> " ");
        # it was presumably meant to collapse double spaces ("  " -> " ").
        # Verify against upstream before changing.
        final_string = final_string.replace("\t", " ").replace(" ", " ")
        # Keep a leading space so entries line up after the timestamp.
        if final_string[0] != " ":
            final_string = " " + final_string
        return final_string
    except Exception:  # pragma: no cover
        # Colorizing is best-effort; fall back to the raw string.
        return string
def execute_prebuild_script(self):
    """
    Parse and execute the prebuild_script from the zappa_settings.

    The setting is a dotted path ``"module.path.function"``. A script in
    the working directory is preferred; if it cannot be loaded from
    there, the module is imported from the active environment instead.

    Raises:
        ClickException: if the module cannot be imported at all, or the
            named function is not defined in it.
    """
    import importlib.util

    (pb_mod_path, pb_func) = self.prebuild_script.rsplit(".", 1)
    try:  # Prefer prebuild script in working directory
        if pb_mod_path.count(".") >= 1:  # Prebuild script func is nested in a folder
            (mod_folder_path, mod_name) = pb_mod_path.rsplit(".", 1)
            mod_folder_path_fragments = mod_folder_path.split(".")
            working_dir = os.path.join(os.getcwd(), *mod_folder_path_fragments)
        else:
            mod_name = pb_mod_path
            working_dir = os.getcwd()
        working_dir_importer = pkgutil.get_importer(working_dir)
        # find_module()/load_module() were removed in Python 3.12 (the
        # FileFinder returned here no longer has them), which silently
        # defeated the working-directory preference. Use the spec-based
        # loading API instead.
        spec = working_dir_importer.find_spec(mod_name)
        if spec is None:
            raise ImportError(mod_name)
        module_ = importlib.util.module_from_spec(spec)
        # Register before executing, mirroring load_module()'s behavior.
        sys.modules[mod_name] = module_
        spec.loader.exec_module(module_)
    except (ImportError, AttributeError):
        try:  # Prebuild func might be in virtualenv
            module_ = importlib.import_module(pb_mod_path)
        except ImportError:  # pragma: no cover
            raise ClickException(
                click.style("Failed ", fg="red")
                + "to "
                + click.style("import prebuild script ", bold=True)
                + 'module: "{pb_mod_path}"'.format(pb_mod_path=click.style(pb_mod_path, bold=True))
            )
    if not hasattr(module_, pb_func):  # pragma: no cover
        raise ClickException(
            click.style("Failed ", fg="red")
            + "to "
            + click.style("find prebuild script ", bold=True)
            + 'function: "{pb_func}" '.format(pb_func=click.style(pb_func, bold=True))
            + 'in module "{pb_mod_path}"'.format(pb_mod_path=pb_mod_path)
        )
    prebuild_function = getattr(module_, pb_func)
    prebuild_function()  # Call the function
def collision_warning(self, item):
    """
    Given a string, print a warning if this could
    collide with a Zappa core package module.
    Use for app functions and events.
    """
    # Module prefixes that belong to Zappa's own package namespace.
    reserved_prefixes = (
        "zappa.",
        "wsgi.",
        "middleware.",
        "handler.",
        "util.",
        "letsencrypt.",
        "cli.",
    )
    for prefix in reserved_prefixes:
        if not item.startswith(prefix):
            continue
        click.echo(
            click.style("Warning!", fg="red", bold=True)
            + " You may have a namespace collision between "
            + click.style(item, bold=True)
            + " and "
            + click.style(prefix, bold=True)
            + "! You may want to rename that file."
        )
def deploy_api_gateway(self, api_id):
    """
    Deploy the API Gateway stage for *api_id* using this stage's
    configuration, and return the resulting endpoint URL.
    """
    cfg = self.stage_config.get
    # cache_cluster_size must be passed as a string (e.g. "0.5").
    endpoint_url = self.zappa.deploy_api_gateway(
        api_id=api_id,
        stage_name=self.api_stage,
        cache_cluster_enabled=cfg("cache_cluster_enabled", False),
        cache_cluster_size=str(cfg("cache_cluster_size", 0.5)),
        cloudwatch_log_level=cfg("cloudwatch_log_level", "OFF"),
        cloudwatch_data_trace=cfg("cloudwatch_data_trace", False),
        cloudwatch_metrics_enabled=cfg("cloudwatch_metrics_enabled", False),
        cache_cluster_ttl=cfg("cache_cluster_ttl", 300),
        cache_cluster_encrypted=cfg("cache_cluster_encrypted", False),
    )
    return endpoint_url
def check_venv(self):
    """Ensure we're running inside an active virtual environment."""
    # The user explicitly opted out of the check.
    if self.vargs and self.vargs.get("no_venv"):
        return
    # During `init` there are no settings (and no self.zappa) yet, so
    # fall back to the classmethod on Zappa itself.
    venv = self.zappa.get_current_venv() if self.zappa else Zappa.get_current_venv()
    if venv:
        return
    raise ClickException(
        click.style("Zappa", bold=True)
        + " requires an "
        + click.style("active virtual environment", bold=True, fg="red")
        + "!\n"
        + "Learn more about virtual environments here: "
        + click.style(
            "http://docs.python-guide.org/en/latest/dev/virtualenvs/",
            bold=False,
            fg="cyan",
        )
    )
def silence(self):
    """
    Route all stdout to null.

    Replaces sys.stdout and sys.stderr with handles to os.devnull so all
    subsequent console output from this process is discarded.
    """
    # NOTE(review): the original stream objects are not saved and the
    # devnull handles are never closed — acceptable for a short-lived CLI
    # process, but confirm nothing later needs the real streams back.
    sys.stdout = open(os.devnull, "w")
    sys.stderr = open(os.devnull, "w")
def touch_endpoint(self, endpoint_url):
    """
    Test the deployed endpoint with a GET request.

    Retries on 504 responses (very large packages can take 60-90 seconds
    to become ready), then raises if the endpoint still answers with a
    5xx status.

    Raises:
        ClickException: if the final response status is >= 500.
    """
    # Private APIGW endpoints most likely can't be reached by a deployer
    # unless they're connected to the VPC by VPN. Instead of trying to
    # connect to the service, print a warning and let the user know to
    # check it manually.
    # See: https://github.com/Miserlou/Zappa/pull/1719#issuecomment-471341565
    if "PRIVATE" in self.stage_config.get("endpoint_configuration", []):
        print(
            click.style("Warning!", fg="yellow", bold=True) + " Since you're deploying a private API Gateway endpoint,"
            " Zappa cannot determine if your function is returning "
            " a correct status code. You should check your API's response"
            " manually before considering this deployment complete."
        )
        return
    touch_path = self.stage_config.get("touch_path", "/")
    req = requests.get(endpoint_url + touch_path)
    # While a large package initializes, the gateway returns 504s; retry
    # up to five more times before judging the response.
    retries = 0
    while req.status_code == 504 and retries < 5:
        req = requests.get(endpoint_url + touch_path)
        retries += 1
    if req.status_code >= 500:
        raise ClickException(
            click.style("Warning!", fg="red", bold=True)
            + " Status check on the deployed lambda failed."
            + " A GET request to '"
            + touch_path
            + "' yielded a "
            + click.style(str(req.status_code), fg="red", bold=True)
            + " response code."
        )
def shamelessly_promote():
    """
    Shamelessly promote our little community.
    """
    # Pre-style the highlighted words for the first line.
    help_word = click.style("help", fg="green", bold=True)
    bug_word = click.style("bug", fg="green", bold=True)
    know_word = click.style("know", fg="green", bold=True)
    click.echo("Need " + help_word + "? Found a " + bug_word + "? Let us " + know_word + "! :D")
    click.echo(
        "File bug reports on "
        + click.style("GitHub", bold=True)
        + " here: "
        + click.style("https://github.com/Zappa/Zappa", fg="cyan", bold=True)
    )
    click.echo(
        "And join our "
        + click.style("Slack", bold=True)
        + " channel here: "
        + click.style("https://zappateam.slack.com", fg="cyan", bold=True)
    )
    click.echo("Love!,")
    click.echo(" ~ Team " + click.style("Zappa", bold=True) + "!")
The code above provides the dependencies needed to implement the `handle` function. Write a Python function `def handle()` that solves the following problem:
Serve as the main program execution handler.
The function follows:
def handle():  # pragma: no cover
    """
    Main program execution handler.

    Instantiates the CLI, runs it, and guarantees cleanup via on_exit()
    on SystemExit, KeyboardInterrupt and any other exception, forwarding
    an appropriate exit code.
    """
    # Bug fix: if ZappaCLI() itself raises, `cli` was never bound and the
    # except blocks' cli.on_exit() raised NameError, masking the real
    # error. Pre-bind cli to None and guard the cleanup calls.
    cli = None
    try:
        cli = ZappaCLI()
        sys.exit(cli.handle())
    except SystemExit as e:  # pragma: no cover
        if cli is not None:
            cli.on_exit()
        sys.exit(e.code)
    except KeyboardInterrupt:  # pragma: no cover
        if cli is not None:
            cli.on_exit()
        # 130 is the conventional exit code for SIGINT.
        sys.exit(130)
    except Exception:
        if cli is not None:
            cli.on_exit()
        click.echo("Oh no! An " + click.style("error occurred", fg="red", bold=True) + "! :(")
        click.echo("\n==============\n")
        import traceback

        traceback.print_exc()
        click.echo("\n==============\n")
        shamelessly_promote()
        sys.exit(-1)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.