
Commit 0cf5b66

Merge pull request #2008 from GSA/main
10/01/2025 Production Deploy
2 parents 1715201 + ec390d5


17 files changed: +795 additions, -521 deletions


.gitignore

Lines changed: 1 addition & 0 deletions
@@ -49,6 +49,7 @@ htmlcov/
 .coverage.*
 .cache
 .pytest_cache
+.hypothesis/
 coverage.xml
 test_results.xml
 *,cover

README.md

Lines changed: 1 addition & 1 deletion
@@ -131,7 +131,7 @@ configuration after installation to get working out of the box:
 You can install them by running the following:
 
 ```sh
-brew install jq git tfenv cloudfoundry/tap/cf-cli@8 redis vim wget
+brew install jq git tenv cloudfoundry/tap/cf-cli@8 redis vim wget
 ```
 
 #### Terraform Installation

app/__init__.py

Lines changed: 23 additions & 2 deletions
@@ -389,16 +389,31 @@ def setup_sqlalchemy_events(app):
 
     @event.listens_for(db.engine, "connect")
    def connect(dbapi_connection, connection_record):
-        current_app.logger.debug(f"Using {dbapi_connection} {connection_record}")
+        if dbapi_connection is None or connection_record is None:
+            current_app.logger.warning(
+                f"Something wrong with sqalalchemy \
+                dbapi_connection {dbapi_connection} connection_record {connection_record}"
+            )
         pass
 
     @event.listens_for(db.engine, "close")
    def close(dbapi_connection, connection_record):
+
+        if dbapi_connection is None or connection_record is None:
+            current_app.logger.warning(
+                f"Something wrong with sqalalchemy \
+                dbapi_connection {dbapi_connection} connection_record {connection_record}"
+            )
         pass
 
     @event.listens_for(db.engine, "checkout")
    def checkout(dbapi_connection, connection_record, connection_proxy):
-        current_app.logger.debug(f"Using {dbapi_connection} {connection_proxy}")
+
+        if dbapi_connection is None or connection_proxy is None:
+            current_app.logger.warning(
+                f"Something wrong with sqalalchemy \
+                dbapi_connection {dbapi_connection} connection_record {connection_proxy}"
+            )
 
         try:
             # this will overwrite any previous checkout_at timestamp
@@ -441,6 +456,12 @@ def checkout(dbapi_connection, connection_record, connection_proxy):
 
     @event.listens_for(db.engine, "checkin")
    def checkin(dbapi_connection, connection_record):
+
+        if dbapi_connection is None or connection_record is None:
+            current_app.logger.warning(
+                f"Something wrong with sqalalchemy \
+                dbapi_connection {dbapi_connection} connection_record {connection_record}"
+            )
         pass
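These hunks swap the engine pool listeners' debug logging for warnings that fire only when SQLAlchemy hands a listener a None connection object. For reference, a minimal standalone sketch of how these pool events are wired up in SQLAlchemy, using an in-memory SQLite engine and print() in place of the Flask logger; the engine and messages here are illustrative, not the app's actual wiring:

```python
from sqlalchemy import create_engine, event, text

engine = create_engine("sqlite:///:memory:")


@event.listens_for(engine, "connect")
def connect(dbapi_connection, connection_record):
    # Fires once for each new DBAPI connection the pool creates.
    if dbapi_connection is None or connection_record is None:
        print("something wrong with new connection", dbapi_connection, connection_record)


@event.listens_for(engine, "checkout")
def checkout(dbapi_connection, connection_record, connection_proxy):
    # Fires every time a pooled connection is handed out to the application.
    if dbapi_connection is None or connection_proxy is None:
        print("something wrong with checked-out connection", dbapi_connection, connection_proxy)


with engine.connect() as conn:
    conn.execute(text("SELECT 1"))  # triggers "connect" on first use, then "checkout"
```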

app/aws/s3.py

Lines changed: 14 additions & 8 deletions
@@ -412,6 +412,10 @@ def get_job_from_s3(service_id, job_id):
     that indicates things are permanently broken, we want to give up right away
     to save time.
     """
+
+    job = get_job_cache(job_id)
+    if job:
+        return job
     # We have to make sure the retries don't take up to much time, because
     # we might be retrieving dozens of jobs. So max time is:
     # 0.2 + 0.4 + 0.8 + 1.6 = 3.0 seconds
@@ -479,7 +483,9 @@ def extract_phones(job, service_id, job_id):
     try:
         first_row = next(csv_reader)
     except StopIteration:
-        current_app.logger.warning(f"Empty CSV file for job {job_id} in service {service_id}")
+        current_app.logger.warning(
+            f"Empty CSV file for job {job_id} in service {service_id}"
+        )
         return {}
 
     phone_index = 0
@@ -511,7 +517,9 @@ def extract_phones(job, service_id, job_id):
 
 def extract_personalisation(job):
     if job is None:
-        current_app.logger.warning("No job data provided for personalisation extraction")
+        current_app.logger.warning(
+            "No job data provided for personalisation extraction"
+        )
         return {}
     if isinstance(job, dict):
         job = job[0]
@@ -520,7 +528,9 @@ def extract_personalisation(job):
         return {}
     job = job.split("\r\n")
     if not job or not job[0]:
-        current_app.logger.warning("Empty job data after split for personalisation extraction")
+        current_app.logger.warning(
+            "Empty job data after split for personalisation extraction"
+        )
         return {}
     first_row = job[0]
     job.pop(0)
@@ -554,10 +564,8 @@ def get_phone_number_from_s3(service_id, job_id, job_row_number):
 
     phones = get_job_cache(f"{job_id}_phones")
     if phones is None:
-        current_app.logger.debug("HAVE TO REEXTRACT PHONES!")
         phones = extract_phones(job, service_id, job_id)
         set_job_cache(f"{job_id}_phones", phones)
-        current_app.logger.debug(f"SETTING PHONES TO {phones}")
     else:
         phones = phones[
             0
@@ -606,9 +614,7 @@ def get_personalisation_from_s3(service_id, job_id, job_row_number):
 
 
 def get_job_metadata_from_s3(service_id, job_id):
-    current_app.logger.debug(
-        f"#notify-debug-s3-partitioning CALLING GET_JOB_METADATA with {service_id}, {job_id}"
-    )
+
     obj = get_s3_object(*get_job_location(service_id, job_id))
     return obj.get()["Metadata"]
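get_job_from_s3 now short-circuits when the job body is already cached, skipping the retrying S3 fetch, and the noisy phone-cache debug logging further down is dropped. The cache helpers themselves are not part of this diff; a rough sketch of the cache-first pattern, with a hypothetical in-memory dict standing in for the real get_job_cache/set_job_cache and a caller-supplied fetch function standing in for the S3 retry logic (where the real cache is populated is outside this hunk):

```python
# Hypothetical stand-ins for illustration only; the real cache helpers and
# S3 retry logic live elsewhere in the app.
_job_cache = {}


def get_job_cache(key):
    return _job_cache.get(key)


def set_job_cache(key, value):
    _job_cache[key] = value


def get_job_cached(job_id, fetch):
    # Serve from cache when possible; otherwise fall back to the slow fetch
    # and remember the result for the next caller.
    job = get_job_cache(job_id)
    if job:
        return job
    job = fetch(job_id)
    if job is not None:
        set_job_cache(job_id, job)
    return job


print(get_job_cached("job-123", lambda job_id: "phone number\r\n+15555550123"))
```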

app/celery/provider_tasks.py

Lines changed: 0 additions & 3 deletions
@@ -23,9 +23,6 @@
 def deliver_sms(self, notification_id):
     """Branch off to the final step in delivering the notification to sns and get delivery receipts."""
     try:
-        current_app.logger.info(
-            "Start sending SMS for notification id: {}".format(notification_id)
-        )
         notification = notifications_dao.get_notification_by_id(notification_id)
         ansi_green = "\033[32m"
         ansi_reset = "\033[0m"

app/clients/cloudwatch/aws_cloudwatch.py

Lines changed: 14 additions & 0 deletions
@@ -7,6 +7,7 @@
 
 from app.clients import AWS_CLIENT_CONFIG, Client
 from app.cloudfoundry_config import cloud_config
+from app.utils import hilite
 
 
 class AwsCloudwatchClient(Client):
@@ -178,6 +179,19 @@ def check_delivery_receipts(self, start, end):
         )
         failed_event_set = self._get_receipts(log_group_name, start, end)
         current_app.logger.info((f"Failed message count: {len(failed_event_set)}"))
+        raise_exception = False
+        for failure in failed_event_set:
+            try:
+                failure = json.loads(failure)
+                if "No quota left for account" == failure["delivery.providerResponse"]:
+                    current_app.logger.warning(
+                        hilite("**********NO QUOTA LEFT TO SEND MESSAGES!!!**********")
+                    )
+                    raise_exception = True
+            except Exception:
+                current_app.logger.exception("Malformed delivery receipt")
+        if raise_exception:
+            raise Exception("No Quota Left")
 
         return delivered_event_set, failed_event_set
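check_delivery_receipts now walks the failed receipts and raises once SNS reports "No quota left for account", so quota exhaustion surfaces as an error instead of a buried log line. A hedged example of the parse-and-flag pattern on sample receipts; the flattened delivery.providerResponse key comes from this diff, but the full receipt format SNS writes to CloudWatch Logs has more fields than shown here:

```python
import json

# Sample receipts for illustration only; real CloudWatch log events carry many more fields.
failed_event_set = [
    json.dumps({"delivery.providerResponse": "No quota left for account"}),
    "{not valid json",  # malformed receipts are logged and skipped
]

raise_exception = False
for failure in failed_event_set:
    try:
        failure = json.loads(failure)
        if failure["delivery.providerResponse"] == "No quota left for account":
            print("NO QUOTA LEFT TO SEND MESSAGES")
            raise_exception = True
    except Exception:
        print("Malformed delivery receipt")

if raise_exception:
    # Raising here makes the quota problem visible to the calling task.
    raise Exception("No Quota Left")
```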

app/clients/sms/aws_sns.py

Lines changed: 3 additions & 19 deletions
@@ -5,10 +5,12 @@
 import botocore
 import phonenumbers
 from boto3 import client
+from flask import current_app
 
 from app.clients import AWS_CLIENT_CONFIG
 from app.clients.sms import SmsClient
 from app.cloudfoundry_config import cloud_config
+from app.utils import hilite
 
 
 class AwsSnsClient(SmsClient):
@@ -66,32 +68,13 @@ def send_sms(self, to, content, reference, sender=None, international=False):
             }
         }
 
-        default_num = " ".join(self.current_app.config["AWS_US_TOLL_FREE_NUMBER"])
-        if isinstance(sender, str):
-            non_scrubbable = " ".join(sender)
-
-            self.current_app.logger.info(
-                f"notify-debug-api-1385 sender {non_scrubbable} is a {type(sender)} \
-                default is a {type(default_num)}"
-            )
-        else:
-            self.current_app.logger.warning(
-                f"notify-debug-api-1385 sender is type {type(sender)}!! {sender}"
-            )
         if self._valid_sender_number(sender):
-            self.current_app.logger.info(
-                f"notify-debug-api-1385 use valid sender {non_scrubbable} instead of default {default_num}"
-            )
 
             attributes["AWS.MM.SMS.OriginationNumber"] = {
                 "DataType": "String",
                 "StringValue": sender,
             }
         else:
-            self.current_app.logger.info(
-                f"notify-debug-api-1385 use default {default_num} instead of invalid sender"
-            )
-
             attributes["AWS.MM.SMS.OriginationNumber"] = {
                 "DataType": "String",
                 "StringValue": self.current_app.config["AWS_US_TOLL_FREE_NUMBER"],
@@ -102,6 +85,7 @@ def send_sms(self, to, content, reference, sender=None, international=False):
             response = self._client.publish(
                 PhoneNumber=to, Message=content, MessageAttributes=attributes
             )
+            current_app.logger.info(hilite(f"send response = {response}"))
         except botocore.exceptions.ClientError as e:
             self.current_app.logger.exception("An error occurred sending sms")
             raise str(e)
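With the notify-debug-api-1385 logging removed, send_sms now simply picks an origination number (the validated sender or the configured toll-free number), publishes, and logs the response. A standalone sketch of the underlying boto3 call; the region, phone numbers, and message are placeholders, and running it for real requires AWS credentials and SMS spending limits:

```python
import boto3

# Placeholder values; the app takes these from its config and cloud_config.
sns = boto3.client("sns", region_name="us-west-2")
origination_number = "+18005550100"   # hypothetical toll-free sender
recipient = "+15555550123"            # hypothetical recipient

attributes = {
    "AWS.MM.SMS.OriginationNumber": {
        "DataType": "String",
        "StringValue": origination_number,
    }
}

response = sns.publish(
    PhoneNumber=recipient,
    Message="Hello from Notify",
    MessageAttributes=attributes,
)
print(response["MessageId"])  # the message id AWS assigns to the SMS
```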

app/delivery/send_to_providers.py

Lines changed: 0 additions & 20 deletions
@@ -7,7 +7,6 @@
 from flask import current_app
 
 from app import (
-    aws_pinpoint_client,
     create_uuid,
     db,
     notification_provider_clients,
@@ -101,10 +100,6 @@ def send_sms_to_provider(notification):
             notification.job_row_number,
         )
 
-        # TODO This is temporary to test the capability of validating phone numbers
-        # The future home of the validation is TBD
-        _experimentally_validate_phone_numbers(recipient)
-
         # TODO current we allow US phone numbers to be uploaded without the country code (1)
         # This will break certain international phone numbers (Norway, Denmark, East Timor)
         # When we officially announce support for international numbers, US numbers must contain
@@ -134,9 +129,6 @@ def send_sms_to_provider(notification):
         # interleave spaces to bypass PII scrubbing since sender number is not PII
         arr = list(real_sender_number)
         real_sender_number = " ".join(arr)
-        current_app.logger.info(
-            f"#notify-debug-api-1701 real sender number going to AWS is {real_sender_number}"
-        )
         message_id = provider.send_sms(**send_sms_kwargs)
 
         update_notification_message_id(notification.id, message_id)
@@ -162,18 +154,6 @@ def send_sms_to_provider(notification):
 
     return message_id
 
-def _experimentally_validate_phone_numbers(recipient):
-    if "+" not in recipient:
-        recipient_lookup = f"+{recipient}"
-    else:
-        recipient_lookup = recipient
-    if recipient_lookup in current_app.config["SIMULATED_SMS_NUMBERS"] and os.getenv(
-        "NOTIFY_ENVIRONMENT"
-    ) in ["development", "test"]:
-        current_app.logger.info(hilite("#notify-debug-validate-phone-number fired"))
-        aws_pinpoint_client.validate_phone_number("01", recipient)
-
-
 def _get_verify_code(notification):
     key = f"2facode-{notification.id}".replace(" ", "")
     recipient = redis_store.get(key)

app/job/rest.py

Lines changed: 35 additions & 4 deletions
@@ -31,7 +31,7 @@
 )
 from app.dao.services_dao import dao_fetch_service_by_id
 from app.dao.templates_dao import dao_get_template_by_id
-from app.enums import JobStatus
+from app.enums import JobStatus, NotificationStatus
 from app.errors import InvalidRequest, register_errors
 from app.schemas import (
     JobSchema,
@@ -62,6 +62,40 @@ def get_job_by_service_and_job_id(service_id, job_id):
     return jsonify(data=data)
 
 
+@job_blueprint.route("/<job_id>/status", methods=["GET"])
+def get_job_status(service_id, job_id):
+    """Fast job status endpoint for real-time polling. No S3 calls, no caching."""
+    check_suspicious_id(service_id, job_id)
+
+    job = dao_get_job_by_service_id_and_job_id(service_id, job_id)
+    statistics = dao_get_notification_outcomes_for_job(service_id, job_id)
+
+    delivered_statuses = (NotificationStatus.DELIVERED, NotificationStatus.SENT)
+    failed_statuses = (NotificationStatus.FAILED,) + NotificationStatus.failed_types()
+
+    delivered_count = failed_count = 0
+    for stat in statistics:
+        if stat.status in delivered_statuses:
+            delivered_count += stat.count
+        elif stat.status in failed_statuses:
+            failed_count += stat.count
+
+    total_count = job.notification_count or 0
+    pending_calculated = max(0, total_count - delivered_count - failed_count)
+
+    is_finished = job.processing_finished is not None and pending_calculated == 0
+
+    response_data = {
+        "total": total_count,
+        "delivered": delivered_count,
+        "failed": failed_count,
+        "pending": pending_calculated,
+        "finished": is_finished,
+    }
+
+    return jsonify(response_data)
+
+
 @job_blueprint.route("/<job_id>/cancel", methods=["POST"])
 def cancel_job(service_id, job_id):
     check_suspicious_id(service_id, job_id)
@@ -263,9 +297,6 @@ def create_job(service_id):
     original_file_name = data.get("original_file_name")
     data.update({"service": service_id})
     try:
-        current_app.logger.info(
-            f"#notify-debug-s3-partitioning DATA IN CREATE_JOB: {data}"
-        )
         data.update(**get_job_metadata_from_s3(service_id, data["id"]))
     except KeyError:
         raise InvalidRequest(
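The new GET /<job_id>/status route returns lightweight delivery counts so a client can poll job progress without touching S3. A hedged example of polling it with requests; the base URL, auth header, IDs, and the blueprint's /service/<service_id>/job prefix are assumptions, since the blueprint registration and auth scheme are not part of this diff:

```python
import time

import requests

# Hypothetical values; substitute real service/job ids, host, and credentials.
BASE_URL = "http://localhost:6011"
SERVICE_ID = "11111111-1111-1111-1111-111111111111"
JOB_ID = "22222222-2222-2222-2222-222222222222"
HEADERS = {"Authorization": "Bearer <token>"}

while True:
    resp = requests.get(
        f"{BASE_URL}/service/{SERVICE_ID}/job/{JOB_ID}/status", headers=HEADERS
    )
    resp.raise_for_status()
    status = resp.json()
    # e.g. {"total": 100, "delivered": 90, "failed": 2, "pending": 8, "finished": False}
    print(status)
    if status["finished"]:
        break
    time.sleep(2)
```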

app/utils.py

Lines changed: 0 additions & 1 deletion
@@ -149,7 +149,6 @@ def debug_not_production(msg):
 def emit_job_update_summary(job):
     from app import socketio
 
-    current_app.logger.info(f"Emitting summary for job {job.id}")
     socketio.emit(
         "job_updated",
         {
