@@ -133,6 +133,7 @@
 from __future__ import annotations

 import base64
+import json
 import socket
 import string
 import uuid
@@ -144,7 +145,7 @@
 from vine import ensure_promise, promise, transform

 from kombu.asynchronous import get_event_loop
-from kombu.asynchronous.aws.ext import boto3, exceptions
+from kombu.asynchronous.aws.ext import boto3, exceptions, sqs_extended_client
 from kombu.asynchronous.aws.sqs.connection import AsyncSQSConnection
 from kombu.asynchronous.aws.sqs.message import AsyncMessage
 from kombu.log import get_logger
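The new import treats sqs_extended_client as an optional dependency. The ext module itself is not part of this diff, but the truthiness checks further down only make sense if the import falls back to None when the package is absent; a minimal sketch of that assumed behaviour in kombu/asynchronous/aws/ext.py:

# Assumed shape of the optional import in kombu/asynchronous/aws/ext.py;
# shown here only to explain the "if sqs_extended_client and ..." guards below.
try:
    import sqs_extended_client
except ImportError:
    sqs_extended_client = None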
@@ -498,6 +499,25 @@ def _message_to_python(self, message, queue_name, q_url):
                 message['ReceiptHandle'],
             )
         else:
+
+            if (
+                sqs_extended_client and
+                isinstance(payload, list)
+                and payload[0] == sqs_extended_client.client.MESSAGE_POINTER_CLASS
+            ):
+                # Used the sqs_extended_client, so we need to fetch the file from S3 and use that as the payload
+                s3_details = payload[1]
+                s3_bucket_name, s3_key = s3_details["s3BucketName"], s3_details["s3Key"]
+
+                s3_client = self.s3()
+                response = s3_client.get_object(Bucket=s3_bucket_name, Key=s3_key)
+
+                # The message body is under a wrapper class called StreamingBody
+                streaming_body = response["Body"]
+                payload = json.loads(
+                    self._optional_b64_decode(streaming_body.read())
+                )
+
             try:
                 properties = payload['properties']
                 delivery_info = payload['properties']['delivery_info']
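For reference, a message offloaded by the SQS extended client arrives as a two-element pointer rather than the real body, which is what the branch above unwraps. A sketch of that payload shape; the bucket and key values are illustrative, only the identifiers and dict keys are taken from the diff:

# Illustrative pointer payload; bucket and key names are made up.
pointer_payload = [
    sqs_extended_client.client.MESSAGE_POINTER_CLASS,  # marker checked in payload[0]
    {"s3BucketName": "my-large-payload-bucket", "s3Key": "example-object-key"},  # payload[1]
]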
@@ -713,6 +733,32 @@ def close(self):
         # if "can't set attribute" not in str(exc):
         #     raise

+    def new_s3_client(
+        self, region, access_key_id, secret_access_key, session_token=None
+    ):
+        session = boto3.session.Session(
+            region_name=region,
+            aws_access_key_id=access_key_id,
+            aws_secret_access_key=secret_access_key,
+            aws_session_token=session_token,
+        )
+        is_secure = self.is_secure if self.is_secure is not None else True
+        client_kwargs = {"use_ssl": is_secure}
+
+        if self.endpoint_url is not None:
+            client_kwargs["endpoint_url"] = self.endpoint_url
+
+        client = session.client("s3", **client_kwargs)
+
+        return client
+
+    def s3(self):
+        return self.new_s3_client(
+            region=self.region,
+            access_key_id=self.conninfo.userid,
+            secret_access_key=self.conninfo.password,
+        )
+
     def new_sqs_client(self, region, access_key_id,
                        secret_access_key, session_token=None):
         session = boto3.session.Session(
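new_s3_client mirrors new_sqs_client: it builds a boto3 session from the connection credentials and honours is_secure and endpoint_url. A hedged usage sketch, assuming channel is an instance of this transport's Channel with valid credentials and that the bucket and key exist (both names are made up):

# Illustrative only; channel, bucket and key are assumptions, not part of the diff.
s3_client = channel.s3()
response = s3_client.get_object(Bucket="my-large-payload-bucket", Key="example-object-key")
raw_body = response["Body"].read()  # StreamingBody -> bytes, as consumed in _message_to_python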
@@ -729,7 +775,13 @@ def new_sqs_client(self, region, access_key_id,
             client_kwargs['endpoint_url'] = self.endpoint_url
         client_config = self.transport_options.get('client-config') or {}
         config = Config(**client_config)
-        return session.client('sqs', config=config, **client_kwargs)
+        client = session.client('sqs', config=config, **client_kwargs)
+
+        if self.transport_options.get('large_payload_bucket') and sqs_extended_client:
+            client.large_payload_support = self.transport_options.get('large_payload_bucket')
+            client.use_legacy_attribute = False
+
+        return client

     def sqs(self, queue=None):
         if queue is not None and self.predefined_queues:
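To opt in, an application sets the large_payload_bucket transport option; when the optional sqs_extended_client package is importable, the SQS client gains large_payload_support so that large message bodies can be offloaded to S3 by the extended client. A configuration sketch using kombu's Connection API; the bucket name is illustrative:

from kombu import Connection

# Bucket name is made up; requires the optional sqs_extended_client package.
connection = Connection(
    "sqs://",
    transport_options={"large_payload_bucket": "my-large-payload-bucket"},
)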