Enhance AWS S3 Integration for Multi-Region Support (#19)
bardabun authored Apr 1, 2024
1 parent d73516e commit be8312c
Showing 2 changed files with 14 additions and 13 deletions.
README.md (3 changes: 2 additions & 1 deletion)
@@ -65,7 +65,8 @@ Give your Cloudformation a few minutes to be created, and that's it!
 
 ## Changelog:
 - **1.3.3**
-  - Updates the REGIONS list to include additional AWS regions.
+  - Expanded the REGIONS list with new AWS regions.
+  - Enhanced the script for better AWS multi-region support by implementing region-specific S3 client usage.
 - **1.3.2**
   - Upgrade the runtime from Go 1.x to the provided Amazon Linux 2023.
 - **1.3.1**
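The "region-specific S3 client usage" in the 1.3.3 entry boils down to pinning one boto3 client per target region. Below is a minimal sketch of that pattern; the helper name, placeholder credentials, and two-region subset are illustrative only, and the real implementation is the release/main.py diff that follows:

import boto3

def make_region_client(access_key, secret_key, region):
    # Pin the client to one region so each request is signed for the
    # region hosting the matching logzio-aws-integrations-<region> bucket.
    session = boto3.Session(
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        region_name=region,
    )
    return session.client('s3')

for region in ('us-east-1', 'eu-west-1'):  # illustrative subset of REGIONS
    s3 = make_region_client('<ACCESS_KEY>', '<SECRET_KEY>', region)
    print(s3.meta.region_name)  # each client is bound to its own region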
release/main.py (24 changes: 12 additions & 12 deletions)
@@ -8,7 +8,8 @@
     'eu-central-1', 'eu-west-1', 'eu-west-2', 'eu-west-3', 'eu-north-1',
     'sa-east-1',
     'ca-central-1',
-    'eu-central-2', 'eu-south-1', 'eu-south-2', 'ap-south-2', 'ap-southeast-3', 'ap-southeast-4', 'ap-east-1', 'ca-west-1', 'af-south-1', 'me-central-1', 'il-central-1'
+    'eu-central-2', 'eu-south-1', 'eu-south-2', 'ap-south-2', 'ap-southeast-3', 'ap-southeast-4', 'ap-east-1', 'ca-west-1', 'af-south-1', 'me-central-1',
+    'il-central-1','me-south-1'
 ]
 
 BUCKET_NAME_PREFIX = 'logzio-aws-integrations-'
@@ -24,13 +25,14 @@
 
 
 def upload_public_to_s3(access_key, secret_key, folder_name, version_number, path_to_file):
-    s3 = get_s3_client(access_key, secret_key)
     file_name = path_to_file.split('/')[-1]
     print(f'File name: {file_name}')
     success = 0
     for region in REGIONS:
         try:
             print(f'Region: {region}')
+            # Create a new S3 client for each region
+            s3 = get_s3_client(access_key, secret_key, region)
             object_name = f'{folder_name}/{version_number}/{file_name}'
             bucket_name = f'{BUCKET_NAME_PREFIX}{region}'
             s3.upload_file(path_to_file, bucket_name, object_name, ExtraArgs={'ACL': 'public-read'})
@@ -43,7 +45,6 @@ def upload_public_to_s3(access_key, secret_key, folder_name, version_number, path_to_file):
 
 
 def cf_template_workflow(access_key, secret_key, folder_name, version_number, path_to_file):
-    s3 = get_s3_client(access_key, secret_key)
     file_name = path_to_file.split('/')[-1]
     print(f'File name: {file_name}')
     success = 0
@@ -56,30 +57,29 @@ def cf_template_workflow(access_key, secret_key, folder_name, version_number, path_to_file):
         try:
             print(f'Region: {region}')
             print(f'Version: {version_number}')
-            tmp_arr = []
-            for line in base_arr:
-                tmp_line = line.replace(REGION_PLACEHOLDER, region)
-                tmp_line = tmp_line.replace(VERSION_PLACEHOLDER, version_number)
-                tmp_arr.append(tmp_line)
-            new_path = f'./{file_name}'
+            # Adjust the template content for each region
+            tmp_arr = [line.replace(REGION_PLACEHOLDER, region).replace(VERSION_PLACEHOLDER, version_number) for line in base_arr]
+            new_path = f'./{file_name}'
             with open(new_path, 'w') as new_file:
                 new_file.writelines(tmp_arr)
             object_name = f'{folder_name}/{version_number}/{file_name}'
             bucket_name = f'{BUCKET_NAME_PREFIX}{region}'
+            # Create a new S3 client for the current region
+            s3 = get_s3_client(access_key, secret_key, region)
             s3.upload_file(new_path, bucket_name, object_name, ExtraArgs={'ACL': 'public-read'})
             success += 1
+            os.remove(new_path)  # Clean up the locally created file
         except Exception as e:
             print(f'Error occurred for region {region}: {e}')
             print('Skipping this region')
 
     print(f'Uploaded to {success} regions')
-    os.remove(new_path)
 
 
-def get_s3_client(access_key, secret_key):
+def get_s3_client(access_key, secret_key, region_name):
     session = boto3.Session(
         aws_access_key_id=access_key,
         aws_secret_access_key=secret_key,
+        region_name=region_name
     )
 
     return session.client('s3')
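A hedged usage sketch of the updated helper. The script's entry point is not part of the shown hunks, so the import path, folder name, and file path below are placeholders:

# Hypothetical invocation, assuming we run from the release/ directory;
# the script's real argument handling lives outside this diff.
from main import upload_public_to_s3

upload_public_to_s3(
    access_key='<AWS_ACCESS_KEY>',
    secret_key='<AWS_SECRET_KEY>',
    folder_name='<integration-folder>',
    version_number='1.3.3',
    path_to_file='./function.zip',
)

Creating the client inside the region loop trades a little per-iteration session setup for correctness: every upload is signed for its bucket's home region, and the surrounding try/except lets a failing region be skipped instead of aborting the whole run.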
