Skip to content

Commit

Permalink
go
Browse files Browse the repository at this point in the history
  • Loading branch information
jackietung-redpanda committed Sep 3, 2024
1 parent 8f121c3 commit 4d75827
Show file tree
Hide file tree
Showing 2 changed files with 58 additions and 45 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/release_rpk_connect_plugin_dry_run.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ jobs:
,sdlc/prod/github/rpk_plugin_publisher
parse-json-secrets: true

- run: "ACCESS_THING ${{ env.RPK_PLUGIN_PUBLISHER_AWS_ACCESS_KEY_ID }}"
- run: "echo ACCESS_THING ${{ env.RPK_PLUGIN_PUBLISHER_AWS_ACCESS_KEY_ID }}"

- name: Check Out Repo
uses: actions/checkout@v4
Expand Down
101 changes: 57 additions & 44 deletions resources/plugin_uploader/plugin_uploader.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,6 @@ def create_tar_gz_archive(single_filepath: str) -> str:
TAG_VERSION = "redpanda/version"



@contextmanager
def cwd(new_dir: str):
# Code to acquire resource, e.g.:
Expand All @@ -165,6 +164,53 @@ def cwd(new_dir: str):
os.chdir(old_dir)


def create_and_upload_one_archive(artifact: Artifact, plugin_config: PluginConfig, project_root_dir: str, version: str,
                                  bucket: str, region: str, dry_run: bool):
    """Package one built plugin binary into a .tar.gz and upload it to S3.

    Hashing and archiving happen relative to ``project_root_dir``. In
    dry-run mode no S3 client is constructed and the would-be upload is
    only logged. The temporary archive file is removed unconditionally,
    whether or not the upload succeeds.
    """
    # Only create an S3 client when we actually intend to upload.
    s3_bucket_client = S3BucketClient(bucket, region) if not dry_run else None

    logging.info(f"Processing {artifact}")

    with cwd(project_root_dir):
        digest = get_binary_sha256_digest(artifact.path)
        logging.info(f"Binary SHA256 = {digest}")

        tmp_archive = None
        try:
            tmp_archive = create_tar_gz_archive(artifact.path)
            logging.info(f"Created archive {tmp_archive}")

            s3_path_for_archive = plugin_config.get_archive_full_path(
                binary_artifact=artifact, version=version
            )

            # Object tags let consumers identify the archive without
            # downloading it.
            tags = {
                TAG_BINARY_NAME: plugin_config.binary_name,
                TAG_BINARY_SHA256: digest,
                TAG_GOOS: artifact.goos,
                TAG_GOARCH: artifact.goarch,
                TAG_VERSION: version,
            }

            if dry_run:
                logging.info(
                    f"DRY-RUN - Would have uploaded archive to S3 bucket {bucket} as {s3_path_for_archive}"
                )
                logging.info(f"Tags: {json.dumps(tags, indent=4)}")
            else:
                logging.info(
                    f"Uploading archive to S3 bucket {bucket} as {s3_path_for_archive}"
                )
                assert (
                    s3_bucket_client is not None
                ), "s3_bucket_client should be initialized in non-dry-run mode"
                s3_bucket_client.upload_file_with_tags(
                    file=tmp_archive, object_path=s3_path_for_archive, tags=tags
                )
        finally:
            # Never leave the temp archive behind, success or failure.
            if tmp_archive and os.path.exists(tmp_archive):
                os.unlink(tmp_archive)

    logging.info("DONE")


def create_and_upload_archives(
project_root_dir: str,
plugin_config: PluginConfig,
Expand All @@ -174,49 +220,16 @@ def create_and_upload_archives(
version: str,
dry_run: bool,
):
if dry_run:
s3_bucket_client = None
else:
s3_bucket_client = S3BucketClient(bucket, region)
with cwd(project_root_dir):
for artifact in artifacts:
logging.info(f"Processing {artifact}")
binary_sha256 = get_binary_sha256_digest(artifact.path)
logging.info(f"Binary SHA256 = {binary_sha256}")
tmp_archive = None
try:
tmp_archive = create_tar_gz_archive(artifact.path)
logging.info(f"Created archive {tmp_archive}")
s3_path_for_archive = plugin_config.get_archive_full_path(
binary_artifact=artifact, version=version
)

tags = {
TAG_BINARY_NAME: plugin_config.binary_name,
TAG_BINARY_SHA256: binary_sha256,
TAG_GOOS: artifact.goos,
TAG_GOARCH: artifact.goarch,
TAG_VERSION: version,
}
if dry_run:
logging.info(
f"DRY-RUN - Would have uploaded archive to S3 bucket {bucket} as {s3_path_for_archive}"
)
logging.info(f"Tags: {json.dumps(tags, indent=4)}")
else:
logging.info(
f"Uploading archive to S3 bucket {bucket} as {s3_path_for_archive}"
)
assert (
s3_bucket_client is not None
), "s3_bucket_client should be initialized in non-dry-run mode"
s3_bucket_client.upload_file_with_tags(
file=tmp_archive, object_path=s3_path_for_archive, tags=tags
)
finally:
if tmp_archive and os.path.exists(tmp_archive):
os.unlink(tmp_archive)
logging.info("DONE")
for artifact in artifacts:
create_and_upload_one_archive(
artifact=artifact,
plugin_config=plugin_config,
project_root_dir=project_root_dir,
version=version,
bucket=bucket,
region=region,
dry_run=dry_run,
)


def get_max_version_str(version_strs: list[str]) -> str | None:
Expand Down

0 comments on commit 4d75827

Please sign in to comment.