diff --git a/.github/workflows/call-build-packages.yaml b/.github/workflows/call-build-packages.yaml
index 4b20e7831c3..6c2208af10b 100644
--- a/.github/workflows/call-build-packages.yaml
+++ b/.github/workflows/call-build-packages.yaml
@@ -104,7 +104,7 @@ jobs:
         working-directory: packaging

      - name: Upload the ${{ matrix.distro }} artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
        with:
          name: packages-${{ inputs.version }}-${{ steps.formatted_distro.outputs.replaced }}
          path: packaging/packages/
diff --git a/.github/workflows/call-integration-image-build.yaml b/.github/workflows/call-integration-image-build.yaml
index 90bd4b7d3f3..d5dc21841d5 100644
--- a/.github/workflows/call-integration-image-build.yaml
+++ b/.github/workflows/call-integration-image-build.yaml
@@ -79,7 +79,7 @@ jobs:
        shell: bash

      - name: Upload artifact
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
        with:
          name: pr-${{ github.event.pull_request.number }}-image
          path: /tmp/pr-image.tar
diff --git a/.github/workflows/cron-scorecards-analysis.yaml b/.github/workflows/cron-scorecards-analysis.yaml
index 5caac355178..1ec7d311cb2 100644
--- a/.github/workflows/cron-scorecards-analysis.yaml
+++ b/.github/workflows/cron-scorecards-analysis.yaml
@@ -44,7 +44,7 @@ jobs:
          publish_results: true

      - name: "Upload artifact"
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
        with:
          name: SARIF file
          path: scorecard-results.sarif
diff --git a/.github/workflows/cron-trivy.yaml b/.github/workflows/cron-trivy.yaml
index 27c8ca722c2..973d4e3f0d0 100644
--- a/.github/workflows/cron-trivy.yaml
+++ b/.github/workflows/cron-trivy.yaml
@@ -80,7 +80,7 @@ jobs:

      # In case we need to analyse the uploaded files for some reason.
      - name: Detain results for debug if needed
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
        with:
          name: trivy-results-${{ matrix.local_tag }}.sarif
          path: trivy-results-${{ matrix.local_tag }}.sarif
diff --git a/.github/workflows/pr-fuzz.yaml b/.github/workflows/pr-fuzz.yaml
index 384148e0a45..805b6a30822 100644
--- a/.github/workflows/pr-fuzz.yaml
+++ b/.github/workflows/pr-fuzz.yaml
@@ -25,7 +25,7 @@ jobs:
          dry-run: false
          language: c
      - name: Upload Crash
-        uses: actions/upload-artifact@v1
+        uses: actions/upload-artifact@v4
        if: failure() && steps.build.outcome == 'success'
        with:
          name: artifacts
diff --git a/.github/workflows/windows-build.yaml b/.github/workflows/windows-build.yaml
index e539f0c9445..941b1134531 100644
--- a/.github/workflows/windows-build.yaml
+++ b/.github/workflows/windows-build.yaml
@@ -51,7 +51,7 @@ jobs:
        working-directory: build

      - name: Upload build packages
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
        with:
          name: windows-packages
          path: |
@@ -73,7 +73,7 @@
          docker save -o windows-${{ github.sha }}.tar fluent/fluent-bit:windows-${{ github.sha }}

      - name: Upload containers
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
        with:
          name: windows-containers
          path: windows-${{ github.sha }}.tar
diff --git a/plugins/out_datadog/datadog.c b/plugins/out_datadog/datadog.c
index f684842776c..8d903753c20 100644
--- a/plugins/out_datadog/datadog.c
+++ b/plugins/out_datadog/datadog.c
@@ -25,6 +25,7 @@
 #include
 #include
 #include
+#include

 #include

@@ -354,7 +355,11 @@ static void cb_datadog_flush(struct flb_event_chunk *event_chunk,
        FLB_OUTPUT_RETURN(FLB_ERROR);
    }

+    /* Add the required headers to the URI */
    flb_http_add_header(client, "User-Agent", 10, "Fluent-Bit", 10);
+    flb_http_add_header(client, FLB_DATADOG_API_HDR, sizeof(FLB_DATADOG_API_HDR) - 1, ctx->api_key, flb_sds_len(ctx->api_key));
+    flb_http_add_header(client, FLB_DATADOG_ORIGIN_HDR, sizeof(FLB_DATADOG_ORIGIN_HDR) - 1, "Fluent-Bit", 10);
+    flb_http_add_header(client, FLB_DATADOG_ORIGIN_VERSION_HDR, sizeof(FLB_DATADOG_ORIGIN_VERSION_HDR) - 1, FLB_VERSION_STR, sizeof(FLB_VERSION_STR) - 1);
    flb_http_add_header(client, FLB_DATADOG_CONTENT_TYPE, sizeof(FLB_DATADOG_CONTENT_TYPE) - 1, FLB_DATADOG_MIME_JSON, sizeof(FLB_DATADOG_MIME_JSON) - 1);
diff --git a/plugins/out_datadog/datadog.h b/plugins/out_datadog/datadog.h
index abcf8d26952..1ca2d6f058a 100644
--- a/plugins/out_datadog/datadog.h
+++ b/plugins/out_datadog/datadog.h
@@ -36,6 +36,10 @@
 #define FLB_DATADOG_REMAP_PROVIDER "ecs"
 #define FLB_DATADOG_TAG_SEPERATOR ","

+#define FLB_DATADOG_API_HDR "DD-API-KEY"
+#define FLB_DATADOG_ORIGIN_HDR "DD-EVP-ORIGIN"
+#define FLB_DATADOG_ORIGIN_VERSION_HDR "DD-EVP-ORIGIN-VERSION"
+
 #define FLB_DATADOG_CONTENT_TYPE "Content-Type"
 #define FLB_DATADOG_MIME_JSON "application/json"

diff --git a/plugins/out_datadog/datadog_conf.c b/plugins/out_datadog/datadog_conf.c
index 3ddad3e276b..def064c48c5 100644
--- a/plugins/out_datadog/datadog_conf.c
+++ b/plugins/out_datadog/datadog_conf.c
@@ -115,14 +115,13 @@ struct flb_out_datadog *flb_datadog_conf_create(struct flb_output_instance *ins,
    ctx->remap = tmp && (strlen(tmp) == strlen(FLB_DATADOG_REMAP_PROVIDER)) && \
        (strncmp(tmp, FLB_DATADOG_REMAP_PROVIDER, strlen(tmp)) == 0);

-    ctx->uri = flb_sds_create("/v1/input/");
+    ctx->uri = flb_sds_create("/api/v2/logs");
    if (!ctx->uri) {
        flb_plg_error(ctx->ins, "error on uri generation");
        flb_datadog_conf_destroy(ctx);
        return NULL;
    }
-    /* Add the api_key to the URI */
-    ctx->uri = flb_sds_cat(ctx->uri, ctx->api_key, flb_sds_len(ctx->api_key));
+
    flb_plg_debug(ctx->ins, "uri: %s", ctx->uri);

    /* Get network configuration */
diff --git a/plugins/out_datadog/datadog_conf.h b/plugins/out_datadog/datadog_conf.h
index a4547ba5e9a..057a5e5f2dc 100644
--- a/plugins/out_datadog/datadog_conf.h
+++ b/plugins/out_datadog/datadog_conf.h
@@ -21,7 +21,7 @@
 #define FLB_OUT_DATADOG_CONF_H

 #include
-#include
+#include

 #include "datadog.h"
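
For reference, here is a minimal standalone C sketch, not part of the patch, of what the out_datadog change amounts to on the wire: the API key moves out of the request path (previously appended to /v1/input/) and into the DD-API-KEY header, the new DD-EVP-ORIGIN and DD-EVP-ORIGIN-VERSION headers identify the sender, and the path becomes the fixed /api/v2/logs. It also demonstrates the sizeof(literal) - 1 length idiom used by the flb_http_add_header calls above. The api_key and version strings are placeholders, not values from the source, and the program only prints an approximate request preamble; it does not use the fluent-bit HTTP client.

/* sketch.c - illustrative only; build with: cc sketch.c -o sketch */
#include <stdio.h>
#include <string.h>

/* Same header-name constants introduced in datadog.h */
#define FLB_DATADOG_API_HDR            "DD-API-KEY"
#define FLB_DATADOG_ORIGIN_HDR         "DD-EVP-ORIGIN"
#define FLB_DATADOG_ORIGIN_VERSION_HDR "DD-EVP-ORIGIN-VERSION"
#define FLB_DATADOG_CONTENT_TYPE       "Content-Type"
#define FLB_DATADOG_MIME_JSON          "application/json"

int main(void)
{
    const char *api_key = "<redacted>"; /* placeholder for ctx->api_key */
    const char *version = "0.0.0";      /* placeholder for FLB_VERSION_STR */

    /* sizeof("literal") counts the trailing NUL, so sizeof(...) - 1 equals
     * strlen(...) and is computed at compile time. */
    printf("header name length check: %zu == %zu\n",
           sizeof(FLB_DATADOG_API_HDR) - 1, strlen(FLB_DATADOG_API_HDR));

    /* Old shape: POST /v1/input/<api_key> with the key embedded in the path.
     * New shape: fixed path, key and origin metadata carried as headers. */
    printf("POST /api/v2/logs HTTP/1.1\n");
    printf("User-Agent: Fluent-Bit\n");
    printf("%s: %s\n", FLB_DATADOG_API_HDR, api_key);
    printf("%s: Fluent-Bit\n", FLB_DATADOG_ORIGIN_HDR);
    printf("%s: %s\n", FLB_DATADOG_ORIGIN_VERSION_HDR, version);
    printf("%s: %s\n", FLB_DATADOG_CONTENT_TYPE, FLB_DATADOG_MIME_JSON);
    return 0;
}

A side effect of the fixed path is that the configured URI no longer contains a secret, which is why the debug log of ctx->uri in datadog_conf.c is harmless after this change.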