diff --git a/.github/Dockerfile b/.github/Dockerfile deleted file mode 100644 index 1d064b714..000000000 --- a/.github/Dockerfile +++ /dev/null @@ -1,52 +0,0 @@ -# Use a Rust base image -FROM rust:bookworm as builder -ARG BUILD_TYPE -RUN apt-get update && apt-get install -y libclang-dev cmake libssl-dev libc++-dev libc++abi-dev lld protobuf-compiler - -# Install nvm, npm and node -RUN rm /bin/sh && ln -s /bin/bash /bin/sh -ENV NVM_DIR /usr/local/nvm -ENV NODE_VERSION v16.20.1 -RUN mkdir $NVM_DIR - -RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.4/install.sh | bash -RUN source $NVM_DIR/nvm.sh \ - && nvm install $NODE_VERSION \ - && nvm alias default $NODE_VERSION \ - && nvm use default - -ENV NODE_PATH $NVM_DIR/v$NODE_VERSION/lib/node_modules -ENV PATH $NVM_DIR/versions/node/$NODE_VERSION/bin:$PATH -RUN node -v - -# Create a new directory for your app -WORKDIR /app - -# Copy the Cargo.toml and Cargo.lock files to the container - -COPY . . - -# Build the dependencies (cached) - -RUN cargo clean -RUN rustup component add rustfmt -RUN CARGO_BUILD_RERUN_IF_CHANGED=1 cargo build $([ "$BUILD_TYPE" = "release" ] && echo "--release") - -COPY .github/run-main*.sh /entrypoints/ -RUN chmod 755 /entrypoints/*.sh - -# Runtime stage -FROM debian:bookworm-slim as runner -ARG BUILD_TYPE - -# Install runtime dependencies only -RUN apt-get update && apt-get install -y libssl3 ca-certificates - -# Copy only necessary files from builder -WORKDIR /app -COPY --from=builder /app/target/${BUILD_TYPE:-debug}/shinkai_node /app/ -COPY --from=builder /entrypoints/*.sh /app/ - -# Set entrypoint -EXPOSE 9550 -ENTRYPOINT ["/bin/sh", "-c", "/app/shinkai_node"] diff --git a/.github/build-workflow.yml b/.github/build-workflow.yml deleted file mode 100644 index cc65bd514..000000000 --- a/.github/build-workflow.yml +++ /dev/null @@ -1,63 +0,0 @@ -name: Build Workflow - -on: - workflow_call: - inputs: - REGISTRY: - description: Docker registry - type: string - required: true - 
REPOSITORY: - description: Docker repository - type: string - required: true - - secrets: - DOCKERHUB_USERNAME: - description: Dockerhub username - required: true - DOCKERHUB_TOKEN: - description: Dockerhub username - required: true - -env: - IMAGE_TAG: ${{ github.sha }} - -jobs: - build-docker-image: - name: build docker image - runs-on: self-hosted - - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Login to Docker Hub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set Docker image tags - id: meta - uses: docker/metadata-action@v4 - with: - images: | - ${{ inputs.REGISTRY }}/${{ inputs.REPOSITORY }} - tags: | - type=ref,event=branch - type=pep440,pattern={{version}} - type=sha - - - name: Build and push ${{ inputs.REGISTRY }}/${{ inputs.REPOSITORY }} - uses: docker/build-push-action@v3 - with: - context: . 
- push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/build.yml b/.github/build.yml deleted file mode 100644 index d01ff2f12..000000000 --- a/.github/build.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Build shinkai-node docker image - -on: - push: - branches: - - 'main' - - 'devops' - tags: - - '[0-9]+.[0-9]+.[0-9]+' - - 'v[0-9]+.[0-9]+.[0-9]+' - -jobs: - build-and-deploy: - uses: dcSpark/shinkai-node/.github/workflows/build-workflow.yml@devops - with: - REGISTRY: dcspark - REPOSITORY: shinkai-node - secrets: - DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} - \ No newline at end of file diff --git a/.github/run-main-cargo-tests.sh b/.github/run-main-cargo-tests.sh deleted file mode 100644 index a1cd2a9e4..000000000 --- a/.github/run-main-cargo-tests.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -export IS_TESTING=1 -export WELCOME_MESSAGE=false -cd /app && cargo test -- --test-threads=1 diff --git a/.github/run-main-primitives-cargo-tests.sh b/.github/run-main-primitives-cargo-tests.sh deleted file mode 100644 index 595ebeac5..000000000 --- a/.github/run-main-primitives-cargo-tests.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -cd /app/shinkai-libs/shinkai-message-primitives && cargo test -- --test-threads=1 - diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml deleted file mode 100644 index bbb0f961b..000000000 --- a/.github/workflows/main.yaml +++ /dev/null @@ -1,49 +0,0 @@ -on: - push: - branches: [main] - paths: - - ".github/workflows/main.yml" - - "Cargo.*" - - "shinkai-libs/**" - - "shinkai-bin/**" - pull_request: - branches: [main] - types: [opened, reopened, synchronize, ready_for_review] - paths: - - ".github/workflows/main.yml" - - "Cargo.*" - - "shinkai-libs/**" - - "shinkai-bin/**" - -name: Shinkai Test Workflow - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - 
cancel-in-progress: true - -jobs: - test: - runs-on: self-hosted - if: ${{ github.event_name == 'push' || !github.event.pull_request.draft }} - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Add SHORT_SHA env property with commit short sha - run: echo "SHORT_SHA=`echo ${GITHUB_SHA} | cut -c1-8`" >> $GITHUB_ENV - - - name: Build testing image - run: docker build -t testing_image:${SHORT_SHA} -f .github/Dockerfile . - - - name: Run tests primitives cargo tests - run: docker run --rm --entrypoint /entrypoints/run-main-primitives-cargo-tests.sh testing_image:${SHORT_SHA} - - - name: Run tests main cargo tests - run: docker run --rm -e AWS_ACCESS_KEY_ID='${{ secrets.AWS_ACCESS_KEY_ID }}' -e AWS_SECRET_ACCESS_KEY='${{ secrets.AWS_SECRET_ACCESS_KEY }}' -e AWS_URL='${{ secrets.AWS_URL }}' --entrypoint /entrypoints/run-main-cargo-tests.sh testing_image:${SHORT_SHA} - - - name: Remove image - if: always() - run: docker rmi testing_image:${SHORT_SHA} || true - env: - FROM_WALLET_MNEMONICS: ${{ secrets.FROM_WALLET_MNEMONICS }} - FROM_WALLET_PRIVATE_KEY: ${{ secrets.FROM_WALLET_PRIVATE_KEY }} diff --git a/.github/workflows/only_linux_binary.yml b/.github/workflows/only_linux_binary.yml deleted file mode 100644 index f81cdf948..000000000 --- a/.github/workflows/only_linux_binary.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: Build shinkai-node Linux binary - -on: - workflow_dispatch: - -jobs: - build: - name: Build binary - runs-on: ubuntu-22.04 - steps: - - name: Checkout sources - uses: actions/checkout@v4 - - - name: Cache cargo assets - id: cache - uses: actions/cache@v4 - with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - target/ - key: linux-build-cargo-${{ hashFiles('**/Cargo.lock') }} - - - name: Build - run: cargo build --release - - - name: Upload binaries to release - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: target/release/shinkai_node - 
asset_name: shinkai-node-x86_64-unknown-linux-gnu - tag: ${{ github.ref }} - overwrite: true - - - name: Prepare binary files - run: | - mkdir files-to-r2 - cp target/release/shinkai_node files-to-r2/shinkai-node-${{ github.ref_name }} - cp target/release/shinkai_node files-to-r2/shinkai-node-latest - - - name: Upload binaries to R2 bucket - uses: shallwefootball/s3-upload-action@master - with: - endpoint: https://54bf1bf573b3e6471e574cc4d318db64.r2.cloudflarestorage.com - aws_key_id: ${{ secrets.R2_ACCESS_KEY_ID }} - aws_secret_access_key: ${{ secrets.R2_SECRET_ACCESS_KEY }} - aws_bucket: shinkai-download - source_dir: files-to-r2 - destination_dir: ./shinkai-node/binaries/x86_64-unknown-linux-gnu/ diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml new file mode 100644 index 000000000..df3f39e68 --- /dev/null +++ b/.github/workflows/pr-check.yml @@ -0,0 +1,47 @@ +name: PR Check + +on: + pull_request: + branches: [main, feature/tools-def-exec] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + build: + name: Check PR + runs-on: ubuntu-22.04 + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Cache cargo assets + id: cache + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + + - name: (Linux) Install build deps + run: | + sudo apt-get update && sudo apt-get install -y protobuf-compiler + + - name: build workspace + run: cargo check --workspace + + - name: run cargo test + run: cargo test --workspace -- --test-threads=1 + env: + IS_TESTING: 1 + WELCOME_MESSAGE: false + FROM_WALLET_MNEMONICS: ${{ secrets.FROM_WALLET_MNEMONICS }} + FROM_WALLET_PRIVATE_KEY: ${{ secrets.FROM_WALLET_PRIVATE_KEY }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ 
secrets.AWS_SECRET_ACCESS_KEY }} + AWS_URL: ${{ secrets.AWS_URL }} diff --git a/README.md b/README.md index dc517a80c..9156b11ea 100644 --- a/README.md +++ b/README.md @@ -81,29 +81,16 @@ For running a specific test (useful for debugging) you can use: cargo test tcp_node_test -- --nocapture --test-threads=1 ``` -### Running Dockerized Tests +### Running Tests #### Main tests ``` -# Build testing image -docker build -t testing_image -f .github/Dockerfile . +# Run tests +cargo test --workspace -# Run tests main cargo tests -docker run --entrypoint /entrypoints/run-main-cargo-tests.sh testing_image -``` - -#### WASM tests - -``` -# Build testing image - shinkai-message-wasm -docker build -t testing_image_wasm -f .github/Dockerfile.wasm . - -# Run tests shinkai-message-wasm cargo tests -docker run --entrypoint /entrypoints/run-wasm-pack-tests.sh testing_image_wasm - -# Run tests shinkai-message-wasm wasm-pack tests -docker run --entrypoint /entrypoints/run-wasm-cargo-tests.sh testing_image_wasm +# For specific projects +cargo test -p shinkai_message_primitives ``` ### Shinkai App tests diff --git a/docker-build/.gitignore b/docker-build/.gitignore deleted file mode 100644 index acdb54340..000000000 --- a/docker-build/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -shinkai_deploy -shinkai_deploy_partner \ No newline at end of file diff --git a/docker-build/Dockerfile-RELEASE b/docker-build/Dockerfile-RELEASE deleted file mode 100644 index 209bea115..000000000 --- a/docker-build/Dockerfile-RELEASE +++ /dev/null @@ -1,16 +0,0 @@ -FROM rust:1.75.0-slim-bookworm AS base-builder -RUN apt-get update && apt-get install pkg-config clang cmake libssl-dev --no-install-recommends -y -# -FROM base-builder AS shinkai_node-builder -WORKDIR shinkai -COPY . . 
-RUN cargo build --release --bin shinkai_node -# -FROM debian:bookworm-slim AS base-release -RUN apt-get update && apt-get -y upgrade && apt-get -y --no-install-recommends install ca-certificates libssl3 && apt-get -y clean -# -FROM base-release AS shinkai_node -ARG APP=/app -WORKDIR ${APP} -COPY --from=shinkai_node-builder /shinkai/target/release/shinkai_node ${APP} -CMD ["./shinkai_node"] \ No newline at end of file diff --git a/docker-build/Dockerfile-RELEASE.dockerignore b/docker-build/Dockerfile-RELEASE.dockerignore deleted file mode 100644 index d60d54f57..000000000 --- a/docker-build/Dockerfile-RELEASE.dockerignore +++ /dev/null @@ -1,9 +0,0 @@ -* -!Cargo.* -!src -!shinkai-libs/shinkai-message-primitives/src -!shinkai-libs/shinkai-message-primitives/Cargo.* -!shinkai-libs/shinkai-vector-resources/src -!shinkai-libs/shinkai-vector-resources/Cargo.* -!shinkai-libs/llama-tokenizer/src -!shinkai-libs/llama-tokenizer/Cargo.* diff --git a/docker-build/README.md b/docker-build/README.md deleted file mode 100644 index 733b1f15b..000000000 --- a/docker-build/README.md +++ /dev/null @@ -1,83 +0,0 @@ -## Local build - -Inside the folder `docker-build` run: - -```sh -DOCKER_BUILDKIT=1 docker build -f Dockerfile-RELEASE -t dcspark/shinkai-node:latest ../ -``` - -Inside the folder `docker-build`, to start the services, run: - -```sh -INITIAL_AGENT_API_KEYS=sk-abc,sk-abc docker compose up -d -``` - -The following configuration items can be set from environment: -- __INITIAL_AGENT_NAMES__=${INITIAL_AGENT_NAMES:-openai_gpt,openai_gpt_vision} -- __INITIAL_AGENT_MODELS__=${INITIAL_AGENT_MODELS:-openai:gpt-4-1106-preview,openai:gpt-4-vision-preview} -- __INITIAL_AGENT_URLS__=${INITIAL_AGENT_URLS:-https://api.openai.com,https://api.openai.com} -- __INITIAL_AGENT_API_KEYS__=${INITIAL_AGENT_API_KEYS} - -Point Visor to `http://127.0.0.1:9550` - -## Prepare for partner - -Inside the folder `docker-build` run: - -```sh -sh partner_prepare.sh -``` - -output example: - -```sh -$ sh 
partner_prepare.sh - -[INFO] Docker building dcspark/shinkai-node:latest using Dockerfile-RELEASE with source at ../ -sha256:b5fe5c4c8fc6229c15ea0cbde4881c090a0dcd72a1f6f8f42d29d7f9bfc8b4be -[INFO] Preparing docker compose environment at shinkai_deploy -[INFO] Docker save dcspark/shinkai-node:latest to dcspark_shinkai-node.tar -[INFO] Preparing partner data at shinkai_deploy_partner/shinkai_deploy.tar.gz -[INFO] Cleaning shinkai_deploy -[INFO] Send to partner the file shinkai_deploy_partner/shinkai_deploy.tar.gz -``` - -Send to partner the final output generated at `shinkai_deploy_partner/shinkai_deploy.tar.gz` - -## Partner info - -Partner extracts the file `tar xzvf shinkai_deploy.tar.gz` - -```sh -$ tar xzvf shinkai_deploy.tar.gz - -shinkai_deploy/ -shinkai_deploy/.env -shinkai_deploy/docker-compose.yml -shinkai_deploy/prepare.sh -shinkai_deploy/dcspark_shinkai-node.tar -``` - -and ends up with a folder `shinkai_deploy` containing: - -```sh -shinkai_deploy -├── dcspark_shinkai-node.tar -├── docker-compose.yml -├── .env -└── prepare.sh -``` - -runs `sh prepare.sh` that outputs additional information: - -```sh -$ sh prepare.sh - -[INFO] Docker loading dcspark_shinkai-node.tar -Loaded image: dcspark/shinkai-node:latest -[INFO] Edit ".env" if you want to start the node with preconfigured ai agents. You have the possibility to add ai agents also from Shinkai Visor. -[INFO] Once done with ".env" changes, to start on-prem infrastructure run: docker compose up -d -[INFO] Once everything is up and running, install/start Shinkai Visor and use the default provided settings on the ui. -``` - -final step is to run `docker compose up -d`. 
\ No newline at end of file diff --git a/docker-build/docker-compose.yml b/docker-build/docker-compose.yml deleted file mode 100644 index afbdca12e..000000000 --- a/docker-build/docker-compose.yml +++ /dev/null @@ -1,67 +0,0 @@ -version: '3.9' - -services: - text-embeddings-router: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-latest - container_name: text-embeddings-router - restart: "always" - command: --hostname 0.0.0.0 --port 9081 --model-id sentence-transformers/all-MiniLM-L6-v2 --revision refs/pr/21 --dtype float32 - volumes: - - embeddings-models:/data - networks: - - shinkai - ports: - - 9081:9081/tcp - logging: - driver: "json-file" - options: - max-size: "1m" - max-file: "10" - - shinkai-node: - image: dcspark/shinkai-node:latest - container_name: shinkai-node - restart: "always" - environment: - - GLOBAL_IDENTITY_NAME=@@localhost.shinkai - - FIRST_DEVICE_NEEDS_REGISTRATION_CODE=false - - STARTING_NUM_QR_PROFILES=0 - - STARTING_NUM_QR_DEVICES=0 - - INITIAL_AGENT_NAMES=${INITIAL_AGENT_NAMES:-openai_gpt,openai_gpt_vision} - - INITIAL_AGENT_MODELS=${INITIAL_AGENT_MODELS:-openai:gpt-4-1106-preview,openai:gpt-4-vision-preview} - - INITIAL_AGENT_URLS=${INITIAL_AGENT_URLS:-https://api.openai.com,https://api.openai.com} - - INITIAL_AGENT_API_KEYS=${INITIAL_AGENT_API_KEYS} - - EMBEDDINGS_SERVER_URL=http://text-embeddings-router:9081/ - - NODE_STORAGE_PATH=storage - - NODE_IP=0.0.0.0 - - NODE_PORT=9552 - - PING_INTERVAL_SECS=10 - - NODE_API_IP=0.0.0.0 - - NODE_API_PORT=9550 - - NODE_WS_PORT=9551 - - LOG_SIMPLE=true - - LOG_ALL=true - - RUST_LOG=info - volumes: - - shinode-storage:/app/storage - networks: - - shinkai - ports: - - 9550:9550/tcp - - 9551:9551/tcp - - 9552:9552/tcp - logging: - driver: "json-file" - options: - max-size: "1m" - max-file: "10" - -volumes: - shinode-storage: - name: shinode-storage - embeddings-models: - name: embeddings-models - -networks: - shinkai: - name: shinkai diff --git a/docker-build/partner_prepare.sh 
b/docker-build/partner_prepare.sh deleted file mode 100644 index c4eb5fea4..000000000 --- a/docker-build/partner_prepare.sh +++ /dev/null @@ -1,225 +0,0 @@ -#!/bin/sh -set -e -set -o noglob - - -SED_COMMAND=sed -COPY_COMMAND=cp - -SHINKAI_NODE_IMAGE=${SHINKAI_NODE_IMAGE:-dcspark/shinkai-node} -SHINKAI_NODE_VERSION=${SHINKAI_NODE_VERSION:-latest} - -SHINKAI_COMPOSE_FILE=docker-compose.yml -SHINKAI_NODE_DOCKERFILE=Dockerfile-RELEASE - -SHINKAI_NODE_ARCHIVE=dcspark_shinkai-node.tar -SHINKAI_SOURCE_PATH=../ - -DOCKER_BUILD_CMD="docker build --quiet" -DOCKER_COMPOSE_CMD="docker compose" # docker-compose -DOCKER_LOAD_CMD="docker load --input" -DOCKER_SAVE_CMD="docker save --output" - -SHINKAI_TMP_LOCAL_FOLDER=shinkai_deploy -SHINKAI_TMP_PARTNER_FOLDER=shinkai_deploy_partner -DOCKER_COMPOSE_ENV_FILE=.env -DOCKER_COMPOSE_ENV_DATA=$(cat << EOF -# -# single agent example -# -#INITIAL_AGENT_NAMES=openai_gpt -#INITIAL_AGENT_URLS=https://api.openai.com -#INITIAL_AGENT_MODELS=openai:gpt-4-1106-preview -#INITIAL_AGENT_API_KEYS=sk-abc -# -# multi agent example -# -#INITIAL_AGENT_NAMES=openai_gpt,openai_gpt_vision -#INITIAL_AGENT_URLS=https://api.openai.com,https://api.openai.com -#INITIAL_AGENT_MODELS=openai:gpt-4-1106-preview,openai:gpt-4-vision-preview -#INITIAL_AGENT_API_KEYS=sk-abc,sk-abc -# -# default none -# -INITIAL_AGENT_NAMES= -INITIAL_AGENT_URLS= -INITIAL_AGENT_MODELS= -INITIAL_AGENT_API_KEYS= -EOF -) - - - -PARTNER_PREPARE_SCRIPT=$(cat << EOF -#!/bin/sh -set -e -set -o noglob - -SHINKAI_NODE_ARCHIVE=dcspark_shinkai-node.tar -DOCKER_LOAD_CMD="docker load --input" -DOCKER_COMPOSE_CMD="docker compose" # docker-compose -DOCKER_COMPOSE_ENV_FILE=.env - - -# --- helper functions for logs --- -info() { - echo '[INFO] ' "\$@" -} -warn() { - echo '[WARN] ' "\$@" >&2 -} -fatal() { - echo '[ERRO] ' "\$@" >&2 - exit 1 -} - -# --- load image --- -load_docker_image() { - msg="Docker loading \${SHINKAI_NODE_ARCHIVE}" - if [ -f \${SHINKAI_NODE_ARCHIVE} ]; then - info \${msg} - 
\${DOCKER_LOAD_CMD} \${SHINKAI_NODE_ARCHIVE} - else - fatal "\${msg} - failed (missing file - \${SHINKAI_NODE_ARCHIVE})" - fi -} - -# --- info about initial agents configuration --- -post_prepare_env_info() { - msg="Edit \"\${DOCKER_COMPOSE_ENV_FILE}\" if you want to start the node with preconfigured ai agents. You have the possibility to add ai agents also from Shinkai Visor." - info \${msg} -} - -# --- info docker compose --- -post_prepare_compose_info() { - msg="Once done with \"\${DOCKER_COMPOSE_ENV_FILE}\" changes, to start on-prem infrastructure run: \${DOCKER_COMPOSE_CMD} up -d" - info \${msg} -} - - -# --- info visor --- -post_prepare_visor_info() { - msg="Once everything is up and running, install/start Shinkai Visor and use the default provided settings on the ui." - info \${msg} -} - -load_docker_image -post_prepare_env_info -post_prepare_compose_info -post_prepare_visor_info - -EOF -) - -# --- helper functions for logs --- -info() { - echo '[INFO] ' "$@" -} -warn() { - echo '[WARN] ' "$@" >&2 -} -fatal() { - echo '[ERRO] ' "$@" >&2 - exit 1 -} - -# write $1 (content) to $2 (file) -write_to_file() { - echo "$1" >| "$2" || fatal "failed to write data to $2" -} - -# --- build image --- -build_docker_image() { - msg="Docker building ${SHINKAI_NODE_IMAGE}:${SHINKAI_NODE_VERSION} using ${SHINKAI_NODE_DOCKERFILE} with source at ${SHINKAI_SOURCE_PATH}" - if [ -f ${SHINKAI_NODE_DOCKERFILE} ]; then - info ${msg} - export DOCKER_BUILDKIT=1 - ${DOCKER_BUILD_CMD} -f ${SHINKAI_NODE_DOCKERFILE} -t ${SHINKAI_NODE_IMAGE}:${SHINKAI_NODE_VERSION} ${SHINKAI_SOURCE_PATH} - else - fatal "${msg} - failed (missing file - ${SHINKAI_NODE_DOCKERFILE})" - fi -} - -# --- save image --- -save_docker_image() { - if [ ! -d "${SHINKAI_TMP_LOCAL_FOLDER}" ]; then - mkdir ${SHINKAI_TMP_LOCAL_FOLDER} || fatal "failed to create local folder ${SHINKAI_TMP_LOCAL_FOLDER}" - fi - msg="Docker save ${SHINKAI_NODE_IMAGE}:${SHINKAI_NODE_VERSION} to ${SHINKAI_NODE_ARCHIVE}" - if [ ! 
-f ${SHINKAI_NODE_ARCHIVE} ]; then - info ${msg} - ${DOCKER_SAVE_CMD} ${SHINKAI_TMP_LOCAL_FOLDER}/${SHINKAI_NODE_ARCHIVE} ${SHINKAI_NODE_IMAGE}:${SHINKAI_NODE_VERSION} - else - fatal "${msg} - failed (file already exists - ${SHINKAI_TMP_LOCAL_FOLDER}/${SHINKAI_NODE_ARCHIVE})" - fi -} - -# --- prepare docker-compose infra for partner --- -prepare_docker_compose() { - msg="Preparing docker compose environment at ${SHINKAI_TMP_LOCAL_FOLDER}" - if [ ! -d "${SHINKAI_TMP_LOCAL_FOLDER}" ]; then - mkdir ${SHINKAI_TMP_LOCAL_FOLDER} || fatal "failed to create local folder ${SHINKAI_TMP_LOCAL_FOLDER}" - fi - info ${msg} - # copy original compose file - ${COPY_COMMAND} ${SHINKAI_COMPOSE_FILE} ${SHINKAI_TMP_LOCAL_FOLDER}/${SHINKAI_COMPOSE_FILE} || fatal "failed to copy ${SHINKAI_TMP_LOCAL_FOLDER}/${SHINKAI_COMPOSE_FILE}" - # replace any INITIAL_AGENT_* initial value with envs - ${SED_COMMAND} -i "s/INITIAL_AGENT_NAMES=.*/INITIAL_AGENT_NAMES=\${INITIAL_AGENT_NAMES}/g" ${SHINKAI_TMP_LOCAL_FOLDER}/${SHINKAI_COMPOSE_FILE} || fatal 'sed failed - INITIAL_AGENT_NAMES' - ${SED_COMMAND} -i "s/INITIAL_AGENT_URLS=.*/INITIAL_AGENT_URLS=\${INITIAL_AGENT_URLS}/g" ${SHINKAI_TMP_LOCAL_FOLDER}/${SHINKAI_COMPOSE_FILE} || fatal 'sed failed - INITIAL_AGENT_URLS' - ${SED_COMMAND} -i "s/INITIAL_AGENT_MODELS=.*/INITIAL_AGENT_MODELS=\${INITIAL_AGENT_MODELS}/g" ${SHINKAI_TMP_LOCAL_FOLDER}/${SHINKAI_COMPOSE_FILE} || fatal 'sed failed - INITIAL_AGENT_MODELS' - ${SED_COMMAND} -i "s/INITIAL_AGENT_API_KEYS=.*/INITIAL_AGENT_API_KEYS=\${INITIAL_AGENT_API_KEYS}/g" ${SHINKAI_TMP_LOCAL_FOLDER}/${SHINKAI_COMPOSE_FILE} || fatal 'sed failed - INITIAL_AGENT_API_KEYS' - # write .env sample file - write_to_file "${DOCKER_COMPOSE_ENV_DATA}" ${SHINKAI_TMP_LOCAL_FOLDER}/${DOCKER_COMPOSE_ENV_FILE} - # write partner prepare.sh - write_to_file "${PARTNER_PREPARE_SCRIPT}" ${SHINKAI_TMP_LOCAL_FOLDER}/prepare.sh - -} - -# --- load image --- -load_docker_image() { - msg="Docker loading ${SHINKAI_NODE_ARCHIVE}" - if [ -f 
${SHINKAI_NODE_ARCHIVE} ]; then - info ${msg} - ${DOCKER_LOAD_CMD} ${SHINKAI_NODE_ARCHIVE} - else - fatal "${msg} - failed (missing file - ${SHINKAI_NODE_ARCHIVE})" - fi -} - -# --- prepare partner archive --- -prepare_partner_archive() { - msg="Preparing partner data at ${SHINKAI_TMP_PARTNER_FOLDER}/${SHINKAI_TMP_LOCAL_FOLDER}.tar.gz" - if [ ! -d "${SHINKAI_TMP_PARTNER_FOLDER}" ]; then - mkdir ${SHINKAI_TMP_PARTNER_FOLDER} || fatal "failed to create local folder ${SHINKAI_TMP_PARTNER_FOLDER}" - fi - info ${msg} - tar -zcf ${SHINKAI_TMP_PARTNER_FOLDER}/${SHINKAI_TMP_LOCAL_FOLDER}.tar.gz ${SHINKAI_TMP_LOCAL_FOLDER} -} - - -# --- clean temp partner folder --- -clean_temporary_folder() { - msg="Cleaning ${SHINKAI_TMP_LOCAL_FOLDER}" - if [ -d "${SHINKAI_TMP_LOCAL_FOLDER}" ]; then - info ${msg} - rm -rf ${SHINKAI_TMP_LOCAL_FOLDER} || fatal "failed delete local folder ${SHINKAI_TMP_LOCAL_FOLDER}" - fi -} - -# --- info what to send to partner --- -partner_file_info() { - msg="Send to partner the file ${SHINKAI_TMP_PARTNER_FOLDER}/${SHINKAI_TMP_LOCAL_FOLDER}.tar.gz" - if [ -f "${SHINKAI_TMP_PARTNER_FOLDER}/${SHINKAI_TMP_LOCAL_FOLDER}.tar.gz" ]; then - info ${msg} - else - fatal "${msg} - error (missing file - ${SHINKAI_TMP_PARTNER_FOLDER}/${SHINKAI_TMP_LOCAL_FOLDER}.tar.gz)" - fi -} - - -build_docker_image -prepare_docker_compose -save_docker_image -prepare_partner_archive -clean_temporary_folder -partner_file_info \ No newline at end of file diff --git a/shinkai-bin/shinkai-node/tests/it/a3_micropayment_flow_tests.rs b/shinkai-bin/shinkai-node/tests/it/a3_micropayment_flow_tests.rs index 95bd8fca7..ed5b65395 100644 --- a/shinkai-bin/shinkai-node/tests/it/a3_micropayment_flow_tests.rs +++ b/shinkai-bin/shinkai-node/tests/it/a3_micropayment_flow_tests.rs @@ -337,6 +337,7 @@ fn micropayment_flow_test() { config: Some(vec![]), usage_type: None, tool_offering: Some(shinkai_tool_offering.clone()), + output_arg: ToolOutputArg { json: String::from("") }, }; { @@ -498,6 +499,7 @@ 
fn micropayment_flow_test() { output_arg: ToolOutputArg::empty(), embedding: None, restrictions: None, + // output_arg is already set above via ToolOutputArg::empty(); a second field would be a duplicate-field compile error (E0062) }; let shinkai_tool = ShinkaiTool::Network(network_tool, true);