From bb38ef40a7f74238c3e35746000610ebe228f666 Mon Sep 17 00:00:00 2001
From: Tushar <30565750+tushar5526@users.noreply.github.com>
Date: Wed, 27 Dec 2023 18:46:09 +0530
Subject: [PATCH] feat: basic deployment flow and logic setup
---
.github/workflows/test.yml | 19 +++
.gitignore | 21 +++
LICENSE | 8 ++
README.md | 20 +++
action/.dockerignore | 27 ++++
action/Dockerfile | 11 ++
action/action.yml | 23 ++++
action/io.py | 23 ++++
action/main.py | 23 ++++
action/requirements.txt | 1 +
server/app.py | 52 ++++++++
server/deployer.py | 86 ++++++++++++
server/docker-compose.yml | 10 ++
server/requirements.txt | 4 +
server/utils.py | 266 +++++++++++++++++++++++++++++++++++++
15 files changed, 594 insertions(+)
create mode 100644 .github/workflows/test.yml
create mode 100644 .gitignore
create mode 100644 LICENSE
create mode 100644 README.md
create mode 100644 action/.dockerignore
create mode 100644 action/Dockerfile
create mode 100644 action/action.yml
create mode 100644 action/io.py
create mode 100644 action/main.py
create mode 100644 action/requirements.txt
create mode 100644 server/app.py
create mode 100644 server/deployer.py
create mode 100644 server/docker-compose.yml
create mode 100644 server/requirements.txt
create mode 100644 server/utils.py
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 0000000..d325d2c
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,19 @@
+name: Testing
+
+on:
+ push:
+ branches:
+ - main
+
+jobs:
+ Test:
+ runs-on: ubuntu-latest
+ name: Testing the action
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+
+ - name: Run action
+ uses: ./action
+ with:
+ name: 'John'
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..92c5d7f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,21 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+.idea**
+
+server/deployments/*
+server/nginx-confs/*
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..26441fd
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,8 @@
+The MIT License (MIT)
+Copyright (c) 2023, Tushar Gupta
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e449ce6
--- /dev/null
+++ b/README.md
@@ -0,0 +1,20 @@
+## Sarthi
+
+Easy-to-set-up, Docker-based ephemeral previews!
+
+### Usage
+```yml
+example usage..
+```
+
+### License
+This action is licensed under the MIT License. See [LICENSE](LICENSE) for more information.
+
+
+# TODOs
+
+1. Grafana + Loki + Prometheus Setup in docker compose
+2. Dockerize the project
+3. Vault Setup
+4. GitHub Actions Setup
+5. Tests
\ No newline at end of file
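
The `Usage` block above is still a placeholder. Below is a rough sketch of what a consuming workflow step might eventually look like; the input names are only inferred from the `INPUT_*` environment variables read in `action/main.py` (they are not declared in `action/action.yml` yet), and the action reference is hypothetical:

```yml
name: Preview

on: pull_request

jobs:
  deploy-preview:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Deploy ephemeral preview
        uses: tushar5526/sarthi/action@main   # hypothetical reference to this action
        with:
          remote_user: ${{ secrets.REMOTE_USER }}
          remote_password: ${{ secrets.REMOTE_PASSWORD }}
          remote_host: ${{ secrets.REMOTE_HOST }}
          port: 22
          deployment_domain: previews.example.com
```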
diff --git a/action/.dockerignore b/action/.dockerignore
new file mode 100644
index 0000000..a606a14
--- /dev/null
+++ b/action/.dockerignore
@@ -0,0 +1,27 @@
+# Ignore all files and folders that start with a dot.
+**/.*
+
+# Ignore all virtual envs.
+venv/
+
+# Ignore all Python bytecode files.
+__pycache__/
+
+# Ignore all temporary files.
+*.tmp
+*.swp
+
+# Ignore all build artifacts.
+build/
+dist/
+
+# Ignore docs and repo metadata not needed in the image.
+README.md
+CONTRIBUTING.md
+CHANGELOG.md
+LICENSE
+Dockerfile
+.env
+.github/
+
+.git/
diff --git a/action/Dockerfile b/action/Dockerfile
new file mode 100644
index 0000000..92e8c07
--- /dev/null
+++ b/action/Dockerfile
@@ -0,0 +1,11 @@
+# setting the base image to a slim Python image
+FROM python:3-slim
+
+# copying the action source into the image
+COPY . /action
+
+# installing the requirements
+RUN pip install -U pip -r /action/requirements.txt
+
+# running the main.py file
+CMD [ "python", "/action/main.py" ]
diff --git a/action/action.yml b/action/action.yml
new file mode 100644
index 0000000..e1507c6
--- /dev/null
+++ b/action/action.yml
@@ -0,0 +1,23 @@
+name: Sarthi
+description: Easy-to-set-up, Docker-based ephemeral previews!
+author: Tushar Gupta
+
+branding:
+ icon: check
+ color: blue
+
+runs:
+ using: docker
+ image: Dockerfile
+
+# == inputs and outputs ==
+
+inputs:
+ name:
+ required: false
+ description: the person/thing you want to greet
+ default: World
+
+outputs:
+ phrase:
+ description: output variable
diff --git a/action/io.py b/action/io.py
new file mode 100644
index 0000000..a514c11
--- /dev/null
+++ b/action/io.py
@@ -0,0 +1,23 @@
+import os
+from typing import Dict
+
+BUFFER_PATH = os.environ["GITHUB_OUTPUT"]
+
+
+def write_to_output(context: Dict[str, str]) -> None:
+ """writes the keys (as variables) and values (as values) to the output buffer
+
+ Args:
+ context: variables and values
+
+ Examples:
+ In your project, use this function like:
+
+ >>> write_to_output({"name": "John", ...})
+
+ ``name`` will be the variable name and ``John`` is the value.
+ """
+
+ with open(BUFFER_PATH, "a") as _buffer:
+ for var, val in context.items():
+ _buffer.write(f"{var}={val}\r\n")
diff --git a/action/main.py b/action/main.py
new file mode 100644
index 0000000..6324991
--- /dev/null
+++ b/action/main.py
@@ -0,0 +1,23 @@
+import os
+import sys
+import typing
+
+
+def main(args: typing.List[str]) -> None:
+ """main function
+
+ Args:
+ args: STDIN arguments
+ """
+    # GITHUB_REPOSITORY is "<owner>/<repo>", e.g. "octocat/Hello-World"
+ project_name = os.environ.get("GITHUB_REPOSITORY").split("/")[1]
+ branch_name = os.environ.get("GITHUB_HEAD_REF")
+ username = os.environ.get("INPUT_REMOTE_USER")
+ password = os.environ.get("INPUT_REMOTE_PASSWORD")
+ host = os.environ.get("INPUT_REMOTE_HOST")
+ port = os.environ.get("INPUT_PORT") or 22
+ deployment_domain = os.environ.get("INPUT_DEPLOYMENT_DOMAIN")
+
+
+if __name__ == "__main__":
+ main(sys.argv)
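
`main()` only collects these values for now and does not send them anywhere. Below is a sketch (not part of this patch) of how they might later be forwarded to the server's `/deploy` endpoint from `server/app.py` and surfaced as a step output. Port 5000 is just Flask's development default, `deployment_urls` is an assumed output name, and only the standard library is used:

```python
import json
import os
import urllib.request

# Build the payload that server/app.py's deploy() reads from the request body.
payload = {
    "project_git_url": f"https://github.com/{os.environ['GITHUB_REPOSITORY']}.git",
    "branch_name": os.environ.get("GITHUB_HEAD_REF"),
    "compose_file_location": "docker-compose.yml",
}

request = urllib.request.Request(
    f"http://{os.environ['INPUT_REMOTE_HOST']}:5000/deploy",  # port 5000 assumed
    data=json.dumps(payload).encode(),
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urllib.request.urlopen(request) as response:
    urls = json.loads(response.read())

# Expose the preview URLs as a step output (same mechanism as action/io.py).
with open(os.environ["GITHUB_OUTPUT"], "a") as output:
    output.write(f"deployment_urls={','.join(urls)}\n")
```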
diff --git a/action/requirements.txt b/action/requirements.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/action/requirements.txt
@@ -0,0 +1 @@
+
diff --git a/server/app.py b/server/app.py
new file mode 100644
index 0000000..3855e8a
--- /dev/null
+++ b/server/app.py
@@ -0,0 +1,52 @@
+import logging
+import os
+
+import jwt
+from dotenv import load_dotenv
+from flask import Flask, jsonify, request
+from flask_httpauth import HTTPTokenAuth
+
+from deployer import Deployer, DeploymentConfig
+
+load_dotenv()
+
+if (os.environ.get("ENV") or "").lower() == "local":
+ logging.basicConfig(level=logging.NOTSET)
+
+
+app = Flask(__name__)
+auth = HTTPTokenAuth("Bearer")
+app.config["SECRET_KEY"] = os.environ.get("SECRET_KEY")
+
+
+@auth.verify_token
+def verify_token(token):
+    try:
+        jwt.decode(token, app.config["SECRET_KEY"], algorithms=["HS256"])
+    except jwt.PyJWTError:
+        return False
+ return "root"
+
+
+# Your deployment endpoint
+@app.route("/deploy", methods=["POST"])
+# @auth.login_required
+def deploy():
+ data = request.get_json()
+
+ # Create DeploymentConfig object
+    project_url_split = data.get("project_git_url").split("/")
+    config = DeploymentConfig(
+        project_name=f"{project_url_split[-2]}_{project_url_split[-1]}",
+ branch_name=data.get("branch_name"),
+ project_git_url=data.get("project_git_url"),
+ compose_file_location=data.get("compose_file_location") or "docker-compose.yml",
+ )
+
+ deployer = Deployer(config)
+ urls = deployer.deploy()
+ return jsonify(urls)
+
+
+if __name__ == "__main__":
+ app.run(debug=True, use_reloader=False)
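
The `@auth.login_required` check is commented out for now, but `verify_token` already expects the bearer token to be an HS256 JWT signed with the app's `SECRET_KEY`. A minimal sketch of producing such a token once the check is enabled; the claim payload is arbitrary, since `verify_token` only validates the signature:

```python
import os

import jwt  # PyJWT, already listed in server/requirements.txt

# Sign a token with the same SECRET_KEY the server uses; the claims are
# placeholders because verify_token() only checks the signature.
token = jwt.encode({"sub": "deploy-client"}, os.environ["SECRET_KEY"], algorithm="HS256")
print(f"Authorization: Bearer {token}")
```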
diff --git a/server/deployer.py b/server/deployer.py
new file mode 100644
index 0000000..cd89c66
--- /dev/null
+++ b/server/deployer.py
@@ -0,0 +1,86 @@
+import logging
+import os
+import shutil
+import subprocess
+import typing
+
+from utils import ComposeHelper, DeploymentConfig, NginxHelper
+
+logger = logging.getLogger(__name__)
+
+
+class Deployer:
+ def __init__(self, config: DeploymentConfig):
+ self._config = config
+ self._BASE_DIR: typing.Final[str] = os.environ.get("BASE_DIR")
+ self._project_path: typing.Final[str] = os.path.join(
+ self._BASE_DIR, config.get_project_hash()
+ )
+ self._setup_project()
+
+ self._compose_helper = ComposeHelper(
+ os.path.join(self._project_path, config.compose_file_location)
+ )
+ self._nginx_helper = NginxHelper(config)
+ self._deployment_namespace = config.get_project_hash()
+ self._outer_proxy_conf_location = (
+ os.environ.get("NGINX_PROXY_CONF_LOCATION") or "/etc/nginx/conf.d"
+ )
+
+ def _clone_project(self):
+ process = subprocess.Popen(
+ [
+ "git",
+ "clone",
+ "-b",
+ self._config.branch_name,
+ self._config.project_git_url,
+ self._project_path,
+ ],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+
+ stdout, stderr = process.communicate()
+
+ if process.returncode == 0:
+ logger.info("Git clone successful.")
+ else:
+ logger.error(f"Git clone failed. Return code: {process.returncode}")
+ logger.error("Standard Output:")
+ logger.error(stdout.decode())
+ logger.error("Standard Error:")
+ logger.error(stderr.decode())
+ raise Exception("Git clone failed")
+
+ def _setup_project(self):
+ if os.path.exists(self._project_path):
+ # TODO: Run docker compose down -v
+ logger.debug(f"Removing older project path {self._project_path}")
+ shutil.rmtree(self._project_path)
+ self._clone_project()
+
+ def _configure_outer_proxy(self):
+ if not self._project_nginx_port:
+ raise Exception("Project Proxy not deployed, project_nginx_port is None")
+ self._nginx_helper.generate_outer_proxy_conf_file(
+ self._project_nginx_port, self._outer_proxy_conf_location
+ )
+ self._nginx_helper.reload_nginx()
+
+ def _deploy_project(self):
+ services = self._compose_helper.get_service_ports_config()
+ conf_file_path, urls = self._nginx_helper.generate_project_proxy_conf_file(
+ services, self._project_path
+ )
+ # TODO: Keep retrying finding a new port for race conditions
+ self._project_nginx_port = self._nginx_helper.find_free_port()
+ self._compose_helper.start_services(
+ self._project_nginx_port, conf_file_path, self._deployment_namespace
+ )
+ return urls
+
+ def deploy(self):
+ urls = self._deploy_project()
+ self._configure_outer_proxy()
+ return urls
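
For reference, a sketch of driving `Deployer` directly, mirroring what `server/app.py` does. `BASE_DIR` (and optionally `NGINX_PROXY_CONF_LOCATION`) must be set before instantiation; the values below are placeholders that line up with the `server/deployments` and `server/nginx-confs` paths ignored in `.gitignore`:

```python
import os

from deployer import Deployer, DeploymentConfig

# Placeholder locations: BASE_DIR is where projects get cloned, and
# NGINX_PROXY_CONF_LOCATION is where the outer nginx picks up generated confs.
os.environ.setdefault("BASE_DIR", "./deployments")
os.environ.setdefault("NGINX_PROXY_CONF_LOCATION", "./nginx-confs")

config = DeploymentConfig(
    project_name="octocat_Hello-World",
    branch_name="main",
    project_git_url="https://github.com/octocat/Hello-World.git",
    compose_file_location="docker-compose.yml",
)

# deploy() clones the branch, rewrites its compose file to add a per-deployment
# nginx, starts the stack, and then wires up and reloads the outer proxy.
urls = Deployer(config).deploy()
print(urls)
```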
diff --git a/server/docker-compose.yml b/server/docker-compose.yml
new file mode 100644
index 0000000..38bc658
--- /dev/null
+++ b/server/docker-compose.yml
@@ -0,0 +1,10 @@
+version: '3'
+
+services:
+ nginx:
+ image: nginx:latest
+ container_name: sarthi_nginx
+ ports:
+ - "80:80"
+ volumes:
+ - ./nginx-confs:/etc/nginx/conf.d
diff --git a/server/requirements.txt b/server/requirements.txt
new file mode 100644
index 0000000..352f903
--- /dev/null
+++ b/server/requirements.txt
@@ -0,0 +1,4 @@
+pyyaml
+flask
+pyjwt
+Flask-HTTPAuth
diff --git a/server/utils.py b/server/utils.py
new file mode 100644
index 0000000..b917ab0
--- /dev/null
+++ b/server/utils.py
@@ -0,0 +1,266 @@
+import hashlib
+import logging
+import os
+import pathlib
+import socket
+import subprocess
+import typing
+from dataclasses import dataclass, fields
+
+import yaml
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class DeploymentConfig:
+ project_name: str
+ branch_name: str
+ project_git_url: str
+ compose_file_location: str = "docker-compose.yml"
+
+ def __post_init__(self):
+        # Ensure no field was left as None (e.g. a missing key in the request payload)
+        missing_members = [
+            field.name for field in fields(self) if getattr(self, field.name) is None
+ ]
+ if missing_members:
+ raise ValueError(f"Missing members: {', '.join(missing_members)}")
+
+ def get_project_hash(self):
+ return get_random_stub(f"{self.project_name}:{self.branch_name}")
+
+
+class ComposeHelper:
+ NGINX_SERVICE_TEMPLATE: typing.Final[
+ str
+ ] = """
+services:
+ nginx:
+ image: nginx
+ ports:
+ - '%s:80'
+ volumes:
+ - %s:/etc/nginx/conf.d/default.conf
+ networks:
+ - default
+ """
+
+ def __init__(self, compose_file_location: str):
+ self._compose_file_location = compose_file_location
+ self._compose = load_yaml_file(self._compose_file_location)
+
+ def start_services(
+ self, nginx_port: str, conf_file_path: str, deployment_namespace: str
+ ):
+ self._generate_processed_compose_file(
+ nginx_port, conf_file_path, deployment_namespace
+ )
+
+ command = ["docker-compose", "up", "-d", "--build"]
+ project_dir = pathlib.Path(self._compose_file_location).parent
+ subprocess.run(command, check=True, cwd=project_dir)
+ logger.info("Docker Compose up -d --build executed successfully.")
+
+ def _generate_processed_compose_file(
+ self, nginx_port: str, conf_file_path: str, deployment_namespace: str
+ ):
+ """
+        Overwrites the compose file, so call this only after get_service_ports_config:
+        1. Remove the ports mappings from every service
+        2. Add an nginx proxy service for this deployment
+ """
+ for service in self._compose["services"]:
+ if "ports" in self._compose["services"][service]:
+ del self._compose["services"][service]["ports"]
+
+ if "container_name" in self._compose["services"][service]:
+ del self._compose["services"][service]["container_name"]
+
+ service_proxy_template = ComposeHelper.NGINX_SERVICE_TEMPLATE % (
+ nginx_port,
+ conf_file_path,
+ )
+ proxy_yaml = yaml.safe_load(service_proxy_template)
+
+ # Add the proxy nginx to all networks, along with default
+ if "networks" in self._compose:
+ proxy_yaml["services"]["nginx"]["networks"].extend(
+ self._compose["networks"]
+ )
+
+ self._compose["services"][f"nginx_{deployment_namespace}"] = proxy_yaml[
+ "services"
+ ]["nginx"]
+
+ with open(self._compose_file_location, "w") as yaml_file:
+ # Dump the data to the YAML file
+ yaml.dump(self._compose, yaml_file, default_flow_style=False)
+
+ logger.info(f"YAML data written to {self._compose_file_location} successfully.")
+
+ def get_service_ports_config(
+ self,
+ ) -> typing.Dict[str, typing.List[typing.Tuple[int, int]]]:
+ services = {}
+ for service in self._compose["services"]:
+ if service not in services:
+ services[service] = []
+
+ port_mappings = []
+
+ if "ports" in self._compose["services"][service]:
+ port_mappings = self._compose["services"][service]["ports"]
+
+ for port_mapping in port_mappings:
+ ports = port_mapping.split(":")
+ services[service].append((ports[-2], ports[-1]))
+ return services
+
+
+class NginxHelper:
+ SERVER_BLOCK_TEMPLATE: typing.Final[
+ str
+ ] = """
+ server {
+ listen 80;
+ server_name %s;
+ %s
+ }
+
+ """
+
+ ROUTES_BLOCK_TEMPLATE: typing.Final[
+ str
+ ] = """
+ location / {
+ proxy_pass http://%s:%s;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ }
+ """
+
+ PROJECT_BLOCK_TEMPLATE: typing.Final[
+ str
+ ] = """
+ server {
+ listen 80;
+ server_name %s;
+
+ location / {
+ proxy_pass http://%s:%s;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ }
+ }
+ """
+
+ def __init__(self, config: DeploymentConfig):
+ self._project_name = config.project_name
+ self._project_hash = config.get_project_hash()
+ self._port = None
+ self._host_name = os.environ.get("DEPLOYMENT_HOST") or "host.docker.internal"
+        self._start_port = int(os.environ.get("DEPLOYMENT_PORT_START") or 15000)
+        self._end_port = int(os.environ.get("DEPLOYMENT_PORT_END") or 20000)
+ self._DOMAIN_NAME = os.environ.get("DOMAIN_NAME") or "localhost"
+ self._DOCKER_INTERNAL_HOSTNAME: typing.Final[str] = "host.docker.internal"
+
+    def find_free_port(self) -> int:
+ current_port = self._start_port
+
+ while current_port <= self._end_port:
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ try:
+ s.bind((self._host_name, current_port))
+ self._port = current_port
+ return current_port
+ except socket.error:
+ current_port += 1
+
+        raise RuntimeError(f"Could not find a free port in range {self._start_port}-{self._end_port}")
+
+ def generate_outer_proxy_conf_file(
+ self, port: str, project_conf_location: str
+ ) -> str:
+ port = port or self._port
+ server_name_regex = f"~{self._project_hash}.{self._DOMAIN_NAME}"
+ conf = NginxHelper.PROJECT_BLOCK_TEMPLATE % (
+ server_name_regex,
+ self._DOCKER_INTERNAL_HOSTNAME,
+ port,
+ )
+ conf_file_name = f"{self._project_name}-{self._project_hash}.conf"
+ conf_file_location = os.path.join(project_conf_location, conf_file_name)
+
+ with open(conf_file_location, "w") as file:
+ file.write(conf)
+
+ if not self._test_nginx_config():
+ os.remove(conf_file_location)
+ raise Exception("Failed creating outer_proxy_conf_file", conf)
+ return conf
+
+ def generate_project_proxy_conf_file(
+ self,
+ services: typing.Dict[str, typing.List[typing.Tuple[int, int]]],
+ project_path: str,
+ ) -> typing.Tuple[str, typing.List[str]]:
+ urls: typing.List[str] = []
+ routes = ""
+ for service, ports_mappings in services.items():
+ for ports in ports_mappings:
+ routes_block = NginxHelper.ROUTES_BLOCK_TEMPLATE % (
+ service,
+ ports[1],
+ )
+
+ service_url = f"{self._project_name}-{ports[0]}-{self._project_hash}.{self._DOMAIN_NAME}"
+ server_name_regex = f"~{service_url}"
+ urls.append(f"http://{service_url}")
+
+ server_block = NginxHelper.SERVER_BLOCK_TEMPLATE % (
+ server_name_regex,
+ routes_block,
+ )
+ routes += server_block
+
+ conf_file_name = f"{self._project_name}-{self._project_hash}.conf"
+ conf_file_path = os.path.join(project_path, conf_file_name)
+ with open(conf_file_path, "w") as file:
+ file.write(routes)
+
+ return str(conf_file_path), urls
+
+    def _test_nginx_config(self) -> bool:
+        try:
+            subprocess.run(
+                ["docker", "exec", "sarthi_nginx", "nginx", "-t"],
+                check=True,
+                capture_output=True,
+                text=True,
+            )
+            return True
+        except subprocess.CalledProcessError as e:
+            logger.error(f"Error testing Nginx configuration: {e}")
+            return False
+
+    def reload_nginx(self):
+        if not self._test_nginx_config():
+            raise Exception("Invalid Nginx configuration, not reloading")
+        subprocess.run(
+            ["docker", "exec", "sarthi_nginx", "nginx", "-s", "reload"], check=True
+        )
+        logger.info("Nginx reloaded successfully.")
+
+
+def get_random_stub(project_name: str) -> str:
+ return hashlib.md5(project_name.encode()).hexdigest()[:16]
+
+
+def load_yaml_file(filename: str):
+ with open(filename) as file:
+ return yaml.safe_load(file)
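
Finally, a small self-contained sketch of how the two helpers above fit together: what `ComposeHelper.get_service_ports_config` returns for a compose file, and the preview hostnames `NginxHelper.generate_project_proxy_conf_file` derives from it. The compose content and project details are invented for illustration, and files are written to the current directory:

```python
from utils import ComposeHelper, DeploymentConfig, NginxHelper

# An invented compose file, written to the working directory for the example.
with open("docker-compose.yml", "w") as f:
    f.write(
        "services:\n"
        "  web:\n"
        "    image: nginx\n"
        "    ports:\n"
        '      - "8080:80"\n'
    )

compose_helper = ComposeHelper("docker-compose.yml")
services = compose_helper.get_service_ports_config()
print(services)  # {'web': [('8080', '80')]}  (host port, container port)

config = DeploymentConfig(
    project_name="octocat_Hello-World",
    branch_name="main",
    project_git_url="https://github.com/octocat/Hello-World.git",
)

# URLs follow "<project>-<host port>-<project hash>.<DOMAIN_NAME>", with
# DOMAIN_NAME defaulting to "localhost".
conf_path, urls = NginxHelper(config).generate_project_proxy_conf_file(services, ".")
print(urls)  # e.g. ['http://octocat_Hello-World-8080-<hash>.localhost']
```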