diff --git a/.github/actions/terraform-deploy-sm2a/action.yml b/.github/actions/terraform-deploy-sm2a/action.yml
index 59f53ab..4f146a9 100644
--- a/.github/actions/terraform-deploy-sm2a/action.yml
+++ b/.github/actions/terraform-deploy-sm2a/action.yml
@@ -33,20 +33,6 @@ runs:
         python-version: "3.11"
         cache: "pip"
 
-    - name: Install python dependencies
-      shell: bash
-      working-directory: ${{ inputs.dir }}
-      run: pip install -r ../deploy_requirements.txt
-
-    - name: Get relevant environment configuration from aws secrets
-      shell: bash
-      working-directory: ${{ inputs.dir }}
-      env:
-        AWS_DEFAULT_REGION: ${{ inputs.aws-region }}
-        AWS_REGION: ${{ inputs.aws-region }}
-      run: |
-        python scripts/generate_env_file.py --secret-id ${{ inputs.env_aws_secret_name }} --env-file ${{ inputs.env-file }}
-
     - name: Setup Terraform
       uses: hashicorp/setup-terraform@v1
       with:
@@ -54,14 +40,11 @@ runs:
 
     - name: Deploy
       shell: bash
-      working-directory: ${{ inputs.dir }}
       env:
         AWS_DEFAULT_REGION: ${{ inputs.aws-region }}
         AWS_REGION: ${{ inputs.aws-region }}
       run: |
-        cp -r ../dags . 
-        ./scripts/deploy.sh ${{ inputs.env-file }} <<< init
-        ./scripts/deploy.sh ${{ inputs.env-file }} <<< deploy
+        make sm2a-deploy ENV_FILE=${{ inputs.env-file }} SECRET_NAME=${{ inputs.env_aws_secret_name }}
 
     - name: Output workflows API endpoint
       id: output_sm2a_workflows_endpoint
diff --git a/.gitignore b/.gitignore
index 547284f..cc646d8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -44,6 +44,5 @@
 cdk.context.json
 env.sh
 .hypothesis
-Makefile
 .env_sit
 terraform.tfstate
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..55d2c02
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,76 @@
+# User-tunable knobs (override from the CLI, e.g. `make sm2a-deploy SECRET_NAME=...`).
+SECRET_NAME=""
+ENV_FILE=".env"
+SM2A_FOLDER="sm2a"
+
+# chdir trick: rebinds SHELL so every subsequent recipe line runs inside $(1).
+CHDIR_SHELL := $(SHELL)
+define chdir
+   $(eval _D=$(firstword $(1) $(@D)))
+   $(info $(MAKE): cd $(_D))  $(eval SHELL = cd $(_D); $(CHDIR_SHELL))
+endef
+
+important_message = \
+	@echo "\033[0;31m$(1) \033[0m"
+
+info_message = \
+	@echo "\033[0;32m$(1) \033[0m"
+
+
+count_down = \
+	@echo "Spinning up the system please wait..."; \
+	secs=40 ;\
+	while [ $$secs -gt 0 ]; do \
+		printf "%d\033[0K\r" $$secs; \
+		sleep 1; \
+		secs=$$((secs - 1)); \
+	done;
+
+
+# NOTE: target names must be on the `.PHONY:` line itself (tab-indented names
+# below it would be parsed as a recipe and nothing would be declared phony).
+.PHONY: all clean test switch-to-sm2a \
+	sm2a-local-run sm2a-local-init sm2a-local-stop \
+	sm2a-cp-dags sm2a-deploy
+
+all: switch-to-sm2a sm2a-local-init sm2a-local-run
+
+test:
+	pytest tests
+
+switch-to-sm2a:
+	$(call chdir,${SM2A_FOLDER})
+
+sm2a-local-run: switch-to-sm2a sm2a-local-stop sm2a-cp-dags
+	@echo "Running SM2A"
+	docker compose up -d
+	$(call important_message, "Give the resources a minute to be healthy 💪")
+	$(count_down)
+	$(call info_message, "Please visit http://localhost:8080")
+	echo "username:airflow | password:airflow"
+	echo "To use local SM2A with AWS update ${SM2A_FOLDER}/sm2a-local-config/.env AWS credentials"
+
+sm2a-local-init: switch-to-sm2a sm2a-cp-dags
+	cp sm2a-local-config/env_example sm2a-local-config/.env
+	docker compose run --rm airflow-cli db init
+	docker compose run --rm airflow-cli users create --email airflow@example.com --firstname airflow --lastname airflow --password airflow --username airflow --role Admin
+
+sm2a-local-stop: switch-to-sm2a + docker compose down + +sm2a-cp-dags: + cp -r ../dags dags + +sm2a-deploy: switch-to-sm2a sm2a-cp-dags + @echo "Installing the deployment dependency" + pip install -r deploy_requirements.txt + echo "Deploying SM2A" + python scripts/generate_env_file.py --secret-id ${SECRET_NAME} --env-file ${ENV_FILE} + ./scripts/deploy.sh ${ENV_FILE} <<< init + ./scripts/deploy.sh ${ENV_FILE} <<< deploy + +clean: switch-to-sm2a sm2a-local-stop + @echo "Cleaning local env" + docker container prune -f + docker image prune -f + docker volume prune -f + diff --git a/README.md b/README.md index f2654a8..3f9857b 100644 --- a/README.md +++ b/README.md @@ -35,6 +35,35 @@ See [terraform-getting-started](https://developer.hashicorp.com/terraform/tutori See [getting-started-install](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) + + +### Setup a local SM2A development environment + +1. Initialize the metadata db + +```shell +make sm2a-local-init +``` +🚨 NOTE: This command is typically required only once at the beginning. +After running it, you generally do not need to run it again unless you run `make clean`, +which will require you to reinitialize SM2A with `make sm2a-local-init` + +This will create an airflow username: `airflow` with password `airflow` + +2. Start all services + +```shell +make sm2a-local-run +``` +This will start SM2A services and will be running on http://localhost:8080 + +3. Stop all services + +```shell +make sm2a-local-stop +``` + + ## Deployment This project uses Terraform modules to deploy Apache Airflow and related AWS resources using Amazon's managed Airflow provider.