
Commit

Merge pull request #1 from bwhitehead0/develop
Develop
bwhitehead0 authored May 23, 2024
2 parents e48f11c + 400b447 commit adbce02
Showing 5 changed files with 113 additions and 13 deletions.
17 changes: 17 additions & 0 deletions .github/workflows/shellcheck.yaml
@@ -0,0 +1,17 @@
on:
  push:

name: "Trigger: Push action"
permissions:
  contents: read

jobs:
  shellcheck:
    name: Shellcheck
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          token: ${{ github.token }}
      - name: Run ShellCheck
        uses: bwhitehead0/action-shellcheck@master
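
For reference, the same check can be run locally before pushing. A minimal sketch, assuming the shellcheck CLI is already installed on the development machine:

```bash
#!/bin/bash
# Lint the repository's shell scripts locally, mirroring what the
# workflow above runs on push. Assumes `shellcheck` is installed and
# that the command is run from the repository root.
set -euo pipefail

shellcheck assets/*.sh
```
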
5 changes: 4 additions & 1 deletion action.yml
@@ -9,8 +9,11 @@ inputs:
outputs:
  runner-info:
    description: Runner information.
    value: ${{ steps.gather-info.outputs.runner_info }}
runs:
  using: 'composite'
  steps:
    - run: ${{ github.action_path }}/assets/runner_info.sh
    - name: Gather runner info
      id: gather-info
      run: echo runner_info=${{ github.action_path }}/assets/runner_info.sh >> $GITHUB_OUTPUT
      shell: bash
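
The new `runner-info` output is wired through the `$GITHUB_OUTPUT` file: the `gather-info` step appends a `name=value` line to it, and the composite action's `outputs:` block exposes that value as `steps.gather-info.outputs.runner_info`. A minimal sketch of that mechanism, runnable outside of Actions by pointing `GITHUB_OUTPUT` at a temporary file (the path value written below is illustrative only):

```bash
#!/bin/bash
# Sketch of the $GITHUB_OUTPUT mechanism used by the gather-info step.
# Outside of GitHub Actions there is no runner-provided file, so fall back
# to a temporary one purely for illustration.
GITHUB_OUTPUT="${GITHUB_OUTPUT:-$(mktemp)}"

# Mirror the step: append a key=value line to the output file.
# The path value here is a placeholder, not the real github.action_path.
echo "runner_info=/path/to/assets/runner_info.sh" >> "$GITHUB_OUTPUT"

# The runner later reads this file back; in a workflow the same value
# would surface as steps.gather-info.outputs.runner_info.
cat "$GITHUB_OUTPUT"
```
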
63 changes: 63 additions & 0 deletions assets/kv_to_json.sh
@@ -0,0 +1,63 @@
#!/bin/bash
# simple script to take 'key: value' pairs and convert to a simple JSON string
#
# such that a series of values from the pipeline like the following:
# value1: abc
# value2: def
# value3: ghi
#
# becomes:
# {"value1":"abc","value2":"def","value3":"ghi"}
#
# use flag -p to output as pretty JSON, and the above example would become:
# {
# "value1": "abc",
# "value2": "def",
# "value3": "ghi"
# }

while getopts "p" opt; do
  case $opt in
    p)
      pretty=true
      ;;
    \?)
      echo "Invalid option: -$OPTARG" >&2
      exit 1
      ;;
  esac
done

newline=$'\n'
first_entry=true

# Read key-value pairs from pipeline
while IFS=':' read -r key value; do
  # Remove leading/trailing whitespace from key and value, and escape double quotes in value
  key=$(echo "$key" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
  value=$(echo "$value" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/\"/\\\"/g')

  if [ "${pretty}" == true ]; then
    # Add a comma before each new entry except the first one
    if [ "$first_entry" = false ]; then
      json="$json,${newline}"
    fi
    first_entry=false

    # Add the key-value pair to the JSON object with indentation
    json="$json \"$key\": \"$value\""
  else
    # Add key-value pair to JSON object
    json+="\"$key\":\"$value\","
  fi
done

# build either pretty or non-pretty JSON and remove trailing comma
if [ "${pretty}" == true ]; then
json="{${newline}${json%,}${newline}}"
else
json="{${json%,}}"
fi

# print the JSON object
echo "$json"
39 changes: 28 additions & 11 deletions assets/runner_info.sh
100644 → 100755
@@ -3,6 +3,7 @@
# Get OS name
OS_NAME=$(grep "PRETTY_NAME=" /etc/os-release | cut -d'"' -f2)


# Get OS Version
if [[ $OS_NAME == *"Amazon"* ]]; then
  # Amazon Linux
@@ -27,29 +28,45 @@ fi
echo "OS: ${OS_NAME}"
echo "OS Version: ${OS_VERSION}"

# if runner service is running then we can determine installation path and get additional info
if pgrep "runsvc.sh" >/dev/null; then
  # runner is running and we can easily find the disk it's installed on
  # ignore shellcheck warning as pidof isn't going to get us what we need here
  # shellcheck disable=SC2009
  RUNNER_PATH="$(dirname "$(ps aux | grep -w "[r]unsvc.sh" | awk '{print $12}')")"
else
  # runner is not running, so we'll just default to blank
  RUNNER_PATH=""
fi


# if action variable INPUT_detail_level is set, gather additional info
# ignore shellcheck warnings about the variable not being defined, as it's set by the runner execution
# shellcheck disable=SC2154
if [[ $INPUT_detail_level == "full" ]]; then
  echo "Kernel Version: $(uname -r)"
  echo "OS Hostname: $(hostname)"
  echo "Runner User: $(whoami)"

  # get free disk % space on runner partition and root partition
  if pgrep "runsvc.sh" >/dev/null; then
    # runner is running and we can easily find the disk it's installed on
    # ignore shellcheck warning as pidof isn't going to get us what we need here
    # shellcheck disable=SC2009
    DISK_USED=$(df -hP "$(ps aux | grep -w "[r]unsvc.sh" | awk '{print $12}')" | awk 'NR==2 {print $5}')
  else
    # runner is not running, so we'll just default to blank
  if [ -z "${RUNNER_PATH}" ]; then
    DISK_USED=""
  else
    DISK_USED=$(df -hP "${RUNNER_PATH}" | awk 'NR==2 {print $5}')
  fi
  echo "Runner Path: ${RUNNER_PATH}"
  echo "Runner Disk Used: ${DISK_USED}"
  echo "Root Disk Used: $(df -hP / | awk 'NR==2 {print $5}')"
fi

TOKEN=$(curl -s -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 21600")
if [ -z "${RUNNER_PATH}" ]; then
# get runner version
RUNNER_VERSION=""
else
RUNNER_VERSION=$("${RUNNER_PATH}"/config.sh --version)
fi

echo "Runner Version: ${RUNNER_VERSION}"

TOKEN=$(curl -m 1 -s -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 21600")
# could use JQ to parse the JSON output but some older instances won't have it installed
# sed to remove quotes and commas and leading whitespace etc
curl -s -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/dynamic/instance-identity/document | grep 'accountId\|architecture\|instanceId\|instanceType\|privateIp\|region' | sed 's/\"//g; s/\,//g; s/^[ \t]*//; s/ : /: /'
curl -s -m 1 -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/dynamic/instance-identity/document | grep 'accountId\|architecture\|instanceId\|instanceType\|privateIp\|region' | sed 's/\"//g; s/\,//g; s/^[ \t]*//; s/ : /: /'
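
The new `kv_to_json.sh` helper is not referenced anywhere in this commit, but the `key: value` lines that `runner_info.sh` prints are exactly the shape it consumes. A speculative combination, assuming both scripts are run from the repository root on a self-hosted runner:

```bash
#!/bin/bash
# Speculative pairing of the two scripts touched by this commit:
# runner_info.sh emits 'key: value' lines and kv_to_json.sh turns them
# into a single JSON object. INPUT_detail_level is normally set by the
# runner from the action input; it is provided inline here.
INPUT_detail_level="full" ./assets/runner_info.sh | ./assets/kv_to_json.sh -p
```
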
2 changes: 1 addition & 1 deletion readme.md
@@ -1,6 +1,6 @@
# Runner_Info

GitHub Action to return some diagnostic info about the self-hosted runner executing the workflow.
GitHub Action to return some diagnostic info about the self-hosted runner in AWS executing the workflow.

v0.1.0 Returns:
```
