name: Crawler
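# Scheduled crawl of bcgov org repositories for bcgovpubcode.yml files; src/main.js runs with credentials for the pubcode prod API.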
on:
  workflow_dispatch:
    inputs:
      REPO_NAMES:
        required: false
        description: Comma-separated list of repo names within the bcgov org; when provided, the crawl runs only for those repos.
  schedule:
    # Runs every day at 8am GMT. "*" is a special character in YAML, so the cron string must be quoted.
    - cron: "0 8 * * *"
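# To crawl only specific repos, trigger the workflow manually with the REPO_NAMES input.
# Illustrative invocation (an assumption, using the GitHub CLI; repo names are placeholders):
#   gh workflow run Crawler -f REPO_NAMES="repo-one,repo-two"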
concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: true
jobs:
  pubcode-crawler:
    name: Crawl Git Repos for bcgovpubcode.yml
    runs-on: ubuntu-22.04
    environment: prod
    defaults:
      run:
        working-directory: crawler
    steps:
      - name: Check out repository
        uses: actions/checkout@v3
      - name: Add Node.js
        uses: actions/setup-node@v3
        with:
          node-version: "18.x"
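      # Cache the npm download cache, keyed on the lockfile hash, so unchanged dependencies are not re-downloaded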
      - uses: actions/cache@v3
        with:
          path: ~/.npm
          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-node-
      - name: Install Dependencies
        run: npm ci
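      # Log in to OpenShift, read the prod API credentials and route, then run the crawler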
      - name: Process script
        env:
          GIT_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          REPO_NAMES: ${{ github.event.inputs.REPO_NAMES }}
        run: |
          oc login --token=${{ secrets.OC_TOKEN }} --server=${{ vars.OC_SERVER }}
          oc project ${{ vars.OC_NAMESPACE }}
          # Read the API key from the prod secret and build the API URL from the exposed route
          API_KEY=$(oc get secrets/pubcode-prod-api --template={{.data.api_key}} | base64 -d)
          API_URL=https://$(oc get route/pubcode-prod-api --template={{.spec.host}})
          API_KEY="${API_KEY}" API_URL="${API_URL}" node src/main.js