chore(spam-detection): Adjust threshold (#90) #73

Workflow file for this run

name: Production
on:
  push:
    branches: [main]
env:
  AWS_REGION: ap-southeast-2
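
# Pipeline: lint-format -> build -> deploy. The build job produces an ARM64
# image tarball and stages it in S3; the deploy job pulls it onto the EC2 host.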
jobs:
  lint-format:
    name: Linting Checks
    uses: ./.github/workflows/lint.yml
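
  # Build the linux/arm64 image with Buildx + QEMU emulation, reuse cached
  # layers, and stage the gzipped image plus docker-compose.yml in S3.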
  build:
    needs: lint-format
    name: Build
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      contents: read
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME }}
          role-session-name: ${{ secrets.AWS_ROLE_SESSION_NAME }}
          aws-region: ${{ env.AWS_REGION }}
      - name: Set up QEMU for ARM64
        uses: docker/setup-qemu-action@v3
        with:
          platforms: linux/arm64
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
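      # Restore the local Buildx layer cache, keyed on the commit SHA, with a
      # prefix fallback so the most recent cache is reused on a miss.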
      - name: Cache Docker layers
        uses: actions/cache@v4
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-
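      # Build the linux/arm64 image, write it out as a docker-format tarball,
      # and gzip it so it can be shipped to the EC2 host via S3.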
      - name: Build Docker container
        run: |
          docker buildx build \
            --cache-from=type=local,src=/tmp/.buildx-cache \
            --cache-to=type=local,dest=/tmp/.buildx-cache-new,mode=max \
            --output type=docker,dest=duckbot.tar \
            --platform=linux/arm64 --file=Dockerfile -t duckbot .
          gzip duckbot.tar
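      # Rotate the cache: the build wrote fresh layers to .buildx-cache-new, so
      # sync it over the old cache (a common workaround to keep the local
      # Buildx cache from growing unbounded across runs).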
      - name: Save Docker cache
        if: success()
        run: |
          rsync -a --delete /tmp/.buildx-cache-new/ /tmp/.buildx-cache/
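      # S3 is the handoff point between the build and deploy jobs.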
      - name: Copy image and compose file to S3
        run: |
          aws s3 cp ./duckbot.tar.gz s3://${{ secrets.AWS_S3_BUCKET }}/duckbot/
          aws s3 cp ./docker-compose.yml s3://${{ secrets.AWS_S3_BUCKET }}/duckbot/
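
  # Deploy: SSH into the EC2 host, pull the image and compose file from S3,
  # write the bot's .env from repository secrets, load the image, and restart
  # the containers. The ${{ secrets.* }} expressions inside the quoted remote
  # script are expanded by GitHub Actions before the command is sent over SSH.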
  deploy:
    needs: build
    name: Deploy
    runs-on: ubuntu-latest
    environment: Production
    steps:
      - name: Deploy on EC2
        env:
          KEY: ${{ secrets.SSH_EC2_KEY }}
          HOSTNAME: ${{ secrets.SSH_EC2_HOSTNAME }}
          USER: ${{ secrets.SSH_EC2_USER }}
          GUILD_ID: ${{ secrets.GUILD_ID }}
          BOT_TOKEN: ${{ secrets.BOT_TOKEN }}
          SKULLBOARD_CHANNEL_ID: ${{ secrets.SKULLBOARD_CHANNEL_ID }}
          REQUIRED_REACTIONS: ${{ secrets.REQUIRED_REACTIONS }}
          TENOR_API_KEY: ${{ secrets.TENOR_API_KEY }}
          GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
          REQUESTS_PER_MINUTE: ${{ secrets.REQUESTS_PER_MINUTE }}
          LIMIT_WINDOW: ${{ secrets.LIMIT_WINDOW }}
          LOG_CHANNEL_ID: ${{ secrets.LOG_CHANNEL_ID }}
          ADMIN_USERS: ${{ secrets.ADMIN_USERS }}
        run: |
          echo "$KEY" > private_key && chmod 600 private_key
          ssh -v -o StrictHostKeyChecking=no -i private_key ${USER}@${HOSTNAME} '
            cd ~/duckbot
            aws s3 cp s3://${{ secrets.AWS_S3_BUCKET }}/duckbot/duckbot.tar.gz .
            aws s3 cp s3://${{ secrets.AWS_S3_BUCKET }}/duckbot/docker-compose.yml .
            echo GUILD_ID=${{ secrets.GUILD_ID }} > .env
            echo BOT_TOKEN=${{ secrets.BOT_TOKEN }} >> .env
            echo SKULLBOARD_CHANNEL_ID=${{ secrets.SKULLBOARD_CHANNEL_ID }} >> .env
            echo REQUIRED_REACTIONS=${{ secrets.REQUIRED_REACTIONS }} >> .env
            echo TENOR_API_KEY=${{ secrets.TENOR_API_KEY }} >> .env
            echo GEMINI_API_KEY=${{ secrets.GEMINI_API_KEY }} >> .env
            echo REQUESTS_PER_MINUTE=${{ secrets.REQUESTS_PER_MINUTE }} >> .env
            echo LIMIT_WINDOW=${{ secrets.LIMIT_WINDOW }} >> .env
            echo LOG_CHANNEL_ID=${{ secrets.LOG_CHANNEL_ID }} >> .env
            echo ADMIN_USERS=${{ secrets.ADMIN_USERS }} >> .env
            docker load -i duckbot.tar.gz
            docker compose up -d
          '