# Weighs the soul of incoming HTTP requests to stop AI crawlers
---
name: Docs test build

# PR gate: validate that the docs image still builds; nothing is pushed.
on:
  pull_request:
    branches: ["main"]

# Least-privilege token. `actions: write` is needed so BuildKit can write
# to the GitHub Actions cache backend (type=gha) used below.
permissions:
  contents: read
  actions: write

jobs:
  build:
    runs-on: ubuntu-24.04

    steps:
      # Actions are pinned to full commit SHAs (tag in trailing comment)
      # so a moved tag cannot change what runs.
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          # This job never pushes back to the repo; don't persist the token.
          persist-credentials: false

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

      # Generates image tags/labels from git context for the build step.
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
        with:
          images: ghcr.io/${{ github.repository }}/docs

      - name: Build and push
        id: build
        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
        with:
          context: ./docs
          # Reuse layers from (and save layers to) the GHA cache backend.
          cache-from: type=gha
          cache-to: type=gha
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64
          # Build-only check on PRs — the image is never published here.
          push: false