chore(build): Automation workflows (#972)

* Added fork sync workflow (syncs the main branch with upstream every 30 minutes)
* Added monthly workflow run deletion (all skipped or cancelled runs, and runs older than 30 days, will be deleted)
* Removed Docker image build for main branch on forks
* Added documentation on how to enable the sync and Docker build workflows
This commit is contained in:
Kwitsch 2023-04-03 16:33:16 +02:00 committed by GitHub
parent 2b1786a42d
commit 68a8476e48
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
22 changed files with 110 additions and 59 deletions

View File

@ -0,0 +1,37 @@
# Housekeeping workflow: purges uninteresting or stale workflow runs from
# the Actions history so the repository UI stays manageable.
name: Delete workflow runs
on:
  schedule:
    # Runs at midnight UTC on the first day of every month.
    - cron: '0 0 1 * *'
  # Also allow manual triggering from the Actions tab.
  workflow_dispatch:
jobs:
  del_runs:
    name: Delete workflow runs
    runs-on: ubuntu-latest
    steps:
      # Remove every run that ended with conclusion "skipped",
      # regardless of age (retain_days: 0, keep_minimum_runs: 0).
      - name: Delete skipped
        uses: Mattraks/delete-workflow-runs@v2
        with:
          token: ${{ github.token }}
          repository: ${{ github.repository }}
          retain_days: 0
          keep_minimum_runs: 0
          delete_run_by_conclusion_pattern: skipped
      # Remove every run that ended with conclusion "cancelled",
      # regardless of age.
      - name: Delete cancelled
        uses: Mattraks/delete-workflow-runs@v2
        with:
          token: ${{ github.token }}
          repository: ${{ github.repository }}
          retain_days: 0
          keep_minimum_runs: 0
          delete_run_by_conclusion_pattern: cancelled
      # Remove runs older than 30 days, but always keep at least the
      # 6 most recent runs of each workflow.
      - name: Delete workflow runs(older than a month)
        uses: Mattraks/delete-workflow-runs@v2
        with:
          token: ${{ github.token }}
          repository: ${{ github.repository }}
          retain_days: 30
          keep_minimum_runs: 6

View File

@ -35,15 +35,18 @@ jobs:
if [[ "${ENABLED,,}" != "true" ]]; then
echo "enabled=0" >> $GITHUB_OUTPUT
echo "Workflow is disabled"
echo "### Workflow is disabled" >> $GITHUB_STEP_SUMMARY
echo "To enable this workflow by creating a secret 'DEVELOPMENT_DOCKER' with the value 'true'" >> $GITHUB_STEP_SUMMARY
else
echo "enabled=1" >> $GITHUB_OUTPUT
echo "Workflow is enabled"
if [[ "${{ github.repository_owner }}" != "0xERR0R" && "${GITHUB_REF#refs/heads/}" == "main" ]]; then
echo "enabled=0" >> $GITHUB_OUTPUT
echo "Workflow is disabled for main branch on forks"
else
echo "enabled=1" >> $GITHUB_OUTPUT
echo "Workflow is enabled"
fi
fi
docker:

36
.github/workflows/fork-sync.yml vendored Normal file
View File

@ -0,0 +1,36 @@
# Fork maintenance workflow: keeps a fork's main branch in sync with its
# upstream repository using the GitHub CLI.
name: Sync Fork
on:
  schedule:
    # Attempt a sync every 30 minutes.
    - cron: '*/30 * * * *'
  # Also allow manual triggering from the Actions tab.
  workflow_dispatch:
# Never run two syncs concurrently; a new run supersedes a queued one.
concurrency:
  group: ${{ github.workflow }}
jobs:
  sync:
    name: Sync with Upstream
    runs-on: ubuntu-latest
    # Skip on the upstream repository itself — syncing only makes sense on forks.
    if: github.repository_owner != '0xERR0R'
    steps:
      # The sync is opt-in: it only proceeds when the FORK_SYNC_TOKEN secret
      # (a token with write permission on the fork) has been configured.
      - name: Enabled Check
        id: check
        shell: bash
        run: |
          if [[ "${{ secrets.FORK_SYNC_TOKEN }}" != "" ]]; then
            echo "enabled=1" >> $GITHUB_OUTPUT
            echo "Workflow is enabled"
          else
            echo "enabled=0" >> $GITHUB_OUTPUT
            echo "Workflow is disabled(create FORK_SYNC_TOKEN secret with repo write permission to enable it)"
          fi
      # Fast-forward the fork's main branch to match upstream via `gh repo sync`.
      - name: Sync
        if: ${{ steps.check.outputs.enabled == 1 }}
        env:
          GH_TOKEN: ${{ secrets.FORK_SYNC_TOKEN }}
        shell: bash
        run: |
          gh repo sync ${{ github.repository }} -b main

View File

@ -89,6 +89,5 @@ var _ = Describe("Chained grouped cache", func() {
Expect(cache.Contains("both", []string{"group1", "group2"})).Should(ConsistOf("group2"))
})
})
})
})

View File

@ -32,7 +32,6 @@ var _ = Describe("In-Memory grouped cache", func() {
Expect(cache.Contains("searchString", []string{"group1"})).Should(BeEmpty())
})
})
})
Describe("Cache creation", func() {
When("cache with 1 group was created", func() {
@ -127,6 +126,5 @@ var _ = Describe("In-Memory grouped cache", func() {
Expect(cache.Contains("both", []string{"group1", "group2"})).Should(ConsistOf("group2"))
})
})
})
})

View File

@ -10,9 +10,7 @@ import (
)
var _ = Describe("BlockingConfig", func() {
var (
cfg BlockingConfig
)
var cfg BlockingConfig
suiteBeforeEach()

View File

@ -9,9 +9,7 @@ import (
)
var _ = Describe("CachingConfig", func() {
var (
cfg CachingConfig
)
var cfg CachingConfig
suiteBeforeEach()

View File

@ -9,9 +9,7 @@ import (
)
var _ = Describe("ClientLookupConfig", func() {
var (
cfg ClientLookupConfig
)
var cfg ClientLookupConfig
suiteBeforeEach()

View File

@ -9,9 +9,7 @@ import (
)
var _ = Describe("ConditionalUpstreamConfig", func() {
var (
cfg ConditionalUpstreamConfig
)
var cfg ConditionalUpstreamConfig
suiteBeforeEach()

View File

@ -10,9 +10,7 @@ import (
)
var _ = Describe("CustomDNSConfig", func() {
var (
cfg CustomDNSConfig
)
var cfg CustomDNSConfig
suiteBeforeEach()

View File

@ -9,9 +9,7 @@ import (
)
var _ = Describe("FilteringConfig", func() {
var (
cfg FilteringConfig
)
var cfg FilteringConfig
suiteBeforeEach()

View File

@ -9,9 +9,7 @@ import (
)
var _ = Describe("HostsFileConfig", func() {
var (
cfg HostsFileConfig
)
var cfg HostsFileConfig
suiteBeforeEach()

View File

@ -7,9 +7,7 @@ import (
)
var _ = Describe("MetricsConfig", func() {
var (
cfg MetricsConfig
)
var cfg MetricsConfig
suiteBeforeEach()

View File

@ -7,9 +7,7 @@ import (
)
var _ = Describe("ParallelBestConfig", func() {
var (
cfg ParallelBestConfig
)
var cfg ParallelBestConfig
suiteBeforeEach()

View File

@ -9,9 +9,7 @@ import (
)
var _ = Describe("QueryLogConfig", func() {
var (
cfg QueryLogConfig
)
var cfg QueryLogConfig
suiteBeforeEach()

View File

@ -7,9 +7,7 @@ import (
)
var _ = Describe("RewriterConfig", func() {
var (
cfg RewriterConfig
)
var cfg RewriterConfig
suiteBeforeEach()

View File

@ -108,4 +108,16 @@ Main: [:material-docker:Docker Hub](https://hub.docker.com/r/spx01/blocky)
Mirror: [:material-github:GitHub Container Registry](https://ghcr.io/0xerr0r/blocky)
## Developer Information
### Docker Images
To enable Docker image creation on a GitHub fork create a secret with the name `DEVELOPMENT_DOCKER` and the value `true`.
This will trigger a workflow on every push of a branch starting with `fb-` and create an image with the branch name.
### Automatic fork sync
To enable automatic fork synchronisation create a secret with the name `FORK_SYNC_TOKEN` with an access token that has write permission to the fork repository.
The enabled workflow will sync the main branch every 30 minutes with its upstream.
--8<-- "docs/includes/abbreviations.md"

View File

@ -16,7 +16,7 @@ func TryAdapt[From, To any](inner SeriesParser[From], adapt func(From) (To, erro
// TryAdaptMethod returns a parser that wraps `inner` and tries to convert each parsed value
// using the given method with pointer receiver of `To`.
func TryAdaptMethod[ToPtr *To, From any, To any](
func TryAdaptMethod[ToPtr *To, From, To any](
inner SeriesParser[From], method func(ToPtr, From) error,
) SeriesParser[*To] {
return TryAdapt(inner, func(from From) (*To, error) {

View File

@ -11,9 +11,7 @@ import (
var _ = Describe("errorFilter", func() {
Describe("AllowErrors", func() {
var (
parser SeriesParser[struct{}]
)
var parser SeriesParser[struct{}]
BeforeEach(func() {
parser = newMockParser(func(res chan<- struct{}, err chan<- error) {

View File

@ -86,9 +86,7 @@ var _ = Describe("Hosts", func() {
})
Describe("HostsIterator.ForEachHost", func() {
var (
entry *HostsIterator
)
var entry *HostsIterator
BeforeEach(func() {
sutReader = linesReader(
@ -227,9 +225,7 @@ var _ = Describe("HostsFile", func() {
})
Describe("HostsFileEntry.forEachHost", func() {
var (
entry *HostsFileEntry
)
var entry *HostsFileEntry
BeforeEach(func() {
sutReader = linesReader(
@ -374,9 +370,7 @@ var _ = Describe("HostList", func() {
})
Describe("HostListEntry.forEachHost", func() {
var (
entry *HostListEntry
)
var entry *HostListEntry
BeforeEach(func() {
sutReader = linesReader(

View File

@ -11,9 +11,7 @@ import (
)
var _ = Describe("ForEach", func() {
var (
lines SeriesParser[string]
)
var lines SeriesParser[string]
BeforeEach(func() {
lines = Lines(linesReader(

View File

@ -15,9 +15,7 @@ const (
byteBits = 8
)
var (
ErrInvalidArpaAddrLen = errors.New("arpa hostname is not of expected length")
)
var ErrInvalidArpaAddrLen = errors.New("arpa hostname is not of expected length")
func ParseIPFromArpaAddr(arpa string) (net.IP, error) {
if strings.HasSuffix(arpa, IPv4PtrSuffix) {