test: use dynamic unit test data (#624)

Kwitsch authored 2022-08-06 22:44:26 +02:00, committed by GitHub
parent 5e9eaa2965
commit 9c80a5f9a9
GPG Key ID: 4AEE18F83AFDEB23
19 changed files with 492 additions and 281 deletions
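
The diffs below repeatedly apply the same pattern: static fixtures under testdata/ are replaced by files that each suite writes at test time through the new helpertest package. A condensed, illustrative sketch of that pattern follows; the package name, suite description, and fixture contents are placeholders and are not taken verbatim from any changed file.

package example_test

import (
	"testing"

	"github.com/0xERR0R/blocky/helpertest"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestExample(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "example pattern")
}

var _ = Describe("a resolver using generated fixtures", func() {
	var (
		tmpDir    *helpertest.TmpFolder
		hostsFile *helpertest.TmpFile
	)

	BeforeEach(func() {
		// Per-suite temp folder, removed via DeferCleanup instead of os.RemoveAll.
		tmpDir = helpertest.NewTmpFolder("example")
		Expect(tmpDir.Error).Should(Succeed())
		DeferCleanup(tmpDir.Clean)

		// The fixture that previously lived under ../testdata/ is written on the fly.
		hostsFile = tmpDir.CreateStringFile("hosts.txt",
			"127.0.0.1 localhost",
			"::1       localhost")
		Expect(hostsFile.Error).Should(Succeed())
	})

	It("points the configuration at the generated file", func() {
		// Real suites pass hostsFile.Path into their config structs,
		// e.g. config.HostsFileConfig{Filepath: hostsFile.Path}.
		Expect(hostsFile.Path).Should(BeAnExistingFile())
	})
})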


@ -2,5 +2,5 @@ bin/
dist/
.idea
.github
testdata/
node_modules/
.vscode/


@ -2,27 +2,36 @@ package config
import (
"errors"
"io/ioutil"
"net"
"os"
"path/filepath"
"time"
"github.com/miekg/dns"
"github.com/0xERR0R/blocky/helpertest"
. "github.com/0xERR0R/blocky/log"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("Config", func() {
var (
tmpDir *helpertest.TmpFolder
err error
)
BeforeEach(func() {
tmpDir = helpertest.NewTmpFolder("config")
Expect(tmpDir.Error).Should(Succeed())
DeferCleanup(tmpDir.Clean)
})
Describe("Creation of Config", func() {
When("Test config file will be parsed", func() {
It("should return a valid config struct", func() {
err := os.Chdir("../testdata")
Expect(err).Should(Succeed())
confFile := writeConfigYml(tmpDir)
Expect(confFile.Error).Should(Succeed())
_, err = LoadConfig("config.yml", true)
_, err = LoadConfig(confFile.Path, true)
Expect(err).Should(Succeed())
defaultTestFileConfig()
@ -30,13 +39,16 @@ var _ = Describe("Config", func() {
})
When("Test file does not exist", func() {
It("should fail", func() {
_, err := LoadConfig("../testdata/config-does-not-exist.yaml", true)
_, err := LoadConfig(tmpDir.JoinPath("config-does-not-exist.yaml"), true)
Expect(err).Should(Not(Succeed()))
})
})
When("Multiple config files are used", func() {
It("should return a valid config struct", func() {
_, err := LoadConfig("../testdata/config/", true)
err = writeConfigDir(tmpDir)
Expect(err).Should(Succeed())
_, err := LoadConfig(tmpDir.JoinPath("/"), true)
Expect(err).Should(Succeed())
defaultTestFileConfig()
@ -44,23 +56,16 @@ var _ = Describe("Config", func() {
})
When("Config folder does not exist", func() {
It("should fail", func() {
_, err := LoadConfig("../testdata/does-not-exist-config/", true)
_, err := LoadConfig(tmpDir.JoinPath("does-not-exist-config/"), true)
Expect(err).Should(Not(Succeed()))
})
})
When("config file is malformed", func() {
It("should return error", func() {
cfgFile := tmpDir.CreateStringFile("config.yml", "malformed_config")
Expect(cfgFile.Error).Should(Succeed())
dir, err := ioutil.TempDir("", "blocky")
Expect(err).Should(Succeed())
DeferCleanup(os.RemoveAll, dir)
cfgFile := filepath.Join(dir, "config.yml")
err = ioutil.WriteFile(cfgFile, []byte("malformed_config"), 0600)
Expect(err).Should(Succeed())
_, err = LoadConfig(cfgFile, true)
_, err = LoadConfig(cfgFile.Path, true)
Expect(err).Should(HaveOccurred())
Expect(err.Error()).Should(ContainSubstring("wrong file structure"))
})
@ -176,20 +181,14 @@ bootstrapDns:
When("config directory does not exist", func() {
It("should return error", func() {
err := os.Chdir("../..")
Expect(err).Should(Succeed())
_, err = LoadConfig("config.yml", true)
_, err = LoadConfig(tmpDir.JoinPath("config.yml"), true)
Expect(err).Should(HaveOccurred())
Expect(err.Error()).Should(ContainSubstring("no such file or directory"))
})
It("should use default config if config is not mandatory", func() {
err := os.Chdir("../..")
Expect(err).Should(Succeed())
_, err = LoadConfig("config.yml", false)
_, err = LoadConfig(tmpDir.JoinPath("config.yml"), false)
Expect(err).Should(Succeed())
Expect(config.LogLevel).Should(Equal(LevelInfo))
@ -583,3 +582,120 @@ func defaultTestFileConfig() {
Expect(GetConfig()).Should(Not(BeNil()))
}
func writeConfigYml(tmpDir *helpertest.TmpFolder) *helpertest.TmpFile {
return tmpDir.CreateStringFile("config.yml",
"upstream:",
" default:",
" - tcp+udp:8.8.8.8",
" - tcp+udp:8.8.4.4",
" - 1.1.1.1",
"customDNS:",
" mapping:",
" my.duckdns.org: 192.168.178.3",
" multiple.ips: 192.168.178.3,192.168.178.4,2001:0db8:85a3:08d3:1319:8a2e:0370:7344",
"conditional:",
" mapping:",
" fritz.box: tcp+udp:192.168.178.1",
" multiple.resolvers: tcp+udp:192.168.178.1,tcp+udp:192.168.178.2",
"filtering:",
" queryTypes:",
" - AAAA",
" - A",
"blocking:",
" blackLists:",
" ads:",
" - https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt",
" - https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts",
" - https://mirror1.malwaredomains.com/files/justdomains",
" - http://sysctl.org/cameleon/hosts",
" - https://zeustracker.abuse.ch/blocklist.php?download=domainblocklist",
" - https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt",
" special:",
" - https://hosts-file.net/ad_servers.txt",
" whiteLists:",
" ads:",
" - whitelist.txt",
" clientGroupsBlock:",
" default:",
" - ads",
" - special",
" Laptop-D.fritz.box:",
" - ads",
" blockTTL: 1m",
" refreshPeriod: 120",
"clientLookup:",
" upstream: 192.168.178.1",
" singleNameOrder:",
" - 2",
" - 1",
"queryLog:",
" type: csv-client",
" target: /opt/log",
"port: 55553,:55554,[::1]:55555",
"logLevel: debug",
"dohUserAgent: testBlocky",
"minTlsServeVersion: 1.3")
}
func writeConfigDir(tmpDir *helpertest.TmpFolder) error {
f1 := tmpDir.CreateStringFile("config1.yaml",
"upstream:",
" default:",
" - tcp+udp:8.8.8.8",
" - tcp+udp:8.8.4.4",
" - 1.1.1.1",
"customDNS:",
" mapping:",
" my.duckdns.org: 192.168.178.3",
" multiple.ips: 192.168.178.3,192.168.178.4,2001:0db8:85a3:08d3:1319:8a2e:0370:7344",
"conditional:",
" mapping:",
" fritz.box: tcp+udp:192.168.178.1",
" multiple.resolvers: tcp+udp:192.168.178.1,tcp+udp:192.168.178.2",
"filtering:",
" queryTypes:",
" - AAAA",
" - A")
if f1.Error != nil {
return f1.Error
}
f2 := tmpDir.CreateStringFile("config2.yaml",
"blocking:",
" blackLists:",
" ads:",
" - https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt",
" - https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts",
" - https://mirror1.malwaredomains.com/files/justdomains",
" - http://sysctl.org/cameleon/hosts",
" - https://zeustracker.abuse.ch/blocklist.php?download=domainblocklist",
" - https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt",
" special:",
" - https://hosts-file.net/ad_servers.txt",
" whiteLists:",
" ads:",
" - whitelist.txt",
" clientGroupsBlock:",
" default:",
" - ads",
" - special",
" Laptop-D.fritz.box:",
" - ads",
" blockTTL: 1m",
" refreshPeriod: 120",
"clientLookup:",
" upstream: 192.168.178.1",
" singleNameOrder:",
" - 2",
" - 1",
"queryLog:",
" type: csv-client",
" target: /opt/log",
"port: 55553,:55554,[::1]:55555",
"logLevel: debug",
"dohUserAgent: testBlocky",
"minTlsServeVersion: 1.3")
return f2.Error
}

go.sum

@ -352,6 +352,7 @@ github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLe
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38 h1:yAJXTCF9TqKcTiHJAE8dj7HMvPfh66eeA2JYW7eFpSE=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/trillian v1.3.11/go.mod h1:0tPraVHrSDkA3BO6vKX67zgLXs6SsOAbHEivX+9mPgw=

helpertest/tmpdata.go (new file)

@ -0,0 +1,168 @@
package helpertest
import (
"bufio"
"io/fs"
"os"
"path/filepath"
)
type TmpFolder struct {
Path string
Error error
prefix string
}
type TmpFile struct {
Path string
Error error
Folder *TmpFolder
}
func NewTmpFolder(prefix string) *TmpFolder {
ipref := prefix
if len(ipref) == 0 {
ipref = "blocky"
}
path, err := os.MkdirTemp("", ipref)
res := &TmpFolder{
Path: path,
Error: err,
prefix: ipref,
}
return res
}
func (tf *TmpFolder) Clean() error {
if len(tf.Path) > 0 {
return os.RemoveAll(tf.Path)
}
return nil
}
func (tf *TmpFolder) CreateSubFolder(name string) *TmpFolder {
var path string
var err error
if len(name) > 0 {
path = filepath.Join(tf.Path, name)
err = os.Mkdir(path, fs.ModePerm)
} else {
path, err = os.MkdirTemp(tf.Path, tf.prefix)
}
res := &TmpFolder{
Path: path,
Error: err,
prefix: tf.prefix,
}
return res
}
func (tf *TmpFolder) CreateEmptyFile(name string) *TmpFile {
f, err := tf.createFile(name)
if err != nil {
return tf.newErrorTmpFile(err)
}
return tf.checkState(f, err)
}
func (tf *TmpFolder) CreateStringFile(name string, lines ...string) *TmpFile {
f, err := tf.createFile(name)
if err != nil {
return tf.newErrorTmpFile(err)
}
first := true
w := bufio.NewWriter(f)
for _, l := range lines {
if first {
first = false
} else {
_, err = w.WriteString("\n")
}
if err != nil {
break
}
_, err = w.WriteString(l)
if err != nil {
break
}
}
w.Flush()
return tf.checkState(f, err)
}
func (tf *TmpFolder) JoinPath(name string) string {
return filepath.Join(tf.Path, name)
}
func (tf *TmpFolder) CountFiles() (int, error) {
files, err := os.ReadDir(tf.Path)
if err != nil {
return 0, err
}
return len(files), nil
}
func (tf *TmpFolder) createFile(name string) (*os.File, error) {
if len(name) > 0 {
return os.Create(filepath.Join(tf.Path, name))
}
return os.CreateTemp(tf.Path, "temp")
}
func (tf *TmpFolder) newErrorTmpFile(err error) *TmpFile {
return &TmpFile{
Path: "",
Error: err,
Folder: tf,
}
}
func (tf *TmpFolder) checkState(file *os.File, ierr error) *TmpFile {
err := ierr
filepath := ""
if file != nil {
filepath = file.Name()
file.Close()
_, err = os.Stat(filepath)
}
return &TmpFile{
Path: filepath,
Error: err,
Folder: tf,
}
}
func (tf *TmpFile) Stat() error {
if tf.Error != nil {
return tf.Error
}
_, res := os.Stat(tf.Path)
return res
}
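
For reference outside Ginkgo, a minimal, self-contained usage example of the helpers above (illustrative only, not part of the commit):

package helpertest_test

import (
	"fmt"

	"github.com/0xERR0R/blocky/helpertest"
)

func ExampleTmpFolder() {
	// Create a temporary folder; Error carries any failure from os.MkdirTemp.
	folder := helpertest.NewTmpFolder("example")
	if folder.Error != nil {
		panic(folder.Error)
	}
	defer folder.Clean()

	// CreateStringFile joins the given lines with "\n" and writes them into the folder.
	file := folder.CreateStringFile("hosts.txt", "127.0.0.1 localhost", "::1 localhost")
	if file.Error != nil {
		panic(file.Error)
	}

	// JoinPath and CountFiles replace the filepath.Join/ioutil.ReadDir calls used before.
	count, err := folder.CountFiles()
	if err != nil {
		panic(err)
	}

	fmt.Println(file.Path == folder.JoinPath("hosts.txt"), count, file.Stat() == nil)
	// Output: true 1 true
}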


@ -6,11 +6,10 @@ import (
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"time"
"github.com/0xERR0R/blocky/helpertest"
"github.com/0xERR0R/blocky/log"
"github.com/0xERR0R/blocky/model"
@ -22,15 +21,15 @@ import (
var _ = Describe("FileWriter", func() {
var (
tmpDir string
tmpDir *helpertest.TmpFolder
err error
writer *FileWriter
)
JustBeforeEach(func() {
tmpDir, err = ioutil.TempDir("", "fileWriter")
Expect(err).Should(Succeed())
DeferCleanup(func() { os.RemoveAll(tmpDir) })
tmpDir = helpertest.NewTmpFolder("fileWriter")
Expect(tmpDir.Error).Should(Succeed())
DeferCleanup(tmpDir.Clean)
})
Describe("CSV writer", func() {
@ -42,7 +41,7 @@ var _ = Describe("FileWriter", func() {
})
When("New log entry was created", func() {
It("should be logged in one file", func() {
writer, err = NewCSVWriter(tmpDir, false, 0)
writer, err = NewCSVWriter(tmpDir.Path, false, 0)
Expect(err).Should(Succeed())
@ -85,12 +84,13 @@ var _ = Describe("FileWriter", func() {
})
Eventually(func(g Gomega) int {
return len(readCsv(filepath.Join(tmpDir, fmt.Sprintf("%s_ALL.log", time.Now().Format("2006-01-02")))))
return len(readCsv(tmpDir.JoinPath(
fmt.Sprintf("%s_ALL.log", time.Now().Format("2006-01-02")))))
}).Should(Equal(2))
})
It("should be logged in separate files per client", func() {
writer, err = NewCSVWriter(tmpDir, true, 0)
writer, err = NewCSVWriter(tmpDir.Path, true, 0)
Expect(err).Should(Succeed())
@ -133,18 +133,20 @@ var _ = Describe("FileWriter", func() {
})
Eventually(func(g Gomega) int {
return len(readCsv(filepath.Join(tmpDir, fmt.Sprintf("%s_client1.log", time.Now().Format("2006-01-02")))))
return len(readCsv(tmpDir.JoinPath(
fmt.Sprintf("%s_client1.log", time.Now().Format("2006-01-02")))))
}).Should(Equal(1))
Eventually(func(g Gomega) int {
return len(readCsv(filepath.Join(tmpDir, fmt.Sprintf("%s_client2.log", time.Now().Format("2006-01-02")))))
return len(readCsv(tmpDir.JoinPath(
fmt.Sprintf("%s_client2.log", time.Now().Format("2006-01-02")))))
}).Should(Equal(1))
})
})
When("Cleanup is called", func() {
It("should delete old files", func() {
writer, err = NewCSVWriter(tmpDir, false, 1)
writer, err = NewCSVWriter(tmpDir.Path, false, 1)
Expect(err).Should(Succeed())
@ -180,25 +182,26 @@ var _ = Describe("FileWriter", func() {
Reason: "Resolved",
RType: model.ResponseTypeRESOLVED,
},
Start: time.Now().AddDate(0, 0, -2),
Start: time.Now().AddDate(0, 0, -3),
DurationMs: 20,
})
})
fmt.Println(tmpDir.Path)
Eventually(func(g Gomega) int {
files, err := ioutil.ReadDir(tmpDir)
filesCount, err := tmpDir.CountFiles()
g.Expect(err).Should(Succeed())
return len(files)
return filesCount
}, "20s", "1s").Should(Equal(2))
go writer.CleanUp()
Eventually(func(g Gomega) int {
files, err := ioutil.ReadDir(tmpDir)
filesCount, err := tmpDir.CountFiles()
g.Expect(err).Should(Succeed())
return len(files)
return filesCount
}, "20s", "1s").Should(Equal(1))
})
})


@ -11,7 +11,6 @@ import (
"github.com/alicebob/miniredis/v2"
"github.com/creasty/defaults"
"os"
"time"
"github.com/miekg/dns"
@ -20,27 +19,26 @@ import (
"github.com/stretchr/testify/mock"
)
var group1File, group2File, defaultGroupFile *os.File
var group1File, group2File, defaultGroupFile *TmpFile
var tmpDir *TmpFolder
var _ = BeforeSuite(func() {
group1File = TempFile("DOMAIN1.com")
DeferCleanup(os.Remove, group1File.Name())
tmpDir = NewTmpFolder("BlockingResolver")
Expect(tmpDir.Error).Should(Succeed())
DeferCleanup(tmpDir.Clean)
group2File = TempFile("blocked2.com")
DeferCleanup(os.Remove, group2File.Name())
group1File = tmpDir.CreateStringFile("group1File", "DOMAIN1.com")
Expect(group1File.Error).Should(Succeed())
defaultGroupFile = TempFile(
`blocked3.com
123.145.123.145
2001:db8:85a3:08d3::370:7344
badcnamedomain.com`)
DeferCleanup(os.Remove, defaultGroupFile.Name())
})
group2File = tmpDir.CreateStringFile("group2File", "blocked2.com")
Expect(group2File.Error).Should(Succeed())
var _ = AfterSuite(func() {
_ = group1File.Close()
_ = group2File.Close()
_ = defaultGroupFile.Close()
defaultGroupFile = tmpDir.CreateStringFile("defaultGroupFile",
"blocked3.com",
"123.145.123.145",
"2001:db8:85a3:08d3::370:7344",
"badcnamedomain.com")
Expect(defaultGroupFile.Error).Should(Succeed())
})
var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
@ -90,8 +88,8 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
BlockType: "ZEROIP",
BlockTTL: config.Duration(time.Minute),
BlackLists: map[string][]string{
"gr1": {group1File.Name()},
"gr2": {group2File.Name()},
"gr1": {group1File.Path},
"gr2": {group2File.Path},
},
}
})
@ -119,8 +117,8 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
BlockType: "ZEROIP",
BlockTTL: config.Duration(time.Minute),
BlackLists: map[string][]string{
"gr1": {group1File.Name()},
"gr2": {group2File.Name()},
"gr1": {group1File.Path},
"gr2": {group2File.Path},
},
ClientGroupsBlock: map[string][]string{
"default": {"gr1"},
@ -159,9 +157,9 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
sutConfig = config.BlockingConfig{
BlockTTL: config.Duration(6 * time.Hour),
BlackLists: map[string][]string{
"gr1": {group1File.Name()},
"gr2": {group2File.Name()},
"defaultGroup": {defaultGroupFile.Name()},
"gr1": {group1File.Path},
"gr2": {group2File.Path},
"defaultGroup": {defaultGroupFile.Path},
},
ClientGroupsBlock: map[string][]string{
"client1": {"gr1"},
@ -298,7 +296,7 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
sutConfig = config.BlockingConfig{
BlockTTL: config.Duration(time.Minute),
BlackLists: map[string][]string{
"defaultGroup": {defaultGroupFile.Name()},
"defaultGroup": {defaultGroupFile.Path},
},
ClientGroupsBlock: map[string][]string{
"default": {"defaultGroup"},
@ -323,7 +321,7 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
sutConfig = config.BlockingConfig{
BlockType: "ZEROIP",
BlackLists: map[string][]string{
"defaultGroup": {defaultGroupFile.Name()},
"defaultGroup": {defaultGroupFile.Path},
},
ClientGroupsBlock: map[string][]string{
"default": {"defaultGroup"},
@ -358,7 +356,7 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
sutConfig = config.BlockingConfig{
BlockTTL: config.Duration(6 * time.Hour),
BlackLists: map[string][]string{
"defaultGroup": {defaultGroupFile.Name()},
"defaultGroup": {defaultGroupFile.Path},
},
ClientGroupsBlock: map[string][]string{
"default": {"defaultGroup"},
@ -386,7 +384,7 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
BeforeEach(func() {
sutConfig = config.BlockingConfig{
BlackLists: map[string][]string{
"defaultGroup": {defaultGroupFile.Name()},
"defaultGroup": {defaultGroupFile.Path},
},
ClientGroupsBlock: map[string][]string{
"default": {"defaultGroup"},
@ -456,8 +454,8 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
sutConfig = config.BlockingConfig{
BlockType: "ZEROIP",
BlockTTL: config.Duration(time.Minute),
BlackLists: map[string][]string{"gr1": {group1File.Name()}},
WhiteLists: map[string][]string{"gr1": {group1File.Name()}},
BlackLists: map[string][]string{"gr1": {group1File.Path}},
WhiteLists: map[string][]string{"gr1": {group1File.Path}},
ClientGroupsBlock: map[string][]string{
"default": {"gr1"},
},
@ -477,8 +475,8 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
BlockType: "zeroIP",
BlockTTL: config.Duration(60 * time.Second),
WhiteLists: map[string][]string{
"gr1": {group1File.Name()},
"gr2": {group2File.Name()},
"gr1": {group1File.Path},
"gr2": {group2File.Path},
},
ClientGroupsBlock: map[string][]string{
"default": {"gr1"},
@ -538,8 +536,8 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
sutConfig = config.BlockingConfig{
BlockType: "ZEROIP",
BlockTTL: config.Duration(time.Minute),
BlackLists: map[string][]string{"gr1": {group1File.Name()}},
WhiteLists: map[string][]string{"gr1": {defaultGroupFile.Name()}},
BlackLists: map[string][]string{"gr1": {group1File.Path}},
WhiteLists: map[string][]string{"gr1": {defaultGroupFile.Path}},
ClientGroupsBlock: map[string][]string{
"default": {"gr1"},
},
@ -560,7 +558,7 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
sutConfig = config.BlockingConfig{
BlockType: "ZEROIP",
BlockTTL: config.Duration(time.Minute),
BlackLists: map[string][]string{"gr1": {group1File.Name()}},
BlackLists: map[string][]string{"gr1": {group1File.Path}},
ClientGroupsBlock: map[string][]string{
"default": {"gr1"},
},
@ -595,8 +593,8 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
BeforeEach(func() {
sutConfig = config.BlockingConfig{
BlackLists: map[string][]string{
"defaultGroup": {defaultGroupFile.Name()},
"group1": {group1File.Name()},
"defaultGroup": {defaultGroupFile.Path},
"group1": {group1File.Path},
},
ClientGroupsBlock: map[string][]string{
"default": {"defaultGroup", "group1"},
@ -826,7 +824,7 @@ var _ = Describe("BlockingResolver", Label("blockingResolver"), func() {
sutConfig = config.BlockingConfig{
BlockType: "ZEROIP",
BlockTTL: config.Duration(time.Minute),
BlackLists: map[string][]string{"gr1": {group1File.Name()}},
BlackLists: map[string][]string{"gr1": {group1File.Path}},
ClientGroupsBlock: map[string][]string{
"default": {"gr1"},
},


@ -16,17 +16,26 @@ import (
var _ = Describe("HostsFileResolver", func() {
var (
sut *HostsFileResolver
m *MockResolver
err error
resp *Response
sut *HostsFileResolver
m *MockResolver
err error
resp *Response
tmpDir *TmpFolder
tmpFile *TmpFile
)
TTL := uint32(time.Now().Second())
BeforeEach(func() {
tmpDir = NewTmpFolder("HostsFileResolver")
Expect(tmpDir.Error).Should(Succeed())
DeferCleanup(tmpDir.Clean)
tmpFile = writeHostFile(tmpDir)
Expect(tmpFile.Error).Should(Succeed())
cfg := config.HostsFileConfig{
Filepath: "../testdata/hosts.txt",
Filepath: tmpFile.Path,
HostsTTL: config.Duration(time.Duration(TTL) * time.Second),
RefreshPeriod: config.Duration(30 * time.Minute),
}
@ -79,7 +88,7 @@ var _ = Describe("HostsFileResolver", func() {
When("Hosts file can be located", func() {
It("should parse it successfully", func() {
Expect(sut).Should(Not(BeNil()))
Expect(sut).ShouldNot(BeNil())
Expect(sut.hosts).Should(HaveLen(7))
})
})
@ -192,3 +201,21 @@ var _ = Describe("HostsFileResolver", func() {
})
})
})
func writeHostFile(tmpDir *TmpFolder) *TmpFile {
return tmpDir.CreateStringFile("hosts.txt",
"# Random comment",
"127.0.0.1 localhost",
"127.0.1.1 localhost2 localhost2.local.lan",
"::1 localhost",
"# Two empty lines to follow",
"",
"",
"faaf:faaf:faaf:faaf::1 ipv6host ipv6host.local.lan",
"192.168.2.1 ipv4host ipv4host.local.lan",
"10.0.0.1 router0 router1 router2",
"10.0.0.2 router3 # Another comment",
"10.0.0.3 # Invalid entry",
"300.300.300.300 invalid4 # Invalid IPv4",
"abcd:efgh:ijkl::1 invalid6 # Invalud IPv6")
}


@ -6,11 +6,10 @@ import (
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"time"
"github.com/0xERR0R/blocky/helpertest"
"github.com/0xERR0R/blocky/querylog"
"github.com/0xERR0R/blocky/config"
@ -43,15 +42,15 @@ var _ = Describe("QueryLoggingResolver", func() {
err error
resp *Response
m *MockResolver
tmpDir string
tmpDir *helpertest.TmpFolder
mockAnswer *dns.Msg
)
BeforeEach(func() {
mockAnswer = new(dns.Msg)
tmpDir, err = ioutil.TempDir("", "queryLoggingResolver")
Expect(err).Should(Succeed())
DeferCleanup(os.RemoveAll, tmpDir)
tmpDir = helpertest.NewTmpFolder("queryLoggingResolver")
Expect(tmpDir.Error).Should(Succeed())
DeferCleanup(tmpDir.Clean)
})
JustBeforeEach(func() {
@ -80,7 +79,7 @@ var _ = Describe("QueryLoggingResolver", func() {
When("Configuration with logging per client", func() {
BeforeEach(func() {
sutConfig = config.QueryLogConfig{
Target: tmpDir,
Target: tmpDir.Path,
Type: config.QueryLogTypeCsvClient,
CreationAttempts: 1,
CreationCooldown: config.Duration(time.Millisecond),
@ -103,7 +102,8 @@ var _ = Describe("QueryLoggingResolver", func() {
By("check log for client1", func() {
Eventually(func(g Gomega) {
csvLines, err := readCsv(filepath.Join(tmpDir, fmt.Sprintf("%s_client1.log", time.Now().Format("2006-01-02"))))
csvLines, err := readCsv(tmpDir.JoinPath(
fmt.Sprintf("%s_client1.log", time.Now().Format("2006-01-02"))))
g.Expect(err).Should(Succeed())
g.Expect(csvLines).Should(Not(BeEmpty()))
@ -118,7 +118,7 @@ var _ = Describe("QueryLoggingResolver", func() {
By("check log for client2", func() {
Eventually(func(g Gomega) {
csvLines, err := readCsv(filepath.Join(tmpDir,
csvLines, err := readCsv(tmpDir.JoinPath(
fmt.Sprintf("%s_cl_ient2_test.log", time.Now().Format("2006-01-02"))))
g.Expect(err).Should(Succeed())
@ -135,7 +135,7 @@ var _ = Describe("QueryLoggingResolver", func() {
When("Configuration with logging in one file for all clients", func() {
BeforeEach(func() {
sutConfig = config.QueryLogConfig{
Target: tmpDir,
Target: tmpDir.Path,
Type: config.QueryLogTypeCsv,
CreationAttempts: 1,
CreationCooldown: config.Duration(time.Millisecond),
@ -156,7 +156,8 @@ var _ = Describe("QueryLoggingResolver", func() {
By("check log", func() {
Eventually(func(g Gomega) {
csvLines, err := readCsv(filepath.Join(tmpDir, fmt.Sprintf("%s_ALL.log", time.Now().Format("2006-01-02"))))
csvLines, err := readCsv(tmpDir.JoinPath(
fmt.Sprintf("%s_ALL.log", time.Now().Format("2006-01-02"))))
g.Expect(err).Should(Succeed())
g.Expect(csvLines).Should(HaveLen(2))
@ -207,7 +208,7 @@ var _ = Describe("QueryLoggingResolver", func() {
When("resolver is enabled", func() {
BeforeEach(func() {
sutConfig = config.QueryLogConfig{
Target: tmpDir,
Target: tmpDir.Path,
Type: config.QueryLogTypeCsvClient,
LogRetentionDays: 0,
CreationAttempts: 1,
@ -238,7 +239,7 @@ var _ = Describe("QueryLoggingResolver", func() {
When("log directory contains old files", func() {
BeforeEach(func() {
sutConfig = config.QueryLogConfig{
Target: tmpDir,
Target: tmpDir.Path,
Type: config.QueryLogTypeCsv,
LogRetentionDays: 7,
CreationAttempts: 1,
@ -248,27 +249,22 @@ var _ = Describe("QueryLoggingResolver", func() {
It("should remove files older than defined log retention", func() {
// create 2 files, 7 and 8 days old
dateBefore7Days := time.Now().AddDate(0, 0, -7)
dateBefore8Days := time.Now().AddDate(0, 0, -8)
dateBefore9Days := time.Now().AddDate(0, 0, -9)
f1, err := os.Create(filepath.Join(tmpDir, fmt.Sprintf("%s-test.log", dateBefore7Days.Format("2006-01-02"))))
Expect(err).Should(Succeed())
f1Name := f1.Name()
f1.Close()
f1 := tmpDir.CreateEmptyFile(fmt.Sprintf("%s-test.log", dateBefore7Days.Format("2006-01-02")))
Expect(f1.Error).Should(Succeed())
f2, err := os.Create(filepath.Join(tmpDir, fmt.Sprintf("%s-test.log", dateBefore8Days.Format("2006-01-02"))))
Expect(err).Should(Succeed())
f2Name := f2.Name()
f2.Close()
f2 := tmpDir.CreateEmptyFile(fmt.Sprintf("%s-test.log", dateBefore9Days.Format("2006-01-02")))
Expect(f2.Error).Should(Succeed())
sut.doCleanUp()
Eventually(func(g Gomega) {
// file 1 exist
_, ierr1 := os.Stat(f1Name)
g.Expect(ierr1).Should(Succeed())
g.Expect(f1.Stat()).Should(Succeed())
// file 2 was deleted
_, ierr2 := os.Stat(f2Name)
ierr2 := f2.Stat()
g.Expect(ierr2).Should(HaveOccurred())
g.Expect(os.IsNotExist(ierr2)).Should(BeTrue())
}).Should(Succeed())


@ -75,6 +75,28 @@ var _ = BeforeSuite(func() {
upstreamFritzbox = fritzboxMockUpstream.Start()
upstreamGoogle = googleMockUpstream.Start()
tmpDir := NewTmpFolder("server")
Expect(tmpDir.Error).Should(Succeed())
DeferCleanup(tmpDir.Clean)
certPem := writeCertPem(tmpDir)
Expect(certPem.Error).Should(Succeed())
keyPem := writeKeyPem(tmpDir)
Expect(keyPem.Error).Should(Succeed())
doubleclickFile := tmpDir.CreateStringFile("doubleclick.net.txt", "doubleclick.net", "doubleclick.net.cn")
Expect(doubleclickFile.Error).Should(Succeed())
bildFile := tmpDir.CreateStringFile("www.bild.de.txt", "www.bild.de")
Expect(bildFile.Error).Should(Succeed())
heiseFile := tmpDir.CreateStringFile("heise.de.txt", "heise.de")
Expect(heiseFile.Error).Should(Succeed())
youtubeFile := tmpDir.CreateStringFile("youtube.com.txt", "youtube.com")
Expect(youtubeFile.Error).Should(Succeed())
// create server
sut, err = NewServer(&config.Config{
CustomDNS: config.CustomDNSConfig{
@ -97,13 +119,13 @@ var _ = BeforeSuite(func() {
Blocking: config.BlockingConfig{
BlackLists: map[string][]string{
"ads": {
"../testdata/doubleclick.net.txt",
"../testdata/www.bild.de.txt",
"../testdata/heise.de.txt"},
"youtube": {"../testdata/youtube.com.txt"}},
doubleclickFile.Path,
bildFile.Path,
heiseFile.Path},
"youtube": {youtubeFile.Path}},
WhiteLists: map[string][]string{
"ads": {"../testdata/heise.de.txt"},
"whitelist": {"../testdata/heise.de.txt"},
"ads": {heiseFile.Path},
"whitelist": {heiseFile.Path},
},
ClientGroupsBlock: map[string][]string{
"default": {"ads"},
@ -123,8 +145,8 @@ var _ = BeforeSuite(func() {
DNSPorts: config.ListenConfig{"55555"},
TLSPorts: config.ListenConfig{"8853"},
CertFile: "../testdata/cert.pem",
KeyFile: "../testdata/key.pem",
CertFile: certPem.Path,
KeyFile: keyPem.Path,
HTTPPorts: config.ListenConfig{"4000"},
HTTPSPorts: config.ListenConfig{"4443"},
Prometheus: config.PrometheusConfig{
@ -707,3 +729,41 @@ func requestServer(request *dns.Msg) *dns.Msg {
return nil
}
func writeCertPem(tmpDir *TmpFolder) *TmpFile {
return tmpDir.CreateStringFile("cert.pem",
"-----BEGIN CERTIFICATE-----",
"MIICMzCCAZygAwIBAgIRAJCCrDTGEtZfRpxDY1KAoswwDQYJKoZIhvcNAQELBQAw",
"EjEQMA4GA1UEChMHQWNtZSBDbzAgFw03MDAxMDEwMDAwMDBaGA8yMDg0MDEyOTE2",
"MDAwMFowEjEQMA4GA1UEChMHQWNtZSBDbzCBnzANBgkqhkiG9w0BAQEFAAOBjQAw",
"gYkCgYEA4mEaF5yWYYrTfMgRXdBpgGnqsHIADQWlw7BIJWD/gNp+fgp4TUZ/7ggV",
"rrvRORvRFjw14avd9L9EFP7XLi8ViU3uoE1UWI32MlrKqLbGNCXyUIApIoqlbRg6",
"iErxIk5+ChzFuysQOx01S2yv/ML6dx7NOGHs1S38MUzRZtcXBH8CAwEAAaOBhjCB",
"gzAOBgNVHQ8BAf8EBAMCAqQwEwYDVR0lBAwwCgYIKwYBBQUHAwEwDwYDVR0TAQH/",
"BAUwAwEB/zAdBgNVHQ4EFgQUslNI6tYIv909RttHaZVMS/u/VYYwLAYDVR0RBCUw",
"I4IJbG9jYWxob3N0hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABMA0GCSqGSIb3DQEB",
"CwUAA4GBAJ2gRpQHr5Qj7dt26bYVMdN4JGXTsvjbVrJfKI0VfPGJ+SUY/uTVBUeX",
"+Cwv4DFEPBlNx/lzuUkwmRaExC4/w81LWwxe5KltYsjyJuYowiUbLZ6tzLaQ9Bcx",
"jxClAVvgj90TGYOwsv6ESOX7GWteN1FlD3+jk7vefjFagaKKFYR9",
"-----END CERTIFICATE-----")
}
func writeKeyPem(tmpDir *TmpFolder) *TmpFile {
return tmpDir.CreateStringFile("key.pem",
"-----BEGIN PRIVATE KEY-----",
"MIICeAIBADANBgkqhkiG9w0BAQEFAASCAmIwggJeAgEAAoGBAOJhGheclmGK03zI",
"EV3QaYBp6rByAA0FpcOwSCVg/4Dafn4KeE1Gf+4IFa670Tkb0RY8NeGr3fS/RBT+",
"1y4vFYlN7qBNVFiN9jJayqi2xjQl8lCAKSKKpW0YOohK8SJOfgocxbsrEDsdNUts",
"r/zC+ncezThh7NUt/DFM0WbXFwR/AgMBAAECgYEA1exixstPhI+2+OTrHFc1S4dL",
"oz+ncqbSlZEBLGl0KWTQQfVM5+FmRR7Yto1/0lLKDBQL6t0J2x3fjWOhHmCaHKZA",
"VAvZ8+OKxwofih3hlO0tGCB8szUJygp2FAmd0rOUqvPQ+PTohZEUXyDaB8MOIbX+",
"qoo7g19+VlbyKqmM8HkCQQDs4GQJwEn7GXKllSMyOfiYnjQM2pwsqO0GivXkH+p3",
"+h5KDp4g3O4EbmbrvZyZB2euVsBjW3pFMu+xPXuOXf91AkEA9KfC7LGLD2OtLmrM",
"iCZAqHlame+uEEDduDmqjTPnNKUWVeRtYKMF5Hltbeo1jMXMSbVZ+fRWKfQ+HAhQ",
"xjFJowJAV6U7PqRoe0FSO1QwXrA2fHnk9nCY4qlqckZObyckAVqJhIteFPjKFNeo",
"u0dAPxsPUOGGc/zwA9Sx/ZmrMuUy1QJBALl7bqawO/Ng6G0mfwZBqgeQaYYHVnnw",
"E6iV353J2eHpvzNDSUFYlyEOhk4soIindSf0m9CK08Be8a+jBkocF+0CQQC+Hi7L",
"kZV1slpW82BxYIhs9Gb0OQgK8SsI4aQPTFGUarQXXAm4eRqBO0kaG+jGX6TtW353",
"EHK784GIxwVXKej/",
"-----END PRIVATE KEY-----")
}

testdata/cert.pem

@ -1,14 +0,0 @@
-----BEGIN CERTIFICATE-----
MIICMzCCAZygAwIBAgIRAJCCrDTGEtZfRpxDY1KAoswwDQYJKoZIhvcNAQELBQAw
EjEQMA4GA1UEChMHQWNtZSBDbzAgFw03MDAxMDEwMDAwMDBaGA8yMDg0MDEyOTE2
MDAwMFowEjEQMA4GA1UEChMHQWNtZSBDbzCBnzANBgkqhkiG9w0BAQEFAAOBjQAw
gYkCgYEA4mEaF5yWYYrTfMgRXdBpgGnqsHIADQWlw7BIJWD/gNp+fgp4TUZ/7ggV
rrvRORvRFjw14avd9L9EFP7XLi8ViU3uoE1UWI32MlrKqLbGNCXyUIApIoqlbRg6
iErxIk5+ChzFuysQOx01S2yv/ML6dx7NOGHs1S38MUzRZtcXBH8CAwEAAaOBhjCB
gzAOBgNVHQ8BAf8EBAMCAqQwEwYDVR0lBAwwCgYIKwYBBQUHAwEwDwYDVR0TAQH/
BAUwAwEB/zAdBgNVHQ4EFgQUslNI6tYIv909RttHaZVMS/u/VYYwLAYDVR0RBCUw
I4IJbG9jYWxob3N0hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABMA0GCSqGSIb3DQEB
CwUAA4GBAJ2gRpQHr5Qj7dt26bYVMdN4JGXTsvjbVrJfKI0VfPGJ+SUY/uTVBUeX
+Cwv4DFEPBlNx/lzuUkwmRaExC4/w81LWwxe5KltYsjyJuYowiUbLZ6tzLaQ9Bcx
jxClAVvgj90TGYOwsv6ESOX7GWteN1FlD3+jk7vefjFagaKKFYR9
-----END CERTIFICATE-----

testdata/config.yml

@ -1,54 +0,0 @@
upstream:
default:
- tcp+udp:8.8.8.8
- tcp+udp:8.8.4.4
- 1.1.1.1
customDNS:
mapping:
my.duckdns.org: 192.168.178.3
multiple.ips: 192.168.178.3,192.168.178.4,2001:0db8:85a3:08d3:1319:8a2e:0370:7344
conditional:
mapping:
fritz.box: tcp+udp:192.168.178.1
multiple.resolvers: tcp+udp:192.168.178.1,tcp+udp:192.168.178.2
filtering:
queryTypes:
- AAAA
- A
blocking:
blackLists:
ads:
- https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt
- https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts
- https://mirror1.malwaredomains.com/files/justdomains
- http://sysctl.org/cameleon/hosts
- https://zeustracker.abuse.ch/blocklist.php?download=domainblocklist
- https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt
special:
- https://hosts-file.net/ad_servers.txt
whiteLists:
ads:
- whitelist.txt
clientGroupsBlock:
default:
- ads
- special
Laptop-D.fritz.box:
- ads
blockTTL: 1m
# without unit -> use minutes
refreshPeriod: 120
clientLookup:
upstream: 192.168.178.1
singleNameOrder:
- 2
- 1
queryLog:
type: csv-client
target: /opt/log
port: 55553,:55554,[::1]:55555
logLevel: debug
dohUserAgent: testBlocky
minTlsServeVersion: 1.3


@ -1,18 +0,0 @@
upstream:
default:
- tcp+udp:8.8.8.8
- tcp+udp:8.8.4.4
- 1.1.1.1
customDNS:
mapping:
my.duckdns.org: 192.168.178.3
multiple.ips: 192.168.178.3,192.168.178.4,2001:0db8:85a3:08d3:1319:8a2e:0370:7344
conditional:
mapping:
fritz.box: tcp+udp:192.168.178.1
multiple.resolvers: tcp+udp:192.168.178.1,tcp+udp:192.168.178.2
filtering:
queryTypes:
- AAAA
- A


@ -1,37 +0,0 @@
blocking:
blackLists:
ads:
- https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt
- https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts
- https://mirror1.malwaredomains.com/files/justdomains
- http://sysctl.org/cameleon/hosts
- https://zeustracker.abuse.ch/blocklist.php?download=domainblocklist
- https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt
special:
- https://hosts-file.net/ad_servers.txt
whiteLists:
ads:
- whitelist.txt
clientGroupsBlock:
default:
- ads
- special
Laptop-D.fritz.box:
- ads
blockTTL: 1m
# without unit -> use minutes
refreshPeriod: 120
clientLookup:
upstream: 192.168.178.1
singleNameOrder:
- 2
- 1
queryLog:
type: csv-client
target: /opt/log
port: 55553,:55554,[::1]:55555
logLevel: debug
dohUserAgent: testBlocky
minTlsServeVersion: 1.3


@ -1,2 +0,0 @@
doubleclick.net
doubleclick.net.cn


@ -1 +0,0 @@
heise.de

testdata/hosts.txt

@ -1,14 +0,0 @@
# Random comment
127.0.0.1 localhost
127.0.1.1 localhost2 localhost2.local.lan
::1 localhost
# Two empty lines to follow
faaf:faaf:faaf:faaf::1 ipv6host ipv6host.local.lan
192.168.2.1 ipv4host ipv4host.local.lan
10.0.0.1 router0 router1 router2
10.0.0.2 router3 # Another comment
10.0.0.3 # Invalid entry
300.300.300.300 invalid4 # Invalid IPv4
abcd:efgh:ijkl::1 invalid6 # Invalud IPv6

testdata/key.pem

@ -1,16 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIICeAIBADANBgkqhkiG9w0BAQEFAASCAmIwggJeAgEAAoGBAOJhGheclmGK03zI
EV3QaYBp6rByAA0FpcOwSCVg/4Dafn4KeE1Gf+4IFa670Tkb0RY8NeGr3fS/RBT+
1y4vFYlN7qBNVFiN9jJayqi2xjQl8lCAKSKKpW0YOohK8SJOfgocxbsrEDsdNUts
r/zC+ncezThh7NUt/DFM0WbXFwR/AgMBAAECgYEA1exixstPhI+2+OTrHFc1S4dL
oz+ncqbSlZEBLGl0KWTQQfVM5+FmRR7Yto1/0lLKDBQL6t0J2x3fjWOhHmCaHKZA
VAvZ8+OKxwofih3hlO0tGCB8szUJygp2FAmd0rOUqvPQ+PTohZEUXyDaB8MOIbX+
qoo7g19+VlbyKqmM8HkCQQDs4GQJwEn7GXKllSMyOfiYnjQM2pwsqO0GivXkH+p3
+h5KDp4g3O4EbmbrvZyZB2euVsBjW3pFMu+xPXuOXf91AkEA9KfC7LGLD2OtLmrM
iCZAqHlame+uEEDduDmqjTPnNKUWVeRtYKMF5Hltbeo1jMXMSbVZ+fRWKfQ+HAhQ
xjFJowJAV6U7PqRoe0FSO1QwXrA2fHnk9nCY4qlqckZObyckAVqJhIteFPjKFNeo
u0dAPxsPUOGGc/zwA9Sx/ZmrMuUy1QJBALl7bqawO/Ng6G0mfwZBqgeQaYYHVnnw
E6iV353J2eHpvzNDSUFYlyEOhk4soIindSf0m9CK08Be8a+jBkocF+0CQQC+Hi7L
kZV1slpW82BxYIhs9Gb0OQgK8SsI4aQPTFGUarQXXAm4eRqBO0kaG+jGX6TtW353
EHK784GIxwVXKej/
-----END PRIVATE KEY-----


@ -1 +0,0 @@
www.bild.de


@ -1 +0,0 @@
youtube.com