diff --git a/cache/expirationcache/expiration_cache_test.go b/cache/expirationcache/expiration_cache_test.go
index f8313dff..3e3b8b0c 100644
--- a/cache/expirationcache/expiration_cache_test.go
+++ b/cache/expirationcache/expiration_cache_test.go
@@ -123,15 +123,15 @@ var _ = Describe("Expiration cache", func() {
 				Expect(val).Should(BeNil())
 				Expect(onCacheMissChannel).Should(Receive(Equal("notExists")))
-				Expect(onCacheHitChannel).Should(Not(Receive()))
-				Expect(onAfterPutChannel).Should(Not(Receive()))
+				Expect(onCacheHitChannel).ShouldNot(Receive())
+				Expect(onAfterPutChannel).ShouldNot(Receive())
 			})
 
 			By("Put new cache entry", func() {
 				v1 := "v1"
 				cache.Put("key1", &v1, time.Second)
-				Expect(onCacheMissChannel).Should(Not(Receive()))
-				Expect(onCacheMissChannel).Should(Not(Receive()))
+				Expect(onCacheMissChannel).ShouldNot(Receive())
+				Expect(onCacheMissChannel).ShouldNot(Receive())
 				Expect(onAfterPutChannel).Should(Receive(Equal(1)))
 			})
 
@@ -139,9 +139,9 @@ var _ = Describe("Expiration cache", func() {
 				val, _ := cache.Get("key1")
 				Expect(val).Should(HaveValue(Equal("v1")))
-				Expect(onCacheMissChannel).Should(Not(Receive()))
+				Expect(onCacheMissChannel).ShouldNot(Receive())
 				Expect(onCacheHitChannel).Should(Receive(Equal("key1")))
-				Expect(onAfterPutChannel).Should(Not(Receive()))
+				Expect(onAfterPutChannel).ShouldNot(Receive())
 			})
 		})
 	})
 
diff --git a/cache/expirationcache/prefetching_cache_test.go b/cache/expirationcache/prefetching_cache_test.go
index fb94c62f..ad8e2eff 100644
--- a/cache/expirationcache/prefetching_cache_test.go
+++ b/cache/expirationcache/prefetching_cache_test.go
@@ -155,17 +155,17 @@ var _ = Describe("Prefetching expiration cache", func() {
 			By("put a value", func() {
 				v := "v1"
 				cache.Put("key1", &v, 50*time.Millisecond)
-				Expect(onPrefetchAfterPutChannel).Should(Not(Receive()))
-				Expect(onPrefetchEntryReloaded).Should(Not(Receive()))
-				Expect(onnPrefetchCacheHit).Should(Not(Receive()))
+				Expect(onPrefetchAfterPutChannel).ShouldNot(Receive())
+				Expect(onPrefetchEntryReloaded).ShouldNot(Receive())
+				Expect(onnPrefetchCacheHit).ShouldNot(Receive())
 			})
 
 			By("get a value 3 times to trigger prefetching", func() {
 				// first get
 				cache.Get("key1")
 				Expect(onPrefetchAfterPutChannel).Should(Receive(Equal(1)))
-				Expect(onnPrefetchCacheHit).Should(Not(Receive()))
-				Expect(onPrefetchEntryReloaded).Should(Not(Receive()))
+				Expect(onnPrefetchCacheHit).ShouldNot(Receive())
+				Expect(onPrefetchEntryReloaded).ShouldNot(Receive())
 
 				// secont get
 				val, _ := cache.Get("key1")
@@ -176,7 +176,7 @@ var _ = Describe("Prefetching expiration cache", func() {
 
 				// reload was executed
 				Eventually(onPrefetchEntryReloaded).Should(Receive(Equal("key1")))
-				Expect(onnPrefetchCacheHit).Should(Not(Receive()))
+				Expect(onnPrefetchCacheHit).ShouldNot(Receive())
 				// has new value
 				Eventually(func(g Gomega) {
 					val, _ := cache.Get("key1")
diff --git a/config/config_test.go b/config/config_test.go
index e706e999..c2aba5a8 100644
--- a/config/config_test.go
+++ b/config/config_test.go
@@ -165,7 +165,7 @@ var _ = Describe("Config", func() {
 		When("Test file does not exist", func() {
 			It("should fail", func() {
 				_, err := LoadConfig(tmpDir.JoinPath("config-does-not-exist.yaml"), true)
-				Expect(err).Should(Not(Succeed()))
+				Expect(err).ShouldNot(Succeed())
 			})
 		})
 		When("Multiple config files are used", func() {
@@ -199,7 +199,7 @@ var _ = Describe("Config", func() {
 		When("Config folder does not exist", func() {
 			It("should fail", func() {
 				_, err := LoadConfig(tmpDir.JoinPath("does-not-exist-config/"), true)
-				Expect(err).Should(Not(Succeed()))
+				Expect(err).ShouldNot(Succeed())
 			})
 		})
 		When("config file is malformed", func() {
diff --git a/e2e/basic_test.go b/e2e/basic_test.go
index 1ec7876c..e080cf3f 100644
--- a/e2e/basic_test.go
+++ b/e2e/basic_test.go
@@ -181,8 +181,8 @@ var _ = Describe("Basic functional tests", func() {
 					HaveTTL(BeNumerically("<=", 123)),
 				))
 
-				Expect(getContainerLogs(ctx, blocky)).Should(Not(ContainElement(ContainSubstring("google.com"))))
-				Expect(getContainerLogs(ctx, blocky)).Should(Not(ContainElement(ContainSubstring("1.2.3.4"))))
+				Expect(getContainerLogs(ctx, blocky)).ShouldNot(ContainElement(ContainSubstring("google.com")))
+				Expect(getContainerLogs(ctx, blocky)).ShouldNot(ContainElement(ContainSubstring("1.2.3.4")))
 			})
 		})
 	})
diff --git a/e2e/querylog_test.go b/e2e/querylog_test.go
index af4c5756..c69ed31e 100644
--- a/e2e/querylog_test.go
+++ b/e2e/querylog_test.go
@@ -59,7 +59,7 @@ var _ = Describe("Query logs functional tests", func() {
 
 			// database might be slow on first start, retry here if necessary
 			Eventually(gorm.Open, "10s", "1s").
-				WithArguments(mysqlDriver.Open(connectionString), &gorm.Config{}).Should(Not(BeNil()))
+				WithArguments(mysqlDriver.Open(connectionString), &gorm.Config{}).ShouldNot(BeNil())
 
 			db, err = gorm.Open(mysqlDriver.Open(connectionString), &gorm.Config{})
 			Expect(err).Should(Succeed())
@@ -70,9 +70,9 @@ var _ = Describe("Query logs functional tests", func() {
 		It("Should store query log in the mariaDB database", func(ctx context.Context) {
 			By("Performing 2 queries", func() {
 				Expect(doDNSRequest(ctx, blocky,
-					util.NewMsgWithQuestion("google.de.", dns.Type(dns.TypeA)))).Should(Not(BeNil()))
+					util.NewMsgWithQuestion("google.de.", dns.Type(dns.TypeA)))).ShouldNot(BeNil())
 				Expect(doDNSRequest(ctx, blocky,
-					util.NewMsgWithQuestion("unknown.domain.", dns.Type(dns.TypeA)))).Should(Not(BeNil()))
+					util.NewMsgWithQuestion("unknown.domain.", dns.Type(dns.TypeA)))).ShouldNot(BeNil())
 			})
 
 			By("check entries count asynchronously, since blocky flushes log entries in bulk", func() {
@@ -136,7 +136,7 @@ var _ = Describe("Query logs functional tests", func() {
 
 			// database might be slow on first start, retry here if necessary
 			Eventually(gorm.Open, "10s", "1s").
-				WithArguments(postgresDriver.Open(connectionString), &gorm.Config{}).Should(Not(BeNil()))
+				WithArguments(postgresDriver.Open(connectionString), &gorm.Config{}).ShouldNot(BeNil())
 
 			db, err = gorm.Open(postgresDriver.Open(connectionString), &gorm.Config{})
 			Expect(err).Should(Succeed())
@@ -147,8 +147,8 @@ var _ = Describe("Query logs functional tests", func() {
 		msg := util.NewMsgWithQuestion("google.de.", dns.Type(dns.TypeA))
 		It("Should store query log in the postgres database", func(ctx context.Context) {
 			By("Performing 2 queries", func() {
-				Expect(doDNSRequest(ctx, blocky, msg)).Should(Not(BeNil()))
-				Expect(doDNSRequest(ctx, blocky, msg)).Should(Not(BeNil()))
+				Expect(doDNSRequest(ctx, blocky, msg)).ShouldNot(BeNil())
+				Expect(doDNSRequest(ctx, blocky, msg)).ShouldNot(BeNil())
 			})
 
 			By("check entries count asynchronously, since blocky flushes log entries in bulk", func() {
diff --git a/lists/downloader_test.go b/lists/downloader_test.go
index 25a8f2f2..f7879102 100644
--- a/lists/downloader_test.go
+++ b/lists/downloader_test.go
@@ -85,7 +85,7 @@ var _ = Describe("Downloader", func() {
 				reader, err := sut.DownloadFile(ctx, server.URL)
 				Expect(err).Should(Succeed())
-				Expect(reader).Should(Not(BeNil()))
+				Expect(reader).ShouldNot(BeNil())
 				DeferCleanup(reader.Close)
 
 				buf := new(strings.Builder)
 				_, err = io.Copy(buf, reader)
@@ -153,7 +153,7 @@ var _ = Describe("Downloader", func() {
 			It("Should perform a retry and return file content", func(ctx context.Context) {
 				reader, err := sut.DownloadFile(ctx, server.URL)
 				Expect(err).Should(Succeed())
-				Expect(reader).Should(Not(BeNil()))
+				Expect(reader).ShouldNot(BeNil())
 				DeferCleanup(reader.Close)
 
 				buf := new(strings.Builder)
diff --git a/resolver/parallel_best_resolver_test.go b/resolver/parallel_best_resolver_test.go
index c08b5b33..29ee05ca 100644
--- a/resolver/parallel_best_resolver_test.go
+++ b/resolver/parallel_best_resolver_test.go
@@ -120,7 +120,7 @@ var _ = Describe("ParallelBestResolver", Label("parallelBestResolver"), func() {
 					sutVerify = noVerifyUpstreams
 				})
 				It("should start", func() {
-					Expect(err).Should(Not(HaveOccurred()))
+					Expect(err).ShouldNot(HaveOccurred())
 				})
 			})
 		})
diff --git a/resolver/query_logging_resolver_test.go b/resolver/query_logging_resolver_test.go
index 40e326e5..428fd8f3 100644
--- a/resolver/query_logging_resolver_test.go
+++ b/resolver/query_logging_resolver_test.go
@@ -148,7 +148,7 @@ var _ = Describe("QueryLoggingResolver", func() {
 						fmt.Sprintf("%s_client1.log", time.Now().Format("2006-01-02"))))
 					g.Expect(err).Should(Succeed())
 
-					g.Expect(csvLines).Should(Not(BeEmpty()))
+					g.Expect(csvLines).ShouldNot(BeEmpty())
 					g.Expect(csvLines[0][1]).Should(Equal("192.168.178.25"))
 					g.Expect(csvLines[0][2]).Should(Equal("client1"))
 					g.Expect(csvLines[0][4]).Should(Equal("reason"))
diff --git a/resolver/resolver_test.go b/resolver/resolver_test.go
index 840435a8..5f8793e0 100644
--- a/resolver/resolver_test.go
+++ b/resolver/resolver_test.go
@@ -61,7 +61,7 @@ var _ = Describe("Resolver", func() {
 			ch := Chain(&CustomDNSResolver{}, &BlockingResolver{})
 
 			_, err := GetFromChainWithType[*FilteringResolver](ch)
-			Expect(err).Should(Not(Succeed()))
+			Expect(err).ShouldNot(Succeed())
 		})
 	})
 
diff --git a/resolver/strict_resolver_test.go b/resolver/strict_resolver_test.go
index b974044f..0ad0e9dc 100644
--- a/resolver/strict_resolver_test.go
+++ b/resolver/strict_resolver_test.go
@@ -112,7 +112,7 @@ var _ = Describe("StrictResolver", Label("strictResolver"), func() {
 			})
 
 			It("should start normally", func() {
-				Expect(err).Should(Not(HaveOccurred()))
+				Expect(err).ShouldNot(HaveOccurred())
 			})
 		})
 
@@ -138,7 +138,7 @@ var _ = Describe("StrictResolver", Label("strictResolver"), func() {
 					sutVerify = noVerifyUpstreams
 				})
 				It("should start", func() {
-					Expect(err).Should(Not(HaveOccurred()))
+					Expect(err).ShouldNot(HaveOccurred())
 				})
 			})
 		})
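
For reference, the assertion form used throughout the patch is Gomega's negated matcher call ShouldNot(matcher), which behaves the same as Should(Not(matcher)). The spec below is a minimal standalone Ginkgo/Gomega sketch of that idiom against a channel; it is not part of the patch, and the package, file, and spec names are hypothetical — only the matcher calls mirror what the tests use.

// channel_assertions_test.go — standalone illustration, not part of the patch above.
package channelassertions_test

import (
	"testing"

	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestChannelAssertions(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "channel assertions")
}

var _ = Describe("ShouldNot(Receive())", func() {
	It("asserts that no value is currently pending on a channel", func() {
		events := make(chan string, 1)

		// Nothing has been sent yet: the non-blocking receive finds the channel empty.
		Expect(events).ShouldNot(Receive())

		events <- "key1"

		// Exactly one value is pending and can be matched against a sub-matcher.
		Expect(events).Should(Receive(Equal("key1")))

		// The successful Receive above consumed the value, so the channel is empty again.
		Expect(events).ShouldNot(Receive())
	})
})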