From ec3c604a83a0dd52b6fa4beb7d14d11619eb1252 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fr=C3=A9d=C3=A9ric=20Guillot?= Date: Sun, 21 Feb 2021 13:42:49 -0800 Subject: [PATCH] Add option to allow self-signed or invalid certificates --- api/subscription.go | 1 + cli/cli.go | 4 +- client/{core.go => model.go} | 103 +++++++-------- database/migrations.go | 7 ++ http/client/client.go | 15 ++- locale/translations/de_DE.json | 1 + locale/translations/en_US.json | 1 + locale/translations/es_ES.json | 1 + locale/translations/fr_FR.json | 1 + locale/translations/it_IT.json | 1 + locale/translations/ja_JP.json | 1 + locale/translations/nl_NL.json | 1 + locale/translations/pl_PL.json | 1 + locale/translations/pt_BR.json | 1 + locale/translations/ru_RU.json | 1 + locale/translations/zh_CN.json | 1 + model/feed.go | 117 ++++++++++-------- model/subscription.go | 11 +- reader/handler/handler.go | 23 +++- reader/icon/finder.go | 9 +- reader/processor/processor.go | 17 ++- reader/scraper/scraper.go | 7 +- reader/subscription/finder.go | 3 +- storage/feed.go | 10 +- storage/feed_query_builder.go | 2 + .../templates/views/add_subscription.html | 2 + .../templates/views/choose_subscription.html | 3 + template/templates/views/edit_feed.html | 1 + tests/feed_test.go | 56 +++++++++ ui/entry_scraper.go | 26 +++- ui/feed_edit.go | 31 ++--- ui/form/feed.go | 63 +++++----- ui/form/subscription.go | 46 +++---- ui/subscription_choose.go | 23 ++-- ui/subscription_submit.go | 24 ++-- 35 files changed, 388 insertions(+), 227 deletions(-) rename client/{core.go => model.go} (62%) diff --git a/api/subscription.go b/api/subscription.go index 514671fa..246dde7f 100644 --- a/api/subscription.go +++ b/api/subscription.go @@ -32,6 +32,7 @@ func (h *handler) discoverSubscriptions(w http.ResponseWriter, r *http.Request) subscriptionDiscoveryRequest.Username, subscriptionDiscoveryRequest.Password, subscriptionDiscoveryRequest.FetchViaProxy, + subscriptionDiscoveryRequest.AllowSelfSignedCertificates, ) if finderErr != nil { json.ServerError(w, r, finderErr) diff --git a/cli/cli.go b/cli/cli.go index 826f0f42..084765c6 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -145,7 +145,9 @@ func Parse() { } if flagMigrate { - database.Migrate(db) + if err := database.Migrate(db); err != nil { + logger.Fatal(`%v`, err) + } return } diff --git a/client/core.go b/client/model.go similarity index 62% rename from client/core.go rename to client/model.go index 02475490..938b13c6 100644 --- a/client/core.go +++ b/client/model.go @@ -100,64 +100,67 @@ type Subscriptions []*Subscription // Feed represents a Miniflux feed. 
type Feed struct { - ID int64 `json:"id"` - UserID int64 `json:"user_id"` - FeedURL string `json:"feed_url"` - SiteURL string `json:"site_url"` - Title string `json:"title"` - CheckedAt time.Time `json:"checked_at,omitempty"` - EtagHeader string `json:"etag_header,omitempty"` - LastModifiedHeader string `json:"last_modified_header,omitempty"` - ParsingErrorMsg string `json:"parsing_error_message,omitempty"` - ParsingErrorCount int `json:"parsing_error_count,omitempty"` - Disabled bool `json:"disabled"` - IgnoreHTTPCache bool `json:"ignore_http_cache"` - FetchViaProxy bool `json:"fetch_via_proxy"` - ScraperRules string `json:"scraper_rules"` - RewriteRules string `json:"rewrite_rules"` - BlocklistRules string `json:"blocklist_rules"` - KeeplistRules string `json:"keeplist_rules"` - Crawler bool `json:"crawler"` - UserAgent string `json:"user_agent"` - Username string `json:"username"` - Password string `json:"password"` - Category *Category `json:"category,omitempty"` + ID int64 `json:"id"` + UserID int64 `json:"user_id"` + FeedURL string `json:"feed_url"` + SiteURL string `json:"site_url"` + Title string `json:"title"` + CheckedAt time.Time `json:"checked_at,omitempty"` + EtagHeader string `json:"etag_header,omitempty"` + LastModifiedHeader string `json:"last_modified_header,omitempty"` + ParsingErrorMsg string `json:"parsing_error_message,omitempty"` + ParsingErrorCount int `json:"parsing_error_count,omitempty"` + Disabled bool `json:"disabled"` + IgnoreHTTPCache bool `json:"ignore_http_cache"` + AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"` + FetchViaProxy bool `json:"fetch_via_proxy"` + ScraperRules string `json:"scraper_rules"` + RewriteRules string `json:"rewrite_rules"` + BlocklistRules string `json:"blocklist_rules"` + KeeplistRules string `json:"keeplist_rules"` + Crawler bool `json:"crawler"` + UserAgent string `json:"user_agent"` + Username string `json:"username"` + Password string `json:"password"` + Category *Category `json:"category,omitempty"` } // FeedCreationRequest represents the request to create a feed. type FeedCreationRequest struct { - FeedURL string `json:"feed_url"` - CategoryID int64 `json:"category_id"` - UserAgent string `json:"user_agent"` - Username string `json:"username"` - Password string `json:"password"` - Crawler bool `json:"crawler"` - Disabled bool `json:"disabled"` - IgnoreHTTPCache bool `json:"ignore_http_cache"` - FetchViaProxy bool `json:"fetch_via_proxy"` - ScraperRules string `json:"scraper_rules"` - RewriteRules string `json:"rewrite_rules"` - BlocklistRules string `json:"blocklist_rules"` - KeeplistRules string `json:"keeplist_rules"` + FeedURL string `json:"feed_url"` + CategoryID int64 `json:"category_id"` + UserAgent string `json:"user_agent"` + Username string `json:"username"` + Password string `json:"password"` + Crawler bool `json:"crawler"` + Disabled bool `json:"disabled"` + IgnoreHTTPCache bool `json:"ignore_http_cache"` + AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"` + FetchViaProxy bool `json:"fetch_via_proxy"` + ScraperRules string `json:"scraper_rules"` + RewriteRules string `json:"rewrite_rules"` + BlocklistRules string `json:"blocklist_rules"` + KeeplistRules string `json:"keeplist_rules"` } // FeedModificationRequest represents the request to update a feed. 
type FeedModificationRequest struct { - FeedURL *string `json:"feed_url"` - SiteURL *string `json:"site_url"` - Title *string `json:"title"` - ScraperRules *string `json:"scraper_rules"` - RewriteRules *string `json:"rewrite_rules"` - BlocklistRules *string `json:"blocklist_rules"` - KeeplistRules *string `json:"keeplist_rules"` - Crawler *bool `json:"crawler"` - UserAgent *string `json:"user_agent"` - Username *string `json:"username"` - Password *string `json:"password"` - CategoryID *int64 `json:"category_id"` - Disabled *bool `json:"disabled"` - IgnoreHTTPCache *bool `json:"ignore_http_cache"` - FetchViaProxy *bool `json:"fetch_via_proxy"` + FeedURL *string `json:"feed_url"` + SiteURL *string `json:"site_url"` + Title *string `json:"title"` + ScraperRules *string `json:"scraper_rules"` + RewriteRules *string `json:"rewrite_rules"` + BlocklistRules *string `json:"blocklist_rules"` + KeeplistRules *string `json:"keeplist_rules"` + Crawler *bool `json:"crawler"` + UserAgent *string `json:"user_agent"` + Username *string `json:"username"` + Password *string `json:"password"` + CategoryID *int64 `json:"category_id"` + Disabled *bool `json:"disabled"` + IgnoreHTTPCache *bool `json:"ignore_http_cache"` + AllowSelfSignedCertificates *bool `json:"allow_self_signed_certificates"` + FetchViaProxy *bool `json:"fetch_via_proxy"` } // FeedIcon represents the feed icon. diff --git a/database/migrations.go b/database/migrations.go index 7ddd39a2..2f108874 100644 --- a/database/migrations.go +++ b/database/migrations.go @@ -10,6 +10,7 @@ import ( var schemaVersion = len(migrations) +// Order is important. Add new migrations at the end of the list. var migrations = []func(tx *sql.Tx) error{ func(tx *sql.Tx) (err error) { sql := ` @@ -514,4 +515,10 @@ var migrations = []func(tx *sql.Tx) error{ `) return err }, + func(tx *sql.Tx) (err error) { + _, err = tx.Exec(` + ALTER TABLE feeds ADD COLUMN allow_self_signed_certificates boolean not null default false + `) + return err + }, } diff --git a/http/client/client.go b/http/client/client.go index 33f7bd4a..8b3b169a 100644 --- a/http/client/client.go +++ b/http/client/client.go @@ -6,6 +6,7 @@ package client // import "miniflux.app/http/client" import ( "bytes" + "crypto/tls" "crypto/x509" "encoding/json" "fmt" @@ -50,9 +51,10 @@ type Client struct { useProxy bool doNotFollowRedirects bool - ClientTimeout int - ClientMaxBodySize int64 - ClientProxyURL string + ClientTimeout int + ClientMaxBodySize int64 + ClientProxyURL string + AllowSelfSignedCertificates bool } // New initializes a new HTTP client. 
@@ -87,13 +89,14 @@ func (c *Client) String() string { } return fmt.Sprintf( - `InputURL=%q RequestURL=%q ETag=%s LastModified=%s Auth=%v UserAgent=%q`, + `InputURL=%q RequestURL=%q ETag=%s LastMod=%s Auth=%v UserAgent=%q Verify=%v`, c.inputURL, c.requestURL, etagHeader, lastModifiedHeader, c.requestAuthorizationHeader != "" || (c.requestUsername != "" && c.requestPassword != ""), c.requestUserAgent, + !c.AllowSelfSignedCertificates, ) } @@ -288,6 +291,10 @@ func (c *Client) buildClient() http.Client { IdleConnTimeout: 10 * time.Second, } + if c.AllowSelfSignedCertificates { + transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true} + } + if c.doNotFollowRedirects { client.CheckRedirect = func(req *http.Request, via []*http.Request) error { return http.ErrUseLastResponse diff --git a/locale/translations/de_DE.json b/locale/translations/de_DE.json index f630eaf8..478a66ba 100644 --- a/locale/translations/de_DE.json +++ b/locale/translations/de_DE.json @@ -267,6 +267,7 @@ "form.feed.label.blocklist_rules": "Blockierregeln", "form.feed.label.keeplist_rules": "Erlaubnisregeln", "form.feed.label.ignore_http_cache": "Ignoriere HTTP-cache", + "form.feed.label.allow_self_signed_certificates": "Erlaube selbstsignierte oder ungültige Zertifikate", "form.feed.label.fetch_via_proxy": "Über Proxy abrufen", "form.feed.label.disabled": "Dieses Abonnement nicht aktualisieren", "form.category.label.title": "Titel", diff --git a/locale/translations/en_US.json b/locale/translations/en_US.json index 4ca9d050..75e7e1b0 100644 --- a/locale/translations/en_US.json +++ b/locale/translations/en_US.json @@ -267,6 +267,7 @@ "form.feed.label.blocklist_rules": "Block Rules", "form.feed.label.keeplist_rules": "Keep Rules", "form.feed.label.ignore_http_cache": "Ignore HTTP cache", + "form.feed.label.allow_self_signed_certificates": "Allow self-signed or invalid certificates", "form.feed.label.fetch_via_proxy": "Fetch via proxy", "form.feed.label.disabled": "Do not refresh this feed", "form.category.label.title": "Title", diff --git a/locale/translations/es_ES.json b/locale/translations/es_ES.json index 9d4c75a3..ab2f5e1d 100644 --- a/locale/translations/es_ES.json +++ b/locale/translations/es_ES.json @@ -267,6 +267,7 @@ "form.feed.label.blocklist_rules": "Reglas de Filtrado(Bloquear)", "form.feed.label.keeplist_rules": "Reglas de Filtrado(Permitir)", "form.feed.label.ignore_http_cache": "Ignorar caché HTTP", + "form.feed.label.allow_self_signed_certificates": "Permitir certificados autofirmados o no válidos", "form.feed.label.fetch_via_proxy": "Buscar a través de proxy", "form.feed.label.disabled": "No actualice este feed", "form.category.label.title": "Título", diff --git a/locale/translations/fr_FR.json b/locale/translations/fr_FR.json index ed7d5a00..708f9d8f 100644 --- a/locale/translations/fr_FR.json +++ b/locale/translations/fr_FR.json @@ -267,6 +267,7 @@ "form.feed.label.blocklist_rules": "Règles de blocage", "form.feed.label.keeplist_rules": "Règles d'autorisation", "form.feed.label.ignore_http_cache": "Ignorer le cache HTTP", + "form.feed.label.allow_self_signed_certificates": "Autoriser les certificats auto-signés ou non valides", "form.feed.label.fetch_via_proxy": "Récupérer via proxy", "form.feed.label.disabled": "Ne pas actualiser ce flux", "form.category.label.title": "Titre", diff --git a/locale/translations/it_IT.json b/locale/translations/it_IT.json index 06628b53..f857d563 100644 --- a/locale/translations/it_IT.json +++ b/locale/translations/it_IT.json @@ -267,6 +267,7 @@ 
"form.feed.label.blocklist_rules": "Regole di blocco", "form.feed.label.keeplist_rules": "Regole di autorizzazione", "form.feed.label.ignore_http_cache": "Ignora cache HTTP", + "form.feed.label.allow_self_signed_certificates": "Consenti certificati autofirmati o non validi", "form.feed.label.fetch_via_proxy": "Recuperare tramite proxy", "form.feed.label.disabled": "Non aggiornare questo feed", "form.category.label.title": "Titolo", diff --git a/locale/translations/ja_JP.json b/locale/translations/ja_JP.json index 5407fb47..4ac53262 100644 --- a/locale/translations/ja_JP.json +++ b/locale/translations/ja_JP.json @@ -267,6 +267,7 @@ "form.feed.label.blocklist_rules": "ブロックルール", "form.feed.label.keeplist_rules": "許可規則", "form.feed.label.ignore_http_cache": "HTTPキャッシュを無視", + "form.feed.label.allow_self_signed_certificates": "自己署名証明書または無効な証明書を許可する", "form.feed.label.fetch_via_proxy": "プロキシ経由でフェッチ", "form.feed.label.disabled": "このフィードを更新しない", "form.category.label.title": "タイトル", diff --git a/locale/translations/nl_NL.json b/locale/translations/nl_NL.json index f0d7b9ee..fdc86657 100644 --- a/locale/translations/nl_NL.json +++ b/locale/translations/nl_NL.json @@ -267,6 +267,7 @@ "form.feed.label.blocklist_rules": "Blokkeer regels", "form.feed.label.keeplist_rules": "toestemmingsregels", "form.feed.label.ignore_http_cache": "Negeer HTTP-cache", + "form.feed.label.allow_self_signed_certificates": "Sta zelfondertekende of ongeldige certificaten toe", "form.feed.label.fetch_via_proxy": "Ophalen via proxy", "form.feed.label.disabled": "Vernieuw deze feed niet", "form.category.label.title": "Naam", diff --git a/locale/translations/pl_PL.json b/locale/translations/pl_PL.json index d9ee57f0..71f00fbf 100644 --- a/locale/translations/pl_PL.json +++ b/locale/translations/pl_PL.json @@ -269,6 +269,7 @@ "form.feed.label.blocklist_rules": "Zasady blokowania", "form.feed.label.keeplist_rules": "Zasady zezwoleń", "form.feed.label.ignore_http_cache": "Zignoruj ​​pamięć podręczną HTTP", + "form.feed.label.allow_self_signed_certificates": "Zezwalaj na certyfikaty z podpisem własnym lub nieprawidłowe certyfikaty", "form.feed.label.fetch_via_proxy": "Pobierz przez proxy", "form.feed.label.disabled": "Не обновлять этот канал", "form.category.label.title": "Tytuł", diff --git a/locale/translations/pt_BR.json b/locale/translations/pt_BR.json index 8c289dd1..35bda31b 100644 --- a/locale/translations/pt_BR.json +++ b/locale/translations/pt_BR.json @@ -267,6 +267,7 @@ "form.feed.label.blocklist_rules": "Regras de bloqueio", "form.feed.label.keeplist_rules": "Regras de permissão", "form.feed.label.ignore_http_cache": "Ignorar cache HTTP", + "form.feed.label.allow_self_signed_certificates": "Permitir certificados autoassinados ou inválidos", "form.feed.label.disabled": "Não atualizar esta fonte", "form.feed.label.fetch_via_proxy": "Buscar via proxy", "form.category.label.title": "Título", diff --git a/locale/translations/ru_RU.json b/locale/translations/ru_RU.json index ff598c2c..770f3bee 100644 --- a/locale/translations/ru_RU.json +++ b/locale/translations/ru_RU.json @@ -269,6 +269,7 @@ "form.feed.label.blocklist_rules": "Правила блокировки", "form.feed.label.keeplist_rules": "правила разрешений", "form.feed.label.ignore_http_cache": "Игнорировать HTTP-кеш", + "form.feed.label.allow_self_signed_certificates": "Разрешить самоподписанные или недействительные сертификаты", "form.feed.label.fetch_via_proxy": "Получить через прокси", "form.feed.label.disabled": "Не обновлять этот канал", "form.category.label.title": "Название", 
diff --git a/locale/translations/zh_CN.json b/locale/translations/zh_CN.json index 2b05f89f..29ee1e51 100644 --- a/locale/translations/zh_CN.json +++ b/locale/translations/zh_CN.json @@ -265,6 +265,7 @@ "form.feed.label.blocklist_rules": "封锁规则", "form.feed.label.keeplist_rules": "许可规则", "form.feed.label.ignore_http_cache": "忽略HTTP缓存", + "form.feed.label.allow_self_signed_certificates": "允许自签名或无效的证书", "form.feed.label.fetch_via_proxy": "通过代理获取", "form.feed.label.disabled": "请勿刷新此Feed", "form.category.label.title": "标题", diff --git a/model/feed.go b/model/feed.go index 5bcddfd7..5f3b622c 100644 --- a/model/feed.go +++ b/model/feed.go @@ -24,33 +24,34 @@ const ( // Feed represents a feed in the application. type Feed struct { - ID int64 `json:"id"` - UserID int64 `json:"user_id"` - FeedURL string `json:"feed_url"` - SiteURL string `json:"site_url"` - Title string `json:"title"` - CheckedAt time.Time `json:"checked_at"` - NextCheckAt time.Time `json:"next_check_at"` - EtagHeader string `json:"etag_header"` - LastModifiedHeader string `json:"last_modified_header"` - ParsingErrorMsg string `json:"parsing_error_message"` - ParsingErrorCount int `json:"parsing_error_count"` - ScraperRules string `json:"scraper_rules"` - RewriteRules string `json:"rewrite_rules"` - Crawler bool `json:"crawler"` - BlocklistRules string `json:"blocklist_rules"` - KeeplistRules string `json:"keeplist_rules"` - UserAgent string `json:"user_agent"` - Username string `json:"username"` - Password string `json:"password"` - Disabled bool `json:"disabled"` - IgnoreHTTPCache bool `json:"ignore_http_cache"` - FetchViaProxy bool `json:"fetch_via_proxy"` - Category *Category `json:"category,omitempty"` - Entries Entries `json:"entries,omitempty"` - Icon *FeedIcon `json:"icon"` - UnreadCount int `json:"-"` - ReadCount int `json:"-"` + ID int64 `json:"id"` + UserID int64 `json:"user_id"` + FeedURL string `json:"feed_url"` + SiteURL string `json:"site_url"` + Title string `json:"title"` + CheckedAt time.Time `json:"checked_at"` + NextCheckAt time.Time `json:"next_check_at"` + EtagHeader string `json:"etag_header"` + LastModifiedHeader string `json:"last_modified_header"` + ParsingErrorMsg string `json:"parsing_error_message"` + ParsingErrorCount int `json:"parsing_error_count"` + ScraperRules string `json:"scraper_rules"` + RewriteRules string `json:"rewrite_rules"` + Crawler bool `json:"crawler"` + BlocklistRules string `json:"blocklist_rules"` + KeeplistRules string `json:"keeplist_rules"` + UserAgent string `json:"user_agent"` + Username string `json:"username"` + Password string `json:"password"` + Disabled bool `json:"disabled"` + IgnoreHTTPCache bool `json:"ignore_http_cache"` + AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"` + FetchViaProxy bool `json:"fetch_via_proxy"` + Category *Category `json:"category,omitempty"` + Entries Entries `json:"entries,omitempty"` + Icon *FeedIcon `json:"icon"` + UnreadCount int `json:"-"` + ReadCount int `json:"-"` } func (f *Feed) String() string { @@ -117,38 +118,40 @@ func (f *Feed) ScheduleNextCheck(weeklyCount int) { // FeedCreationRequest represents the request to create a feed. 
type FeedCreationRequest struct { - FeedURL string `json:"feed_url"` - CategoryID int64 `json:"category_id"` - UserAgent string `json:"user_agent"` - Username string `json:"username"` - Password string `json:"password"` - Crawler bool `json:"crawler"` - Disabled bool `json:"disabled"` - IgnoreHTTPCache bool `json:"ignore_http_cache"` - FetchViaProxy bool `json:"fetch_via_proxy"` - ScraperRules string `json:"scraper_rules"` - RewriteRules string `json:"rewrite_rules"` - BlocklistRules string `json:"blocklist_rules"` - KeeplistRules string `json:"keeplist_rules"` + FeedURL string `json:"feed_url"` + CategoryID int64 `json:"category_id"` + UserAgent string `json:"user_agent"` + Username string `json:"username"` + Password string `json:"password"` + Crawler bool `json:"crawler"` + Disabled bool `json:"disabled"` + IgnoreHTTPCache bool `json:"ignore_http_cache"` + AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"` + FetchViaProxy bool `json:"fetch_via_proxy"` + ScraperRules string `json:"scraper_rules"` + RewriteRules string `json:"rewrite_rules"` + BlocklistRules string `json:"blocklist_rules"` + KeeplistRules string `json:"keeplist_rules"` } // FeedModificationRequest represents the request to update a feed. type FeedModificationRequest struct { - FeedURL *string `json:"feed_url"` - SiteURL *string `json:"site_url"` - Title *string `json:"title"` - ScraperRules *string `json:"scraper_rules"` - RewriteRules *string `json:"rewrite_rules"` - BlocklistRules *string `json:"blocklist_rules"` - KeeplistRules *string `json:"keeplist_rules"` - Crawler *bool `json:"crawler"` - UserAgent *string `json:"user_agent"` - Username *string `json:"username"` - Password *string `json:"password"` - CategoryID *int64 `json:"category_id"` - Disabled *bool `json:"disabled"` - IgnoreHTTPCache *bool `json:"ignore_http_cache"` - FetchViaProxy *bool `json:"fetch_via_proxy"` + FeedURL *string `json:"feed_url"` + SiteURL *string `json:"site_url"` + Title *string `json:"title"` + ScraperRules *string `json:"scraper_rules"` + RewriteRules *string `json:"rewrite_rules"` + BlocklistRules *string `json:"blocklist_rules"` + KeeplistRules *string `json:"keeplist_rules"` + Crawler *bool `json:"crawler"` + UserAgent *string `json:"user_agent"` + Username *string `json:"username"` + Password *string `json:"password"` + CategoryID *int64 `json:"category_id"` + Disabled *bool `json:"disabled"` + IgnoreHTTPCache *bool `json:"ignore_http_cache"` + AllowSelfSignedCertificates *bool `json:"allow_self_signed_certificates"` + FetchViaProxy *bool `json:"fetch_via_proxy"` } // Patch updates a feed with modified values. @@ -209,6 +212,10 @@ func (f *FeedModificationRequest) Patch(feed *Feed) { feed.IgnoreHTTPCache = *f.IgnoreHTTPCache } + if f.AllowSelfSignedCertificates != nil { + feed.AllowSelfSignedCertificates = *f.AllowSelfSignedCertificates + } + if f.FetchViaProxy != nil { feed.FetchViaProxy = *f.FetchViaProxy } diff --git a/model/subscription.go b/model/subscription.go index f866e891..8fddd9ed 100644 --- a/model/subscription.go +++ b/model/subscription.go @@ -6,9 +6,10 @@ package model // import "miniflux.app/model" // SubscriptionDiscoveryRequest represents a request to discover subscriptions. 
type SubscriptionDiscoveryRequest struct { - URL string `json:"url"` - UserAgent string `json:"user_agent"` - Username string `json:"username"` - Password string `json:"password"` - FetchViaProxy bool `json:"fetch_via_proxy"` + URL string `json:"url"` + UserAgent string `json:"user_agent"` + Username string `json:"username"` + Password string `json:"password"` + FetchViaProxy bool `json:"fetch_via_proxy"` + AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"` } diff --git a/reader/handler/handler.go b/reader/handler/handler.go index ddaaa7bd..c003b970 100644 --- a/reader/handler/handler.go +++ b/reader/handler/handler.go @@ -39,6 +39,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model request := client.NewClientWithConfig(feedCreationRequest.FeedURL, config.Opts) request.WithCredentials(feedCreationRequest.Username, feedCreationRequest.Password) request.WithUserAgent(feedCreationRequest.UserAgent) + request.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates if feedCreationRequest.FetchViaProxy { request.WithProxy() @@ -65,6 +66,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model subscription.Crawler = feedCreationRequest.Crawler subscription.Disabled = feedCreationRequest.Disabled subscription.IgnoreHTTPCache = feedCreationRequest.IgnoreHTTPCache + subscription.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates subscription.FetchViaProxy = feedCreationRequest.FetchViaProxy subscription.ScraperRules = feedCreationRequest.ScraperRules subscription.RewriteRules = feedCreationRequest.RewriteRules @@ -82,7 +84,13 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model logger.Debug("[CreateFeed] Feed saved with ID: %d", subscription.ID) - checkFeedIcon(store, subscription.ID, subscription.SiteURL, feedCreationRequest.FetchViaProxy) + checkFeedIcon( + store, + subscription.ID, + subscription.SiteURL, + feedCreationRequest.FetchViaProxy, + feedCreationRequest.AllowSelfSignedCertificates, + ) return subscription, nil } @@ -116,6 +124,7 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error { request := client.NewClientWithConfig(originalFeed.FeedURL, config.Opts) request.WithCredentials(originalFeed.Username, originalFeed.Password) request.WithUserAgent(originalFeed.UserAgent) + request.AllowSelfSignedCertificates = originalFeed.AllowSelfSignedCertificates if !originalFeed.IgnoreHTTPCache { request.WithCacheHeaders(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) @@ -162,7 +171,13 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error { // We update caching headers only if the feed has been modified, // because some websites don't return the same headers when replying with a 304. 
originalFeed.WithClientResponse(response) - checkFeedIcon(store, originalFeed.ID, originalFeed.SiteURL, originalFeed.FetchViaProxy) + checkFeedIcon( + store, + originalFeed.ID, + originalFeed.SiteURL, + originalFeed.FetchViaProxy, + originalFeed.AllowSelfSignedCertificates, + ) } else { logger.Debug("[RefreshFeed] Feed #%d not modified", feedID) } @@ -178,9 +193,9 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error { return nil } -func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string, fetchViaProxy bool) { +func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string, fetchViaProxy, allowSelfSignedCertificates bool) { if !store.HasIcon(feedID) { - icon, err := icon.FindIcon(websiteURL, fetchViaProxy) + icon, err := icon.FindIcon(websiteURL, fetchViaProxy, allowSelfSignedCertificates) if err != nil { logger.Debug(`[CheckFeedIcon] %v (feedID=%d websiteURL=%s)`, err, feedID, websiteURL) } else if icon == nil { diff --git a/reader/icon/finder.go b/reader/icon/finder.go index 52142270..e53fcd6e 100644 --- a/reader/icon/finder.go +++ b/reader/icon/finder.go @@ -21,12 +21,14 @@ import ( ) // FindIcon try to find the website's icon. -func FindIcon(websiteURL string, fetchViaProxy bool) (*model.Icon, error) { +func FindIcon(websiteURL string, fetchViaProxy, allowSelfSignedCertificates bool) (*model.Icon, error) { rootURL := url.RootURL(websiteURL) clt := client.NewClientWithConfig(rootURL, config.Opts) + clt.AllowSelfSignedCertificates = allowSelfSignedCertificates if fetchViaProxy { clt.WithProxy() } + response, err := clt.Get() if err != nil { return nil, fmt.Errorf("unable to download website index page: %v", err) @@ -46,7 +48,7 @@ func FindIcon(websiteURL string, fetchViaProxy bool) (*model.Icon, error) { } logger.Debug("[FindIcon] Fetching icon => %s", iconURL) - icon, err := downloadIcon(iconURL, fetchViaProxy) + icon, err := downloadIcon(iconURL, fetchViaProxy, allowSelfSignedCertificates) if err != nil { return nil, err } @@ -89,8 +91,9 @@ func parseDocument(websiteURL string, data io.Reader) (string, error) { return iconURL, nil } -func downloadIcon(iconURL string, fetchViaProxy bool) (*model.Icon, error) { +func downloadIcon(iconURL string, fetchViaProxy, allowSelfSignedCertificates bool) (*model.Icon, error) { clt := client.NewClientWithConfig(iconURL, config.Opts) + clt.AllowSelfSignedCertificates = allowSelfSignedCertificates if fetchViaProxy { clt.WithProxy() } diff --git a/reader/processor/processor.go b/reader/processor/processor.go index 90cb5712..885b8515 100644 --- a/reader/processor/processor.go +++ b/reader/processor/processor.go @@ -50,7 +50,12 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) { logger.Debug("[Processor] Crawling entry %q from feed %q", entry.URL, feed.FeedURL) startTime := time.Now() - content, scraperErr := scraper.Fetch(entry.URL, feed.ScraperRules, feed.UserAgent) + content, scraperErr := scraper.Fetch( + entry.URL, + feed.ScraperRules, + feed.UserAgent, + feed.AllowSelfSignedCertificates, + ) if config.Opts.HasMetricsCollector() { status := "success" @@ -118,9 +123,15 @@ func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool { } // ProcessEntryWebPage downloads the entry web page and apply rewrite rules. 
-func ProcessEntryWebPage(entry *model.Entry) error { +func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry) error { startTime := time.Now() - content, scraperErr := scraper.Fetch(entry.URL, entry.Feed.ScraperRules, entry.Feed.UserAgent) + content, scraperErr := scraper.Fetch( + entry.URL, + entry.Feed.ScraperRules, + entry.Feed.UserAgent, + feed.AllowSelfSignedCertificates, + ) + if config.Opts.HasMetricsCollector() { status := "success" if scraperErr != nil { diff --git a/reader/scraper/scraper.go b/reader/scraper/scraper.go index f1b7e244..37cf29ec 100644 --- a/reader/scraper/scraper.go +++ b/reader/scraper/scraper.go @@ -20,11 +20,10 @@ import ( ) // Fetch downloads a web page and returns relevant contents. -func Fetch(websiteURL, rules, userAgent string) (string, error) { +func Fetch(websiteURL, rules, userAgent string, allowSelfSignedCertificates bool) (string, error) { clt := client.NewClientWithConfig(websiteURL, config.Opts) - if userAgent != "" { - clt.WithUserAgent(userAgent) - } + clt.WithUserAgent(userAgent) + clt.AllowSelfSignedCertificates = allowSelfSignedCertificates response, err := clt.Get() if err != nil { diff --git a/reader/subscription/finder.go b/reader/subscription/finder.go index f9b7413b..ee755a9e 100644 --- a/reader/subscription/finder.go +++ b/reader/subscription/finder.go @@ -27,13 +27,14 @@ var ( ) // FindSubscriptions downloads and try to find one or more subscriptions from an URL. -func FindSubscriptions(websiteURL, userAgent, username, password string, fetchViaProxy bool) (Subscriptions, *errors.LocalizedError) { +func FindSubscriptions(websiteURL, userAgent, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *errors.LocalizedError) { websiteURL = findYoutubeChannelFeed(websiteURL) websiteURL = parseYoutubeVideoPage(websiteURL) clt := client.NewClientWithConfig(websiteURL, config.Opts) clt.WithCredentials(username, password) clt.WithUserAgent(userAgent) + clt.AllowSelfSignedCertificates = allowSelfSignedCertificates if fetchViaProxy { clt.WithProxy() diff --git a/storage/feed.go b/storage/feed.go index e9d51d1b..9c4fec3d 100644 --- a/storage/feed.go +++ b/storage/feed.go @@ -201,10 +201,11 @@ func (s *Storage) CreateFeed(feed *model.Feed) error { blocklist_rules, keeplist_rules, ignore_http_cache, + allow_self_signed_certificates, fetch_via_proxy ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18) + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19) RETURNING id ` @@ -227,6 +228,7 @@ func (s *Storage) CreateFeed(feed *model.Feed) error { feed.BlocklistRules, feed.KeeplistRules, feed.IgnoreHTTPCache, + feed.AllowSelfSignedCertificates, feed.FetchViaProxy, ).Scan(&feed.ID) if err != nil { @@ -283,9 +285,10 @@ func (s *Storage) UpdateFeed(feed *model.Feed) (err error) { disabled=$18, next_check_at=$19, ignore_http_cache=$20, - fetch_via_proxy=$21 + allow_self_signed_certificates=$21, + fetch_via_proxy=$22 WHERE - id=$22 AND user_id=$23 + id=$23 AND user_id=$24 ` _, err = s.db.Exec(query, feed.FeedURL, @@ -308,6 +311,7 @@ func (s *Storage) UpdateFeed(feed *model.Feed) (err error) { feed.Disabled, feed.NextCheckAt, feed.IgnoreHTTPCache, + feed.AllowSelfSignedCertificates, feed.FetchViaProxy, feed.ID, feed.UserID, diff --git a/storage/feed_query_builder.go b/storage/feed_query_builder.go index a38c5226..0400a701 100644 --- a/storage/feed_query_builder.go +++ b/storage/feed_query_builder.go @@ -162,6 +162,7 @@ func (f *FeedQueryBuilder) 
GetFeeds() (model.Feeds, error) { f.username, f.password, f.ignore_http_cache, + f.allow_self_signed_certificates, f.fetch_via_proxy, f.disabled, f.category_id, @@ -220,6 +221,7 @@ func (f *FeedQueryBuilder) GetFeeds() (model.Feeds, error) { &feed.Username, &feed.Password, &feed.IgnoreHTTPCache, + &feed.AllowSelfSignedCertificates, &feed.FetchViaProxy, &feed.Disabled, &feed.Category.ID, diff --git a/template/templates/views/add_subscription.html b/template/templates/views/add_subscription.html index f6a96e68..de9f45b2 100644 --- a/template/templates/views/add_subscription.html +++ b/template/templates/views/add_subscription.html @@ -30,6 +30,8 @@ {{ t "page.add_feed.legend.advanced_options" }}
+                        <label><input type="checkbox" name="allow_self_signed_certificates" value="1" {{ if .form.AllowSelfSignedCertificates }}checked{{ end }}> {{ t "form.feed.label.allow_self_signed_certificates" }}</label>
+
                         {{ if .hasProxyConfigured }}
                         <label><input type="checkbox" name="fetch_via_proxy" value="1" {{ if .form.FetchViaProxy }}checked{{ end }}> {{ t "form.feed.label.fetch_via_proxy" }}</label>
                         {{ end }}
diff --git a/template/templates/views/choose_subscription.html b/template/templates/views/choose_subscription.html
index 6da42eaf..957aac3d 100644
--- a/template/templates/views/choose_subscription.html
+++ b/template/templates/views/choose_subscription.html
@@ -20,6 +20,9 @@
             {{ if .form.Crawler }}
                 <input type="hidden" name="crawler" value="1">
             {{ end }}
+            {{ if .form.AllowSelfSignedCertificates }}
+                <input type="hidden" name="allow_self_signed_certificates" value="1">
+            {{ end }}

             <h3>{{ t "page.add_feed.choose_feed" }}</h3>

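For reviewers, a short usage sketch of the new flag through the Go API client. Everything here is illustrative: the endpoint, credentials, and feed URL are placeholders, while the request types, the CreateFeed/UpdateFeed signatures, and the pointer field on FeedModificationRequest are taken from client/model.go and tests/feed_test.go in this patch.

package main

import (
	"fmt"
	"log"

	miniflux "miniflux.app/client"
)

func main() {
	// Placeholder endpoint and credentials.
	client := miniflux.New("https://miniflux.example.org", "admin", "secret")

	// Create a feed served over HTTPS with a self-signed certificate.
	feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
		FeedURL:                     "https://self-hosted.example.org/feed.xml", // placeholder URL
		CategoryID:                  1,
		AllowSelfSignedCertificates: true,
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("created feed", feedID)

	// FeedModificationRequest uses a pointer, so the flag can be switched back off later.
	allow := false
	if _, err := client.UpdateFeed(feedID, &miniflux.FeedModificationRequest{
		AllowSelfSignedCertificates: &allow,
	}); err != nil {
		log.Fatal(err)
	}
}
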
diff --git a/template/templates/views/edit_feed.html b/template/templates/views/edit_feed.html index 5828c223..62e339db 100644 --- a/template/templates/views/edit_feed.html +++ b/template/templates/views/edit_feed.html @@ -79,6 +79,7 @@ + {{ if .hasProxyConfigured }} {{ end }} diff --git a/tests/feed_test.go b/tests/feed_test.go index 5400d221..aa9be393 100644 --- a/tests/feed_test.go +++ b/tests/feed_test.go @@ -171,6 +171,37 @@ func TestCreateFeedWithCrawlerEnabled(t *testing.T) { } } +func TestCreateFeedWithSelfSignedCertificatesAllowed(t *testing.T) { + client := createClient(t) + + categories, err := client.Categories() + if err != nil { + t.Fatal(err) + } + + feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{ + FeedURL: testFeedURL, + CategoryID: categories[0].ID, + AllowSelfSignedCertificates: true, + }) + if err != nil { + t.Fatal(err) + } + + if feedID == 0 { + t.Fatalf(`Invalid feed ID, got %q`, feedID) + } + + feed, err := client.Feed(feedID) + if err != nil { + t.Fatal(err) + } + + if !feed.AllowSelfSignedCertificates { + t.Error(`The feed should have self-signed certificates enabled`) + } +} + func TestCreateFeedWithScraperRule(t *testing.T) { client := createClient(t) @@ -375,6 +406,31 @@ func TestUpdateFeedCrawler(t *testing.T) { } } +func TestUpdateFeedAllowSelfSignedCertificates(t *testing.T) { + client := createClient(t) + feed, _ := createFeed(t, client) + + selfSigned := true + updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{AllowSelfSignedCertificates: &selfSigned}) + if err != nil { + t.Fatal(err) + } + + if updatedFeed.AllowSelfSignedCertificates != selfSigned { + t.Fatalf(`Wrong AllowSelfSignedCertificates value, got "%v" instead of "%v"`, updatedFeed.AllowSelfSignedCertificates, selfSigned) + } + + selfSigned = false + updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{AllowSelfSignedCertificates: &selfSigned}) + if err != nil { + t.Fatal(err) + } + + if updatedFeed.AllowSelfSignedCertificates != selfSigned { + t.Fatalf(`Wrong AllowSelfSignedCertificates value, got "%v" instead of "%v"`, updatedFeed.AllowSelfSignedCertificates, selfSigned) + } +} + func TestUpdateFeedScraperRules(t *testing.T) { client := createClient(t) feed, _ := createFeed(t, client) diff --git a/ui/entry_scraper.go b/ui/entry_scraper.go index 4ece7e82..a17427c1 100644 --- a/ui/entry_scraper.go +++ b/ui/entry_scraper.go @@ -12,15 +12,18 @@ import ( "miniflux.app/model" "miniflux.app/proxy" "miniflux.app/reader/processor" + "miniflux.app/storage" ) func (h *handler) fetchContent(w http.ResponseWriter, r *http.Request) { + loggedUserID := request.UserID(r) entryID := request.RouteInt64Param(r, "entryID") - builder := h.store.NewEntryQueryBuilder(request.UserID(r)) - builder.WithEntryID(entryID) - builder.WithoutStatus(model.EntryStatusRemoved) - entry, err := builder.GetEntry() + entryBuilder := h.store.NewEntryQueryBuilder(loggedUserID) + entryBuilder.WithEntryID(entryID) + entryBuilder.WithoutStatus(model.EntryStatusRemoved) + + entry, err := entryBuilder.GetEntry() if err != nil { json.ServerError(w, r, err) return @@ -31,7 +34,20 @@ func (h *handler) fetchContent(w http.ResponseWriter, r *http.Request) { return } - if err := processor.ProcessEntryWebPage(entry); err != nil { + feedBuilder := storage.NewFeedQueryBuilder(h.store, loggedUserID) + feedBuilder.WithFeedID(entry.FeedID) + feed, err := feedBuilder.GetFeed() + if err != nil { + json.ServerError(w, r, err) + return + } + + if feed == nil { + json.NotFound(w, r) + 
return + } + + if err := processor.ProcessEntryWebPage(feed, entry); err != nil { json.ServerError(w, r, err) return } diff --git a/ui/feed_edit.go b/ui/feed_edit.go index 5e8f8293..0e5cbd78 100644 --- a/ui/feed_edit.go +++ b/ui/feed_edit.go @@ -41,21 +41,22 @@ func (h *handler) showEditFeedPage(w http.ResponseWriter, r *http.Request) { } feedForm := form.FeedForm{ - SiteURL: feed.SiteURL, - FeedURL: feed.FeedURL, - Title: feed.Title, - ScraperRules: feed.ScraperRules, - RewriteRules: feed.RewriteRules, - BlocklistRules: feed.BlocklistRules, - KeeplistRules: feed.KeeplistRules, - Crawler: feed.Crawler, - UserAgent: feed.UserAgent, - CategoryID: feed.Category.ID, - Username: feed.Username, - Password: feed.Password, - IgnoreHTTPCache: feed.IgnoreHTTPCache, - FetchViaProxy: feed.FetchViaProxy, - Disabled: feed.Disabled, + SiteURL: feed.SiteURL, + FeedURL: feed.FeedURL, + Title: feed.Title, + ScraperRules: feed.ScraperRules, + RewriteRules: feed.RewriteRules, + BlocklistRules: feed.BlocklistRules, + KeeplistRules: feed.KeeplistRules, + Crawler: feed.Crawler, + UserAgent: feed.UserAgent, + CategoryID: feed.Category.ID, + Username: feed.Username, + Password: feed.Password, + IgnoreHTTPCache: feed.IgnoreHTTPCache, + AllowSelfSignedCertificates: feed.AllowSelfSignedCertificates, + FetchViaProxy: feed.FetchViaProxy, + Disabled: feed.Disabled, } sess := session.New(h.store, request.SessionID(r)) diff --git a/ui/form/feed.go b/ui/form/feed.go index 5623f3b4..0f3ded75 100644 --- a/ui/form/feed.go +++ b/ui/form/feed.go @@ -13,21 +13,22 @@ import ( // FeedForm represents a feed form in the UI type FeedForm struct { - FeedURL string - SiteURL string - Title string - ScraperRules string - RewriteRules string - BlocklistRules string - KeeplistRules string - Crawler bool - UserAgent string - CategoryID int64 - Username string - Password string - IgnoreHTTPCache bool - FetchViaProxy bool - Disabled bool + FeedURL string + SiteURL string + Title string + ScraperRules string + RewriteRules string + BlocklistRules string + KeeplistRules string + Crawler bool + UserAgent string + CategoryID int64 + Username string + Password string + IgnoreHTTPCache bool + AllowSelfSignedCertificates bool + FetchViaProxy bool + Disabled bool } // Merge updates the fields of the given feed. 
@@ -47,6 +48,7 @@ func (f FeedForm) Merge(feed *model.Feed) *model.Feed { feed.Username = f.Username feed.Password = f.Password feed.IgnoreHTTPCache = f.IgnoreHTTPCache + feed.AllowSelfSignedCertificates = f.AllowSelfSignedCertificates feed.FetchViaProxy = f.FetchViaProxy feed.Disabled = f.Disabled return feed @@ -59,20 +61,21 @@ func NewFeedForm(r *http.Request) *FeedForm { categoryID = 0 } return &FeedForm{ - FeedURL: r.FormValue("feed_url"), - SiteURL: r.FormValue("site_url"), - Title: r.FormValue("title"), - ScraperRules: r.FormValue("scraper_rules"), - UserAgent: r.FormValue("user_agent"), - RewriteRules: r.FormValue("rewrite_rules"), - BlocklistRules: r.FormValue("blocklist_rules"), - KeeplistRules: r.FormValue("keeplist_rules"), - Crawler: r.FormValue("crawler") == "1", - CategoryID: int64(categoryID), - Username: r.FormValue("feed_username"), - Password: r.FormValue("feed_password"), - IgnoreHTTPCache: r.FormValue("ignore_http_cache") == "1", - FetchViaProxy: r.FormValue("fetch_via_proxy") == "1", - Disabled: r.FormValue("disabled") == "1", + FeedURL: r.FormValue("feed_url"), + SiteURL: r.FormValue("site_url"), + Title: r.FormValue("title"), + ScraperRules: r.FormValue("scraper_rules"), + UserAgent: r.FormValue("user_agent"), + RewriteRules: r.FormValue("rewrite_rules"), + BlocklistRules: r.FormValue("blocklist_rules"), + KeeplistRules: r.FormValue("keeplist_rules"), + Crawler: r.FormValue("crawler") == "1", + CategoryID: int64(categoryID), + Username: r.FormValue("feed_username"), + Password: r.FormValue("feed_password"), + IgnoreHTTPCache: r.FormValue("ignore_http_cache") == "1", + AllowSelfSignedCertificates: r.FormValue("allow_self_signed_certificates") == "1", + FetchViaProxy: r.FormValue("fetch_via_proxy") == "1", + Disabled: r.FormValue("disabled") == "1", } } diff --git a/ui/form/subscription.go b/ui/form/subscription.go index 73967c24..8b425b3c 100644 --- a/ui/form/subscription.go +++ b/ui/form/subscription.go @@ -14,17 +14,18 @@ import ( // SubscriptionForm represents the subscription form. type SubscriptionForm struct { - URL string - CategoryID int64 - Crawler bool - FetchViaProxy bool - UserAgent string - Username string - Password string - ScraperRules string - RewriteRules string - BlocklistRules string - KeeplistRules string + URL string + CategoryID int64 + Crawler bool + FetchViaProxy bool + AllowSelfSignedCertificates bool + UserAgent string + Username string + Password string + ScraperRules string + RewriteRules string + BlocklistRules string + KeeplistRules string } // Validate makes sure the form values are valid. 
@@ -56,16 +57,17 @@ func NewSubscriptionForm(r *http.Request) *SubscriptionForm { } return &SubscriptionForm{ - URL: r.FormValue("url"), - CategoryID: int64(categoryID), - Crawler: r.FormValue("crawler") == "1", - FetchViaProxy: r.FormValue("fetch_via_proxy") == "1", - UserAgent: r.FormValue("user_agent"), - Username: r.FormValue("feed_username"), - Password: r.FormValue("feed_password"), - ScraperRules: r.FormValue("scraper_rules"), - RewriteRules: r.FormValue("rewrite_rules"), - BlocklistRules: r.FormValue("blocklist_rules"), - KeeplistRules: r.FormValue("keeplist_rules"), + URL: r.FormValue("url"), + CategoryID: int64(categoryID), + Crawler: r.FormValue("crawler") == "1", + AllowSelfSignedCertificates: r.FormValue("allow_self_signed_certificates") == "1", + FetchViaProxy: r.FormValue("fetch_via_proxy") == "1", + UserAgent: r.FormValue("user_agent"), + Username: r.FormValue("feed_username"), + Password: r.FormValue("feed_password"), + ScraperRules: r.FormValue("scraper_rules"), + RewriteRules: r.FormValue("rewrite_rules"), + BlocklistRules: r.FormValue("blocklist_rules"), + KeeplistRules: r.FormValue("keeplist_rules"), } } diff --git a/ui/subscription_choose.go b/ui/subscription_choose.go index 17efaeae..b8bded99 100644 --- a/ui/subscription_choose.go +++ b/ui/subscription_choose.go @@ -50,17 +50,18 @@ func (h *handler) showChooseSubscriptionPage(w http.ResponseWriter, r *http.Requ } feed, err := feedHandler.CreateFeed(h.store, user.ID, &model.FeedCreationRequest{ - CategoryID: subscriptionForm.CategoryID, - FeedURL: subscriptionForm.URL, - Crawler: subscriptionForm.Crawler, - UserAgent: subscriptionForm.UserAgent, - Username: subscriptionForm.Username, - Password: subscriptionForm.Password, - ScraperRules: subscriptionForm.ScraperRules, - RewriteRules: subscriptionForm.RewriteRules, - BlocklistRules: subscriptionForm.BlocklistRules, - KeeplistRules: subscriptionForm.KeeplistRules, - FetchViaProxy: subscriptionForm.FetchViaProxy, + CategoryID: subscriptionForm.CategoryID, + FeedURL: subscriptionForm.URL, + Crawler: subscriptionForm.Crawler, + AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates, + UserAgent: subscriptionForm.UserAgent, + Username: subscriptionForm.Username, + Password: subscriptionForm.Password, + ScraperRules: subscriptionForm.ScraperRules, + RewriteRules: subscriptionForm.RewriteRules, + BlocklistRules: subscriptionForm.BlocklistRules, + KeeplistRules: subscriptionForm.KeeplistRules, + FetchViaProxy: subscriptionForm.FetchViaProxy, }) if err != nil { view.Set("form", subscriptionForm) diff --git a/ui/subscription_submit.go b/ui/subscription_submit.go index 5316ac2e..91f3d365 100644 --- a/ui/subscription_submit.go +++ b/ui/subscription_submit.go @@ -58,6 +58,7 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) { subscriptionForm.Username, subscriptionForm.Password, subscriptionForm.FetchViaProxy, + subscriptionForm.AllowSelfSignedCertificates, ) if findErr != nil { logger.Error("[UI:SubmitSubscription] %s", findErr) @@ -77,17 +78,18 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) { html.OK(w, r, v.Render("add_subscription")) case n == 1: feed, err := feedHandler.CreateFeed(h.store, user.ID, &model.FeedCreationRequest{ - CategoryID: subscriptionForm.CategoryID, - FeedURL: subscriptions[0].URL, - Crawler: subscriptionForm.Crawler, - UserAgent: subscriptionForm.UserAgent, - Username: subscriptionForm.Username, - Password: subscriptionForm.Password, - ScraperRules: 
subscriptionForm.ScraperRules, - RewriteRules: subscriptionForm.RewriteRules, - BlocklistRules: subscriptionForm.BlocklistRules, - KeeplistRules: subscriptionForm.KeeplistRules, - FetchViaProxy: subscriptionForm.FetchViaProxy, + CategoryID: subscriptionForm.CategoryID, + FeedURL: subscriptions[0].URL, + Crawler: subscriptionForm.Crawler, + AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates, + UserAgent: subscriptionForm.UserAgent, + Username: subscriptionForm.Username, + Password: subscriptionForm.Password, + ScraperRules: subscriptionForm.ScraperRules, + RewriteRules: subscriptionForm.RewriteRules, + BlocklistRules: subscriptionForm.BlocklistRules, + KeeplistRules: subscriptionForm.KeeplistRules, + FetchViaProxy: subscriptionForm.FetchViaProxy, }) if err != nil { v.Set("form", subscriptionForm)