mirror of https://github.com/miniflux/v2.git
Merge branch 'main' into unread_first
This commit is contained in: commit 53ab39474e
@@ -1,7 +1,7 @@
 version: '3.8'
 services:
   app:
-    image: mcr.microsoft.com/devcontainers/go
+    image: mcr.microsoft.com/devcontainers/go:1.22
     volumes:
       - ..:/workspace:cached
     command: sleep infinity
@@ -24,7 +24,7 @@ services:
     ports:
       - 5432:5432
   apprise:
-    image: caronc/apprise:latest
+    image: caronc/apprise:1.0
    restart: unless-stopped
    hostname: apprise
    volumes:
@@ -1,4 +1,7 @@
 Do you follow the guidelines?
 
 - [ ] I have tested my changes
+- [ ] There is no breaking changes
+- [ ] I really tested my changes and there is no regression
+- [ ] Ideally, my commit messages use the same convention as the Go project: https://go.dev/doc/contribute#commit_messages
 - [ ] I read this document: https://miniflux.app/faq.html#pull-request
@@ -22,7 +22,7 @@ jobs:
       run: eslint internal/ui/static/js/*.js
 
   golangci:
-    name: Golang Linter
+    name: Golang Linters
     runs-on: ubuntu-latest
     steps:
    - uses: actions/checkout@v4
@@ -32,8 +32,8 @@ jobs:
    - run: "go vet ./..."
    - uses: golangci/golangci-lint-action@v4
      with:
-        args: --timeout 10m --skip-dirs tests --disable errcheck --enable sqlclosecheck --enable misspell --enable gofmt --enable goimports --enable whitespace
+        args: --timeout 10m --skip-dirs tests --disable errcheck --enable sqlclosecheck --enable misspell --enable gofmt --enable goimports --enable whitespace --enable gocritic
-    - uses: dominikh/staticcheck-action@v1.3.0
+    - uses: dominikh/staticcheck-action@v1.3.1
      with:
        version: "2023.1.7"
        install-go: false
ChangeLog | 81
@@ -1,3 +1,84 @@
+Version 2.1.2 (March 30, 2024)
+------------------------------
+
+* `api`: rewrite API integration tests without build tags
+* `ci`: add basic ESLinter checks
+* `ci`: enable go-critic linter and fix various issues detected
+* `ci`: fix JavaScript linter path in GitHub Actions
+* `cli`: avoid misleading error message when creating an admin user automatically
+* `config`: add `FILTER_ENTRY_MAX_AGE_DAYS` option
+* `config`: bump the number of simultaneous workers
+* `config`: rename `PROXY_*` options to `MEDIA_PROXY_*`
+* `config`: use `crypto.GenerateRandomBytes` instead of doing it by hand
+* `http/request`: refactor conditions to be more idiomatic
+* `http/response`: remove legacy `X-XSS-Protection` header
+* `integration/rssbrige`: fix rssbrige import
+* `integration/shaarli`: factorize the header+payload concatenation as data
+* `integration/shaarli`: no need to base64-encode then remove the padding when we can simply encode without padding
+* `integration/shaarli`: the JWT token was declared as using HS256 as algorithm, but was using HS512
+* `integration/webhook`: add category title to request body
+* `locale`: update Turkish translations
+* `man page`: sort config options in alphabetical order
+* `mediaproxy`: reduce the internal indentation of `ProxifiedUrl` by inverting some conditions
+* `mediaproxy`: simplify and refactor the package
+* `model`: replace `Optional{Int,Int64,Float64}` with a generic function `OptionalNumber()`
+* `model`: use struct embedding for `FeedCreationRequestFromSubscriptionDiscovery` to reduce code duplication
+* `reader/atom`: avoid debug message when the date is empty
+* `reader/atom`: change `if !a { a = } if !a {a = }` constructs into `if !a { a = ; if !a {a = }}` to reduce the number of comparisons and improve readability
+* `reader/atom`: Move the population of the feed's entries into a new function, to make BuildFeed easier to understand/separate concerns/implementation details
+* `reader/atom`: refactor Atom parser to use an adapter
+* `reader/atom`: use `sort+compact` instead of `compact+sort` to remove duplicates
+* `reader/atom`: when detecting the format, detect its version as well
+* `reader/encoding`: inline a one-liner function
+* `reader/handler`: fix force refresh feature
+* `reader/json`: refactor JSON Feed parser to use an adapter
+* `reader/media`: remove a superfluous error-check: `strconv.ParseInt` returns `0` when passed an empty string
+* `reader/media`: simplify switch-case by moving a common condition above it
+* `reader/processor`: compile block/keep regex only once per feed
+* `reader/rdf`: refactor RDF parser to use an adapter
+* `reader/rewrite`: inline some one-line functions
+* `reader/rewrite`: simplify `removeClickbait`
+* `reader/rewrite`: transform a free-standing function into a method
+* `reader/rewrite`: use a proper constant instead of a magic number in `applyFuncOnTextContent`
+* `reader/rss`: add support for `<media:category>` element
+* `reader/rss`: don't add empty tags to RSS items
+* `reader/rss`: refactor RSS parser to use a default namespace to avoid some limitations of the Go XML parser
+* `reader/rss`: refactor RSS Parser to use an adapter
+* `reader/rss`: remove some duplicated code in RSS parser
+* `reader`: ensure that enclosure URLs are always absolute
+* `reader`: move iTunes and GooglePlay XML definitions to their own packages
+* `reader`: parse podcast categories
+* `reader`: remove trailing space in `SiteURL` and `FeedURL`
+* `storage`: do not store empty tags
+* `storage`: simplify `removeDuplicates()` to use a `sort`+`compact` construct instead of doing it by hand with a hashmap
+* `storage`: Use plain strings concatenation instead of building an array and then joining it
+* `timezone`: make sure the tests pass when the timezone database is not installed on the host
+* `ui/css`: align `min-width` with the other `min-width` values
+* `ui/css`: fix regression: "Add to Home Screen" button is unreadable
+* `ui/js`: don't use lambdas to return a function, use directly the function instead
+* `ui/js`: enable trusted-types
+* `ui/js`: fix download button loading label
+* `ui/js`: fix JavaScript error on the login page when the user not authenticated
+* `ui/js`: inline one-line functions
+* `ui/js`: inline some `querySelectorAll` calls
+* `ui/js`: reduce the scope of some variables
+* `ui/js`: remove a hack for "Chrome 67 and earlier" since it was released in 2018
+* `ui/js`: replace `DomHelper.findParent` with `.closest`
+* `ui/js`: replace `let` with `const`
+* `ui/js`: simplify `DomHelper.getVisibleElements` by using a `filter` instead of a loop with an index
+* `ui/js`: use a `Set` instead of an array in a `KeyboardHandler`'s member
+* `ui/js`: use some ternaries where it makes sense
+* `ui/static`: make use of `HashFromBytes` everywhere
+* `ui/static`: set minifier ECMAScript version
+* `ui`: add keyboard shortcuts for scrolling to top/bottom of the item list
+* `ui`: add media player control playback speed
+* `ui`: remove unused variables and improve JSON decoding in `saveEnclosureProgression()`
+* `validator`: display an error message on edit feed page when the feed URL is not unique
+* Bump `github.com/coreos/go-oidc/v3` from `3.9.0` to `3.10.0`
+* Bump `github.com/go-webauthn/webauthn` from `0.10.1` to `0.10.2`
+* Bump `github.com/tdewolff/minify/v2` from `2.20.18` to `2.20.19`
+* Bump `google.golang.org/protobuf` from `1.32.0` to `1.33.0`
+
 Version 2.1.1 (March 10, 2024)
 -----------------------------
 
Makefile | 8
@@ -101,7 +101,7 @@ windows-x86:
 	@ GOOS=windows GOARCH=386 go build -ldflags=$(LD_FLAGS) -o $(APP)-$@.exe main.go
 
 run:
-	@ LOG_DATE_TIME=1 DEBUG=1 RUN_MIGRATIONS=1 CREATE_ADMIN=1 ADMIN_USERNAME=admin ADMIN_PASSWORD=test123 go run main.go
+	@ LOG_DATE_TIME=1 LOG_LEVEL=debug RUN_MIGRATIONS=1 CREATE_ADMIN=1 ADMIN_USERNAME=admin ADMIN_PASSWORD=test123 go run main.go
 
 clean:
 	@ rm -f $(APP)-* $(APP) $(APP)*.rpm $(APP)*.deb $(APP)*.exe
@@ -128,7 +128,11 @@ integration-test:
 	./miniflux-test >/tmp/miniflux.log 2>&1 & echo "$$!" > "/tmp/miniflux.pid"
 
 	while ! nc -z localhost 8080; do sleep 1; done
-	go test -v -tags=integration -count=1 miniflux.app/v2/internal/tests
+
+	TEST_MINIFLUX_BASE_URL=http://127.0.0.1:8080 \
+	TEST_MINIFLUX_ADMIN_USERNAME=admin \
+	TEST_MINIFLUX_ADMIN_PASSWORD=test123 \
+	go test -v -count=1 ./internal/api
 
 clean-integration-test:
 	@ kill -9 `cat /tmp/miniflux.pid`
@@ -18,16 +18,44 @@ type Client struct {
 }
 
 // New returns a new Miniflux client.
+// Deprecated: use NewClient instead.
 func New(endpoint string, credentials ...string) *Client {
+	// Web gives "API Endpoint = https://miniflux.app/v1/", it doesn't work (/v1/v1/me)
+	return NewClient(endpoint, credentials...)
+}
+
+// NewClient returns a new Miniflux client.
+func NewClient(endpoint string, credentials ...string) *Client {
+	// Trim trailing slashes and /v1 from the endpoint.
+	endpoint = strings.TrimSuffix(endpoint, "/")
+	endpoint = strings.TrimSuffix(endpoint, "/v1")
+	// trim to https://miniflux.app
+
-	if len(credentials) == 2 {
+	switch len(credentials) {
+	case 2:
 		return &Client{request: &request{endpoint: endpoint, username: credentials[0], password: credentials[1]}}
+	case 1:
+		return &Client{request: &request{endpoint: endpoint, apiKey: credentials[0]}}
+	default:
+		return &Client{request: &request{endpoint: endpoint}}
 	}
-	return &Client{request: &request{endpoint: endpoint, apiKey: credentials[0]}}
 }
 
 // Healthcheck checks if the application is up and running.
 func (c *Client) Healthcheck() error {
 	body, err := c.request.Get("/healthcheck")
 	if err != nil {
 		return fmt.Errorf("miniflux: unable to perform healthcheck: %w", err)
 	}
 	defer body.Close()
 
 	responseBodyContent, err := io.ReadAll(body)
 	if err != nil {
 		return fmt.Errorf("miniflux: unable to read healthcheck response: %w", err)
 	}
 
 	if string(responseBodyContent) != "OK" {
 		return fmt.Errorf("miniflux: invalid healthcheck response: %q", responseBodyContent)
 	}
 
 	return nil
 }
 
 // Version returns the version of the Miniflux instance.
@@ -528,6 +556,25 @@ func (c *Client) SaveEntry(entryID int64) error {
 	return err
 }
 
+// FetchEntryOriginalContent fetches the original content of an entry using the scraper.
+func (c *Client) FetchEntryOriginalContent(entryID int64) (string, error) {
+	body, err := c.request.Get(fmt.Sprintf("/v1/entries/%d/fetch-content", entryID))
+	if err != nil {
+		return "", err
+	}
+	defer body.Close()
+
+	var response struct {
+		Content string `json:"content"`
+	}
+
+	if err := json.NewDecoder(body).Decode(&response); err != nil {
+		return "", fmt.Errorf("miniflux: response error (%v)", err)
+	}
+
+	return response.Content, nil
+}
+
 // FetchCounters fetches feed counters.
 func (c *Client) FetchCounters() (*FeedCounters, error) {
 	body, err := c.request.Get("/v1/feeds/counters")
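A usage sketch for the new constructor, based only on the signatures in the diff above: NewClient accepts either a username/password pair or a single API key, and tolerates endpoints pasted with a trailing "/" or "/v1" suffix. The host name is illustrative, not from the diff.

package main

import (
	"fmt"

	miniflux "miniflux.app/v2/client"
)

func main() {
	// Both clients point at the same instance: NewClient strips the
	// trailing "/" and "/v1", so a pasted "API Endpoint" URL also works.
	basicAuthClient := miniflux.NewClient("https://miniflux.example.org", "admin", "secret")
	apiKeyClient := miniflux.NewClient("https://miniflux.example.org/v1/", "my-api-key")

	for _, c := range []*miniflux.Client{basicAuthClient, apiKeyClient} {
		if err := c.Healthcheck(); err != nil {
			fmt.Println(err)
		}
	}
}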
@@ -12,7 +12,7 @@ This code snippet fetch the list of users:
 	miniflux "miniflux.app/v2/client"
 )
 
-client := miniflux.New("https://api.example.org", "admin", "secret")
+client := miniflux.NewClient("https://api.example.org", "admin", "secret")
 users, err := client.Users()
 if err != nil {
 	fmt.Println(err)
@@ -41,6 +41,7 @@ type User struct {
 	DefaultHomePage        string  `json:"default_home_page"`
 	CategoriesSortingOrder string  `json:"categories_sorting_order"`
 	MarkReadOnView         bool    `json:"mark_read_on_view"`
+	MediaPlaybackRate      float64 `json:"media_playback_rate"`
 }
 
 func (u User) String() string {
@@ -58,28 +59,29 @@ type UserCreationRequest struct {
 
 // UserModificationRequest represents the request to update a user.
 type UserModificationRequest struct {
-	Username               *string `json:"username"`
-	Password               *string `json:"password"`
-	IsAdmin                *bool   `json:"is_admin"`
-	Theme                  *string `json:"theme"`
-	Language               *string `json:"language"`
-	Timezone               *string `json:"timezone"`
-	EntryDirection         *string `json:"entry_sorting_direction"`
-	EntryOrder             *string `json:"entry_sorting_order"`
-	Stylesheet             *string `json:"stylesheet"`
-	GoogleID               *string `json:"google_id"`
-	OpenIDConnectID        *string `json:"openid_connect_id"`
-	EntriesPerPage         *int    `json:"entries_per_page"`
-	KeyboardShortcuts      *bool   `json:"keyboard_shortcuts"`
-	ShowReadingTime        *bool   `json:"show_reading_time"`
-	EntrySwipe             *bool   `json:"entry_swipe"`
-	GestureNav             *string `json:"gesture_nav"`
-	DisplayMode            *string `json:"display_mode"`
-	DefaultReadingSpeed    *int    `json:"default_reading_speed"`
-	CJKReadingSpeed        *int    `json:"cjk_reading_speed"`
-	DefaultHomePage        *string `json:"default_home_page"`
-	CategoriesSortingOrder *string `json:"categories_sorting_order"`
-	MarkReadOnView         *bool   `json:"mark_read_on_view"`
+	Username               *string  `json:"username"`
+	Password               *string  `json:"password"`
+	IsAdmin                *bool    `json:"is_admin"`
+	Theme                  *string  `json:"theme"`
+	Language               *string  `json:"language"`
+	Timezone               *string  `json:"timezone"`
+	EntryDirection         *string  `json:"entry_sorting_direction"`
+	EntryOrder             *string  `json:"entry_sorting_order"`
+	Stylesheet             *string  `json:"stylesheet"`
+	GoogleID               *string  `json:"google_id"`
+	OpenIDConnectID        *string  `json:"openid_connect_id"`
+	EntriesPerPage         *int     `json:"entries_per_page"`
+	KeyboardShortcuts      *bool    `json:"keyboard_shortcuts"`
+	ShowReadingTime        *bool    `json:"show_reading_time"`
+	EntrySwipe             *bool    `json:"entry_swipe"`
+	GestureNav             *string  `json:"gesture_nav"`
+	DisplayMode            *string  `json:"display_mode"`
+	DefaultReadingSpeed    *int     `json:"default_reading_speed"`
+	CJKReadingSpeed        *int     `json:"cjk_reading_speed"`
+	DefaultHomePage        *string  `json:"default_home_page"`
+	CategoriesSortingOrder *string  `json:"categories_sorting_order"`
+	MarkReadOnView         *bool    `json:"mark_read_on_view"`
+	MediaPlaybackRate      *float64 `json:"media_playback_rate"`
 }
 
 // Users represents a list of users.
@@ -290,3 +292,7 @@ type VersionResponse struct {
 	Arch string `json:"arch"`
 	OS   string `json:"os"`
 }
+
+func SetOptionalField[T any](value T) *T {
+	return &value
+}
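SetOptionalField pairs naturally with UserModificationRequest, whose fields are all pointers so that omitted fields are left untouched by the server-side patch. The example below is ours, not part of the diff, and assumes the client's existing UpdateUser helper; the user ID and theme value are illustrative.

package main

import (
	"fmt"

	miniflux "miniflux.app/v2/client"
)

func main() {
	client := miniflux.NewClient("https://miniflux.example.org", "my-api-key")

	// Fields left nil are decoded as null on the server and ignored by the
	// patch, so only the theme and playback rate change. SetOptionalField
	// avoids declaring throwaway variables just to take their address.
	_, err := client.UpdateUser(42, &miniflux.UserModificationRequest{
		Theme:             miniflux.SetOptionalField("dark_serif"),
		MediaPlaybackRate: miniflux.SetOptionalField(1.5),
	})
	if err != nil {
		fmt.Println(err)
	}
}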
|
@ -26,6 +26,7 @@ var (
|
|||
ErrForbidden = errors.New("miniflux: access forbidden")
|
||||
ErrServerError = errors.New("miniflux: internal server error")
|
||||
ErrNotFound = errors.New("miniflux: resource not found")
|
||||
ErrBadRequest = errors.New("miniflux: bad request")
|
||||
)
|
||||
|
||||
type errorResponse struct {
|
||||
|
@ -124,10 +125,10 @@ func (r *request) execute(method, path string, data interface{}) (io.ReadCloser,
|
|||
var resp errorResponse
|
||||
decoder := json.NewDecoder(response.Body)
|
||||
if err := decoder.Decode(&resp); err != nil {
|
||||
return nil, fmt.Errorf("miniflux: bad request error (%v)", err)
|
||||
return nil, fmt.Errorf("%w (%v)", ErrBadRequest, err)
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("miniflux: bad request (%s)", resp.ErrorMessage)
|
||||
return nil, fmt.Errorf("%w (%s)", ErrBadRequest, resp.ErrorMessage)
|
||||
}
|
||||
|
||||
if response.StatusCode > 400 {
|
||||
|
|
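Wrapping the new ErrBadRequest sentinel with %w, rather than formatting a fresh string, means callers can branch on the error kind with errors.Is while still seeing the server's message. A small caller-side sketch (our code, not from the diff):

package main

import (
	"errors"
	"fmt"

	miniflux "miniflux.app/v2/client"
)

func main() {
	client := miniflux.NewClient("https://miniflux.example.org", "my-api-key")

	if _, err := client.FetchCounters(); err != nil {
		// %w preserves the sentinel, so errors.Is matches even though the
		// error text also carries the decoded server-side error message.
		if errors.Is(err, miniflux.ErrBadRequest) {
			fmt.Println("bad request:", err)
			return
		}
		fmt.Println(err)
	}
}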
go.mod | 6
@@ -5,7 +5,7 @@ module miniflux.app/v2
 require (
 	github.com/PuerkitoBio/goquery v1.9.1
 	github.com/abadojack/whatlanggo v1.0.1
-	github.com/coreos/go-oidc/v3 v3.9.0
+	github.com/coreos/go-oidc/v3 v3.10.0
 	github.com/go-webauthn/webauthn v0.10.2
 	github.com/gorilla/mux v1.8.1
 	github.com/lib/pq v1.10.9
@@ -16,6 +16,7 @@ require (
 	golang.org/x/net v0.22.0
 	golang.org/x/oauth2 v0.18.0
 	golang.org/x/term v0.18.0
+	golang.org/x/text v0.14.0
 	mvdan.cc/xurls/v2 v2.5.0
 )
 
@@ -30,7 +31,7 @@ require (
 	github.com/beorn7/perks v1.0.1 // indirect
 	github.com/cespare/xxhash/v2 v2.2.0 // indirect
 	github.com/fxamacker/cbor/v2 v2.6.0 // indirect
-	github.com/go-jose/go-jose/v3 v3.0.3 // indirect
+	github.com/go-jose/go-jose/v4 v4.0.1 // indirect
 	github.com/golang/protobuf v1.5.3 // indirect
 	github.com/google/uuid v1.6.0 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
@@ -40,7 +41,6 @@ require (
 	github.com/tdewolff/parse/v2 v2.7.12 // indirect
 	github.com/x448/float16 v0.8.4 // indirect
 	golang.org/x/sys v0.18.0 // indirect
-	golang.org/x/text v0.14.0 // indirect
 	google.golang.org/appengine v1.6.8 // indirect
 	google.golang.org/protobuf v1.33.0 // indirect
 )
go.sum | 20
@@ -8,15 +8,14 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
 github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
 github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
-github.com/coreos/go-oidc/v3 v3.9.0 h1:0J/ogVOd4y8P0f0xUh8l9t07xRP/d8tccvjHl2dcsSo=
-github.com/coreos/go-oidc/v3 v3.9.0/go.mod h1:rTKz2PYwftcrtoCzV5g5kvfJoWcm0Mk8AF8y1iAQro4=
-github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/coreos/go-oidc/v3 v3.10.0 h1:tDnXHnLyiTVyT/2zLDGj09pFPkhND8Gl8lnTRhoEaJU=
+github.com/coreos/go-oidc/v3 v3.10.0/go.mod h1:5j11xcw0D3+SGxn6Z/WFADsgcWVMyNAlSQupk0KK3ac=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/fxamacker/cbor/v2 v2.6.0 h1:sU6J2usfADwWlYDAFhZBQ6TnLFBHxgesMrQfQgk1tWA=
 github.com/fxamacker/cbor/v2 v2.6.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ=
-github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k=
-github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ=
+github.com/go-jose/go-jose/v4 v4.0.1 h1:QVEPDE3OluqXBQZDcnNvQrInro2h0e4eqNbnZSWqS6U=
+github.com/go-jose/go-jose/v4 v4.0.1/go.mod h1:WVf9LFMHh/QVrmqrOfqun0C45tMe3RoiKJMPvgWwLfY=
 github.com/go-webauthn/webauthn v0.10.2 h1:OG7B+DyuTytrEPFmTX503K77fqs3HDK/0Iv+z8UYbq4=
 github.com/go-webauthn/webauthn v0.10.2/go.mod h1:Gd1IDsGAybuvK1NkwUTLbGmeksxuRJjVN2PE/xsPxHs=
 github.com/go-webauthn/x v0.1.9 h1:v1oeLmoaa+gPOaZqUdDentu6Rl7HkSSsmOT6gxEQHhE=
@@ -28,7 +27,6 @@ github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiu
 github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
 github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
-github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
 github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
 github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
 github.com/google/go-tpm v0.9.0 h1:sQF6YqWMi+SCXpsmS3fd21oPy/vSddwZry4JnmltHVk=
@@ -51,8 +49,6 @@ github.com/prometheus/common v0.48.0 h1:QO8U2CdOzSn1BBsmXJXduaaW+dY/5QLjfB8svtSz
 github.com/prometheus/common v0.48.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc=
 github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
 github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo=
-github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
 github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
 github.com/tdewolff/minify/v2 v2.20.19 h1:tX0SR0LUrIqGoLjXnkIzRSIbKJ7PaNnSENLD4CyH6Xo=
@@ -69,7 +65,6 @@ github.com/yuin/goldmark v1.7.0 h1:EfOIvIMZIzHdB/R/zVrikYLPPwJlfMcNczJFMs1m6sA=
 github.com/yuin/goldmark v1.7.0/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
-golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
 golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
 golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
@@ -79,7 +74,6 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
 golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
-golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
 golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc=
 golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
 golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI=
@@ -94,16 +88,12 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
 golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
 golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
-golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
-golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
 golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
 golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -126,8 +116,6 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0
 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
 google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
 google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 mvdan.cc/xurls/v2 v2.5.0 h1:lyBNOm8Wo71UknhUs4QTFUNNMyxy2JEIaKKo0RWOh+8=
File diff suppressed because it is too large
@@ -15,8 +15,8 @@ import (
 	"miniflux.app/v2/internal/http/request"
 	"miniflux.app/v2/internal/http/response/json"
 	"miniflux.app/v2/internal/integration"
+	"miniflux.app/v2/internal/mediaproxy"
 	"miniflux.app/v2/internal/model"
-	"miniflux.app/v2/internal/proxy"
 	"miniflux.app/v2/internal/reader/processor"
 	"miniflux.app/v2/internal/reader/readingtime"
 	"miniflux.app/v2/internal/storage"
@@ -36,14 +36,14 @@ func (h *handler) getEntryFromBuilder(w http.ResponseWriter, r *http.Request, b
 		return
 	}
 
-	entry.Content = proxy.AbsoluteProxyRewriter(h.router, r.Host, entry.Content)
-	proxyOption := config.Opts.ProxyOption()
+	entry.Content = mediaproxy.RewriteDocumentWithAbsoluteProxyURL(h.router, r.Host, entry.Content)
+	proxyOption := config.Opts.MediaProxyMode()
 
 	for i := range entry.Enclosures {
 		if proxyOption == "all" || proxyOption != "none" && !urllib.IsHTTPS(entry.Enclosures[i].URL) {
-			for _, mediaType := range config.Opts.ProxyMediaTypes() {
+			for _, mediaType := range config.Opts.MediaProxyResourceTypes() {
 				if strings.HasPrefix(entry.Enclosures[i].MimeType, mediaType+"/") {
-					entry.Enclosures[i].URL = proxy.AbsoluteProxifyURL(h.router, r.Host, entry.Enclosures[i].URL)
+					entry.Enclosures[i].URL = mediaproxy.ProxifyAbsoluteURL(h.router, r.Host, entry.Enclosures[i].URL)
 					break
 				}
 			}
@@ -164,7 +164,7 @@ func (h *handler) findEntries(w http.ResponseWriter, r *http.Request, feedID int
 	}
 
 	for i := range entries {
-		entries[i].Content = proxy.AbsoluteProxyRewriter(h.router, r.Host, entries[i].Content)
+		entries[i].Content = mediaproxy.RewriteDocumentWithAbsoluteProxyURL(h.router, r.Host, entries[i].Content)
 	}
 
 	json.OK(w, r, &entriesResponse{Total: count, Entries: entries})
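One subtlety in the enclosure condition above: Go's && binds tighter than ||, so it reads as "all, or (not none and not HTTPS)". A self-contained sketch of that decision table follows; the shouldProxy helper is ours for illustration, and strings.HasPrefix stands in for the urllib.IsHTTPS call used by the real code.

package main

import (
	"fmt"
	"strings"
)

// shouldProxy mirrors the condition in getEntryFromBuilder: proxy everything
// in "all" mode, nothing in "none" mode, and only non-HTTPS URLs in the
// default "http-only" mode.
func shouldProxy(mode, enclosureURL string) bool {
	isHTTPS := strings.HasPrefix(enclosureURL, "https://")
	return mode == "all" || mode != "none" && !isHTTPS
}

func main() {
	for _, mode := range []string{"none", "http-only", "all"} {
		// Prints: none false false / http-only true false / all true true
		fmt.Println(mode,
			shouldProxy(mode, "http://example.org/a.mp3"),
			shouldProxy(mode, "https://example.org/a.mp3"))
	}
}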
@@ -115,7 +115,7 @@ func (h *handler) updateFeed(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	if validationErr := validator.ValidateFeedModification(h.store, userID, &feedModificationRequest); validationErr != nil {
+	if validationErr := validator.ValidateFeedModification(h.store, userID, originalFeed.ID, &feedModificationRequest); validationErr != nil {
 		json.BadRequest(w, r, validationErr.Error())
 		return
 	}
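Passing originalFeed.ID lets the validator check feed-URL uniqueness while excluding the feed being edited, which matches the changelog entry about duplicate feed URLs on the edit page. A rough sketch of why the ID must be threaded through (the interface and helper below are illustrative, not Miniflux's actual API):

package main

import (
	"errors"
	"fmt"
)

// feedStore captures just the storage capability the check needs.
type feedStore interface {
	// AnotherFeedURLExists reports whether a different feed (same user,
	// different ID) already subscribes to feedURL.
	AnotherFeedURLExists(userID, feedID int64, feedURL string) bool
}

// Without the feed ID, saving a feed without changing its URL would
// collide with itself and always fail validation.
func validateFeedURLIsUnique(store feedStore, userID, feedID int64, feedURL string) error {
	if store.AnotherFeedURLExists(userID, feedID, feedURL) {
		return errors.New("this feed URL already exists")
	}
	return nil
}

type fakeStore struct{ taken map[string]int64 }

func (f fakeStore) AnotherFeedURLExists(userID, feedID int64, feedURL string) bool {
	id, ok := f.taken[feedURL]
	return ok && id != feedID
}

func main() {
	store := fakeStore{taken: map[string]int64{"https://example.org/feed.xml": 1}}
	// Feed 1 keeps its own URL: allowed (nil). Feed 2 reuses it: rejected.
	fmt.Println(validateFeedURLIsUnique(store, 42, 1, "https://example.org/feed.xml"))
	fmt.Println(validateFeedURLIsUnique(store, 42, 2, "https://example.org/feed.xml"))
}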
@@ -16,7 +16,7 @@ func askCredentials() (string, string) {
 	fd := int(os.Stdin.Fd())
 
 	if !term.IsTerminal(fd) {
-		printErrorAndExit(fmt.Errorf("this is not a terminal, exiting"))
+		printErrorAndExit(fmt.Errorf("this is not an interactive terminal, exiting"))
 	}
 
 	fmt.Print("Enter Username: ")
@@ -23,7 +23,7 @@ const (
 	flagVersionHelp         = "Show application version"
 	flagMigrateHelp         = "Run SQL migrations"
 	flagFlushSessionsHelp   = "Flush all sessions (disconnect users)"
-	flagCreateAdminHelp     = "Create admin user"
+	flagCreateAdminHelp     = "Create an admin user from an interactive terminal"
 	flagResetPasswordHelp   = "Reset user password"
 	flagResetFeedErrorsHelp = "Clear all feed errors for all users"
 	flagDebugModeHelp       = "Show debug logs"
@@ -191,7 +191,7 @@ func Parse() {
 	}
 
 	if flagCreateAdmin {
-		createAdmin(store)
+		createAdminUserFromInteractiveTerminal(store)
 		return
 	}
 
@@ -211,9 +211,8 @@ func Parse() {
 		printErrorAndExit(err)
 	}
 
-	// Create admin user and start the daemon.
 	if config.Opts.CreateAdmin() {
-		createAdmin(store)
+		createAdminUserFromEnvironmentVariables(store)
 	}
 
 	if flagRefreshFeeds {
@@ -12,15 +12,20 @@ import (
 	"miniflux.app/v2/internal/validator"
 )
 
-func createAdmin(store *storage.Storage) {
-	userCreationRequest := &model.UserCreationRequest{
-		Username: config.Opts.AdminUsername(),
-		Password: config.Opts.AdminPassword(),
-		IsAdmin:  true,
-	}
+func createAdminUserFromEnvironmentVariables(store *storage.Storage) {
+	createAdminUser(store, config.Opts.AdminUsername(), config.Opts.AdminPassword())
+}
 
-	if userCreationRequest.Username == "" || userCreationRequest.Password == "" {
-		userCreationRequest.Username, userCreationRequest.Password = askCredentials()
+func createAdminUserFromInteractiveTerminal(store *storage.Storage) {
+	username, password := askCredentials()
+	createAdminUser(store, username, password)
+}
+
+func createAdminUser(store *storage.Storage, username, password string) {
+	userCreationRequest := &model.UserCreationRequest{
+		Username: username,
+		Password: password,
+		IsAdmin:  true,
 	}
 
 	if store.UserExists(userCreationRequest.Username) {
@@ -34,7 +39,12 @@ func createAdminUser(store *storage.Storage, username, password string) {
 		printErrorAndExit(validationErr.Error())
 	}
 
-	if _, err := store.CreateUser(userCreationRequest); err != nil {
+	if user, err := store.CreateUser(userCreationRequest); err != nil {
 		printErrorAndExit(err)
+	} else {
+		slog.Info("Created new admin user",
+			slog.String("username", user.Username),
+			slog.Int64("user_id", user.ID),
+		)
 	}
 }
@@ -4,6 +4,7 @@
 package config // import "miniflux.app/v2/internal/config"
 
 import (
+	"bytes"
 	"os"
 	"testing"
 )
@@ -1442,9 +1443,9 @@ func TestPocketConsumerKeyFromUserPrefs(t *testing.T) {
 	}
 }
 
-func TestProxyOption(t *testing.T) {
+func TestMediaProxyMode(t *testing.T) {
 	os.Clearenv()
-	os.Setenv("PROXY_OPTION", "all")
+	os.Setenv("MEDIA_PROXY_MODE", "all")
 
 	parser := NewParser()
 	opts, err := parser.ParseEnvironmentVariables()
@@ -1453,14 +1454,14 @@
 	}
 
 	expected := "all"
-	result := opts.ProxyOption()
+	result := opts.MediaProxyMode()
 
 	if result != expected {
-		t.Fatalf(`Unexpected PROXY_OPTION value, got %q instead of %q`, result, expected)
+		t.Fatalf(`Unexpected MEDIA_PROXY_MODE value, got %q instead of %q`, result, expected)
 	}
 }
 
-func TestDefaultProxyOptionValue(t *testing.T) {
+func TestDefaultMediaProxyModeValue(t *testing.T) {
 	os.Clearenv()
 
 	parser := NewParser()
@@ -1469,17 +1470,17 @@
 		t.Fatalf(`Parsing failure: %v`, err)
 	}
 
-	expected := defaultProxyOption
-	result := opts.ProxyOption()
+	expected := defaultMediaProxyMode
+	result := opts.MediaProxyMode()
 
 	if result != expected {
-		t.Fatalf(`Unexpected PROXY_OPTION value, got %q instead of %q`, result, expected)
+		t.Fatalf(`Unexpected MEDIA_PROXY_MODE value, got %q instead of %q`, result, expected)
 	}
 }
 
-func TestProxyMediaTypes(t *testing.T) {
+func TestMediaProxyResourceTypes(t *testing.T) {
 	os.Clearenv()
-	os.Setenv("PROXY_MEDIA_TYPES", "image,audio")
+	os.Setenv("MEDIA_PROXY_RESOURCE_TYPES", "image,audio")
 
 	parser := NewParser()
 	opts, err := parser.ParseEnvironmentVariables()
@@ -1489,25 +1490,25 @@
 
 	expected := []string{"audio", "image"}
 
-	if len(expected) != len(opts.ProxyMediaTypes()) {
-		t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.ProxyMediaTypes(), expected)
+	if len(expected) != len(opts.MediaProxyResourceTypes()) {
+		t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
 	}
 
 	resultMap := make(map[string]bool)
-	for _, mediaType := range opts.ProxyMediaTypes() {
+	for _, mediaType := range opts.MediaProxyResourceTypes() {
 		resultMap[mediaType] = true
 	}
 
 	for _, mediaType := range expected {
 		if !resultMap[mediaType] {
-			t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.ProxyMediaTypes(), expected)
+			t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
 		}
 	}
 }
 
-func TestProxyMediaTypesWithDuplicatedValues(t *testing.T) {
+func TestMediaProxyResourceTypesWithDuplicatedValues(t *testing.T) {
 	os.Clearenv()
-	os.Setenv("PROXY_MEDIA_TYPES", "image,audio, image")
+	os.Setenv("MEDIA_PROXY_RESOURCE_TYPES", "image,audio, image")
 
 	parser := NewParser()
 	opts, err := parser.ParseEnvironmentVariables()
@@ -1516,23 +1517,119 @@
 	}
 
 	expected := []string{"audio", "image"}
-	if len(expected) != len(opts.ProxyMediaTypes()) {
-		t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.ProxyMediaTypes(), expected)
+	if len(expected) != len(opts.MediaProxyResourceTypes()) {
+		t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
 	}
 
 	resultMap := make(map[string]bool)
-	for _, mediaType := range opts.ProxyMediaTypes() {
+	for _, mediaType := range opts.MediaProxyResourceTypes() {
 		resultMap[mediaType] = true
 	}
 
 	for _, mediaType := range expected {
 		if !resultMap[mediaType] {
-			t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.ProxyMediaTypes(), expected)
+			t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
		}
	}
}
 
-func TestProxyImagesOptionBackwardCompatibility(t *testing.T) {
+func TestDefaultMediaProxyResourceTypes(t *testing.T) {
+	os.Clearenv()
+
+	parser := NewParser()
+	opts, err := parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+
+	expected := []string{"image"}
+
+	if len(expected) != len(opts.MediaProxyResourceTypes()) {
+		t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
+	}
+
+	resultMap := make(map[string]bool)
+	for _, mediaType := range opts.MediaProxyResourceTypes() {
+		resultMap[mediaType] = true
+	}
+
+	for _, mediaType := range expected {
+		if !resultMap[mediaType] {
+			t.Fatalf(`Unexpected MEDIA_PROXY_RESOURCE_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
+		}
+	}
+}
+
+func TestMediaProxyHTTPClientTimeout(t *testing.T) {
+	os.Clearenv()
+	os.Setenv("MEDIA_PROXY_HTTP_CLIENT_TIMEOUT", "24")
+
+	parser := NewParser()
+	opts, err := parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+
+	expected := 24
+	result := opts.MediaProxyHTTPClientTimeout()
+
+	if result != expected {
+		t.Fatalf(`Unexpected MEDIA_PROXY_HTTP_CLIENT_TIMEOUT value, got %d instead of %d`, result, expected)
+	}
+}
+
+func TestDefaultMediaProxyHTTPClientTimeoutValue(t *testing.T) {
+	os.Clearenv()
+
+	parser := NewParser()
+	opts, err := parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+
+	expected := defaultMediaProxyHTTPClientTimeout
+	result := opts.MediaProxyHTTPClientTimeout()
+
+	if result != expected {
+		t.Fatalf(`Unexpected MEDIA_PROXY_HTTP_CLIENT_TIMEOUT value, got %d instead of %d`, result, expected)
+	}
+}
+
+func TestMediaProxyCustomURL(t *testing.T) {
+	os.Clearenv()
+	os.Setenv("MEDIA_PROXY_CUSTOM_URL", "http://example.org/proxy")
+
+	parser := NewParser()
+	opts, err := parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+	expected := "http://example.org/proxy"
+	result := opts.MediaCustomProxyURL()
+	if result != expected {
+		t.Fatalf(`Unexpected MEDIA_PROXY_CUSTOM_URL value, got %q instead of %q`, result, expected)
+	}
+}
+
+func TestMediaProxyPrivateKey(t *testing.T) {
+	os.Clearenv()
+	os.Setenv("MEDIA_PROXY_PRIVATE_KEY", "foobar")
+
+	parser := NewParser()
+	opts, err := parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+
+	expected := []byte("foobar")
+	result := opts.MediaProxyPrivateKey()
+
+	if !bytes.Equal(result, expected) {
+		t.Fatalf(`Unexpected MEDIA_PROXY_PRIVATE_KEY value, got %q instead of %q`, result, expected)
+	}
+}
+
+func TestProxyImagesOptionForBackwardCompatibility(t *testing.T) {
 	os.Clearenv()
 	os.Setenv("PROXY_IMAGES", "all")
 
@@ -1543,30 +1640,31 @@
 	}
 
 	expected := []string{"image"}
-	if len(expected) != len(opts.ProxyMediaTypes()) {
-		t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.ProxyMediaTypes(), expected)
+	if len(expected) != len(opts.MediaProxyResourceTypes()) {
+		t.Fatalf(`Unexpected PROXY_IMAGES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
 	}
 
 	resultMap := make(map[string]bool)
-	for _, mediaType := range opts.ProxyMediaTypes() {
+	for _, mediaType := range opts.MediaProxyResourceTypes() {
 		resultMap[mediaType] = true
 	}
 
 	for _, mediaType := range expected {
 		if !resultMap[mediaType] {
-			t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.ProxyMediaTypes(), expected)
+			t.Fatalf(`Unexpected PROXY_IMAGES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
 		}
 	}
 
 	expectedProxyOption := "all"
-	result := opts.ProxyOption()
+	result := opts.MediaProxyMode()
 	if result != expectedProxyOption {
 		t.Fatalf(`Unexpected PROXY_OPTION value, got %q instead of %q`, result, expectedProxyOption)
 	}
 }
 
-func TestDefaultProxyMediaTypes(t *testing.T) {
+func TestProxyImageURLForBackwardCompatibility(t *testing.T) {
 	os.Clearenv()
+	os.Setenv("PROXY_IMAGE_URL", "http://example.org/proxy")
 
 	parser := NewParser()
 	opts, err := parser.ParseEnvironmentVariables()
@@ -1574,25 +1672,73 @@
 		t.Fatalf(`Parsing failure: %v`, err)
 	}
 
-	expected := []string{"image"}
+	expected := "http://example.org/proxy"
+	result := opts.MediaCustomProxyURL()
+	if result != expected {
+		t.Fatalf(`Unexpected PROXY_IMAGE_URL value, got %q instead of %q`, result, expected)
+	}
+}
 
-	if len(expected) != len(opts.ProxyMediaTypes()) {
-		t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.ProxyMediaTypes(), expected)
+func TestProxyURLOptionForBackwardCompatibility(t *testing.T) {
+	os.Clearenv()
+	os.Setenv("PROXY_URL", "http://example.org/proxy")
+
+	parser := NewParser()
+	opts, err := parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+
+	expected := "http://example.org/proxy"
+	result := opts.MediaCustomProxyURL()
+	if result != expected {
+		t.Fatalf(`Unexpected PROXY_URL value, got %q instead of %q`, result, expected)
+	}
+}
+
+func TestProxyMediaTypesOptionForBackwardCompatibility(t *testing.T) {
+	os.Clearenv()
+	os.Setenv("PROXY_MEDIA_TYPES", "image,audio")
+
+	parser := NewParser()
+	opts, err := parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+	expected := []string{"audio", "image"}
+	if len(expected) != len(opts.MediaProxyResourceTypes()) {
+		t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
 	}
 
 	resultMap := make(map[string]bool)
-	for _, mediaType := range opts.ProxyMediaTypes() {
+	for _, mediaType := range opts.MediaProxyResourceTypes() {
 		resultMap[mediaType] = true
 	}
 
 	for _, mediaType := range expected {
 		if !resultMap[mediaType] {
-			t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.ProxyMediaTypes(), expected)
+			t.Fatalf(`Unexpected PROXY_MEDIA_TYPES value, got %v instead of %v`, opts.MediaProxyResourceTypes(), expected)
 		}
 	}
 }
 
-func TestProxyHTTPClientTimeout(t *testing.T) {
+func TestProxyOptionForBackwardCompatibility(t *testing.T) {
 	os.Clearenv()
-	os.Setenv("PROXY_HTTP_CLIENT_TIMEOUT", "24")
+	os.Setenv("PROXY_OPTION", "all")
+
+	parser := NewParser()
+	opts, err := parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+	expected := "all"
+	result := opts.MediaProxyMode()
+	if result != expected {
+		t.Fatalf(`Unexpected PROXY_OPTION value, got %q instead of %q`, result, expected)
+	}
+}
+
+func TestProxyHTTPClientTimeoutOptionForBackwardCompatibility(t *testing.T) {
+	os.Clearenv()
+	os.Setenv("PROXY_HTTP_CLIENT_TIMEOUT", "24")
 
@@ -1601,29 +1747,26 @@
 	parser := NewParser()
 	opts, err := parser.ParseEnvironmentVariables()
 	if err != nil {
 		t.Fatalf(`Parsing failure: %v`, err)
 	}
 
 	expected := 24
-	result := opts.ProxyHTTPClientTimeout()
-
+	result := opts.MediaProxyHTTPClientTimeout()
 	if result != expected {
 		t.Fatalf(`Unexpected PROXY_HTTP_CLIENT_TIMEOUT value, got %d instead of %d`, result, expected)
 	}
 }
 
-func TestDefaultProxyHTTPClientTimeoutValue(t *testing.T) {
+func TestProxyPrivateKeyOptionForBackwardCompatibility(t *testing.T) {
 	os.Clearenv()
+	os.Setenv("PROXY_PRIVATE_KEY", "foobar")
 
 	parser := NewParser()
 	opts, err := parser.ParseEnvironmentVariables()
 	if err != nil {
 		t.Fatalf(`Parsing failure: %v`, err)
 	}
 
-	expected := defaultProxyHTTPClientTimeout
-	result := opts.ProxyHTTPClientTimeout()
-
-	if result != expected {
-		t.Fatalf(`Unexpected PROXY_HTTP_CLIENT_TIMEOUT value, got %d instead of %d`, result, expected)
+	expected := []byte("foobar")
+	result := opts.MediaProxyPrivateKey()
+	if !bytes.Equal(result, expected) {
+		t.Fatalf(`Unexpected PROXY_PRIVATE_KEY value, got %q instead of %q`, result, expected)
 	}
 }
@@ -27,7 +27,7 @@ const (
 	defaultBaseURL                     = "http://localhost"
 	defaultRootURL                     = "http://localhost"
 	defaultBasePath                    = ""
-	defaultWorkerPoolSize              = 5
+	defaultWorkerPoolSize              = 16
 	defaultPollingFrequency            = 60
 	defaultForceRefreshInterval        = 30
 	defaultBatchSize                   = 100
@@ -51,10 +51,11 @@ const (
 	defaultCleanupArchiveUnreadDays    = 180
 	defaultCleanupArchiveBatchSize     = 10000
 	defaultCleanupRemoveSessionsDays   = 30
-	defaultProxyHTTPClientTimeout      = 120
-	defaultProxyOption                 = "http-only"
-	defaultProxyMediaTypes             = "image"
-	defaultProxyUrl                    = ""
+	defaultMediaProxyHTTPClientTimeout = 120
+	defaultMediaProxyMode              = "http-only"
+	defaultMediaResourceTypes          = "image"
+	defaultMediaProxyURL               = ""
+	defaultFilterEntryMaxAgeDays       = 0
 	defaultFetchOdyseeWatchTime        = false
 	defaultFetchYouTubeWatchTime       = false
 	defaultYouTubeEmbedUrlOverride     = "https://www.youtube-nocookie.com/embed/"
@@ -135,12 +136,13 @@ type Options struct {
 	createAdmin                 bool
 	adminUsername               string
 	adminPassword               string
-	proxyHTTPClientTimeout      int
-	proxyOption                 string
-	proxyMediaTypes             []string
-	proxyUrl                    string
+	mediaProxyHTTPClientTimeout int
+	mediaProxyMode              string
+	mediaProxyResourceTypes     []string
+	mediaProxyCustomURL         string
 	fetchOdyseeWatchTime        bool
 	fetchYouTubeWatchTime       bool
+	filterEntryMaxAgeDays       int
 	youTubeEmbedUrlOverride     string
 	oauth2UserCreationAllowed   bool
 	oauth2ClientID              string
@@ -165,7 +167,7 @@ type Options struct {
 	metricsPassword             string
 	watchdog                    bool
 	invidiousInstance           string
-	proxyPrivateKey             []byte
+	mediaProxyPrivateKey        []byte
 	webAuthn                    bool
 }
 
@@ -209,10 +211,11 @@ func NewOptions() *Options {
 		pollingParsingErrorLimit:    defaultPollingParsingErrorLimit,
 		workerPoolSize:              defaultWorkerPoolSize,
 		createAdmin:                 defaultCreateAdmin,
-		proxyHTTPClientTimeout:      defaultProxyHTTPClientTimeout,
-		proxyOption:                 defaultProxyOption,
-		proxyMediaTypes:             []string{defaultProxyMediaTypes},
-		proxyUrl:                    defaultProxyUrl,
+		mediaProxyHTTPClientTimeout: defaultMediaProxyHTTPClientTimeout,
+		mediaProxyMode:              defaultMediaProxyMode,
+		mediaProxyResourceTypes:     []string{defaultMediaResourceTypes},
+		mediaProxyCustomURL:         defaultMediaProxyURL,
+		filterEntryMaxAgeDays:       defaultFilterEntryMaxAgeDays,
 		fetchOdyseeWatchTime:        defaultFetchOdyseeWatchTime,
 		fetchYouTubeWatchTime:       defaultFetchYouTubeWatchTime,
 		youTubeEmbedUrlOverride:     defaultYouTubeEmbedUrlOverride,
@@ -239,7 +242,7 @@ func NewOptions() *Options {
 		metricsPassword:             defaultMetricsPassword,
 		watchdog:                    defaultWatchdog,
 		invidiousInstance:           defaultInvidiousInstance,
-		proxyPrivateKey:             crypto.GenerateRandomBytes(16),
+		mediaProxyPrivateKey:        crypto.GenerateRandomBytes(16),
 		webAuthn:                    defaultWebAuthn,
 	}
 }
@@ -489,24 +492,29 @@ func (o *Options) FetchOdyseeWatchTime() bool {
 	return o.fetchOdyseeWatchTime
 }
 
-// ProxyOption returns "none" to never proxy, "http-only" to proxy non-HTTPS, "all" to always proxy.
-func (o *Options) ProxyOption() string {
-	return o.proxyOption
+// MediaProxyMode returns "none" to never proxy, "http-only" to proxy non-HTTPS, "all" to always proxy.
+func (o *Options) MediaProxyMode() string {
+	return o.mediaProxyMode
 }
 
-// ProxyMediaTypes returns a slice of media types to proxy.
-func (o *Options) ProxyMediaTypes() []string {
-	return o.proxyMediaTypes
+// MediaProxyResourceTypes returns a slice of resource types to proxy.
+func (o *Options) MediaProxyResourceTypes() []string {
+	return o.mediaProxyResourceTypes
 }
 
-// ProxyUrl returns a string of a URL to use to proxy image requests
-func (o *Options) ProxyUrl() string {
-	return o.proxyUrl
+// MediaCustomProxyURL returns the custom proxy URL for medias.
+func (o *Options) MediaCustomProxyURL() string {
+	return o.mediaProxyCustomURL
 }
 
-// ProxyHTTPClientTimeout returns the time limit in seconds before the proxy HTTP client cancel the request.
-func (o *Options) ProxyHTTPClientTimeout() int {
-	return o.proxyHTTPClientTimeout
+// MediaProxyHTTPClientTimeout returns the time limit in seconds before the proxy HTTP client cancel the request.
+func (o *Options) MediaProxyHTTPClientTimeout() int {
+	return o.mediaProxyHTTPClientTimeout
+}
+
+// MediaProxyPrivateKey returns the private key used by the media proxy.
+func (o *Options) MediaProxyPrivateKey() []byte {
+	return o.mediaProxyPrivateKey
 }
 
 // HasHTTPService returns true if the HTTP service is enabled.
@@ -602,16 +610,16 @@ func (o *Options) InvidiousInstance() string {
 	return o.invidiousInstance
 }
 
-// ProxyPrivateKey returns the private key used by the media proxy
-func (o *Options) ProxyPrivateKey() []byte {
-	return o.proxyPrivateKey
-}
-
 // WebAuthn returns true if WebAuthn logins are supported
 func (o *Options) WebAuthn() bool {
 	return o.webAuthn
 }
 
+// FilterEntryMaxAgeDays returns the number of days after which entries should be retained.
+func (o *Options) FilterEntryMaxAgeDays() int {
+	return o.filterEntryMaxAgeDays
+}
+
 // SortedOptions returns options as a list of key value pairs, sorted by keys.
 func (o *Options) SortedOptions(redactSecret bool) []*Option {
 	var keyValues = map[string]interface{}{
@@ -637,6 +645,7 @@ func (o *Options) SortedOptions(redactSecret bool) []*Option {
 		"DISABLE_HSTS":                           !o.hsts,
 		"DISABLE_HTTP_SERVICE":                   !o.httpService,
 		"DISABLE_SCHEDULER_SERVICE":              !o.schedulerService,
+		"FILTER_ENTRY_MAX_AGE_DAYS":              o.filterEntryMaxAgeDays,
 		"FETCH_YOUTUBE_WATCH_TIME":               o.fetchYouTubeWatchTime,
 		"FETCH_ODYSEE_WATCH_TIME":                o.fetchOdyseeWatchTime,
 		"HTTPS":                                  o.HTTPS,
@@ -671,11 +680,11 @@ func (o *Options) SortedOptions(redactSecret bool) []*Option {
 		"FORCE_REFRESH_INTERVAL":                 o.forceRefreshInterval,
 		"POLLING_PARSING_ERROR_LIMIT":            o.pollingParsingErrorLimit,
 		"POLLING_SCHEDULER":                      o.pollingScheduler,
-		"PROXY_HTTP_CLIENT_TIMEOUT":              o.proxyHTTPClientTimeout,
-		"PROXY_MEDIA_TYPES":                      o.proxyMediaTypes,
-		"PROXY_OPTION":                           o.proxyOption,
-		"PROXY_PRIVATE_KEY":                      redactSecretValue(string(o.proxyPrivateKey), redactSecret),
-		"PROXY_URL":                              o.proxyUrl,
+		"MEDIA_PROXY_HTTP_CLIENT_TIMEOUT":        o.mediaProxyHTTPClientTimeout,
+		"MEDIA_PROXY_RESOURCE_TYPES":             o.mediaProxyResourceTypes,
+		"MEDIA_PROXY_MODE":                       o.mediaProxyMode,
+		"MEDIA_PROXY_PRIVATE_KEY":                redactSecretValue(string(o.mediaProxyPrivateKey), redactSecret),
+		"MEDIA_PROXY_CUSTOM_URL":                 o.mediaProxyCustomURL,
 		"ROOT_URL":                               o.rootURL,
 		"RUN_MIGRATIONS":                         o.runMigrations,
 		"SCHEDULER_ENTRY_FREQUENCY_MAX_INTERVAL": o.schedulerEntryFrequencyMaxInterval,
@@ -10,6 +10,7 @@ import (
 	"errors"
 	"fmt"
 	"io"
+	"log/slog"
 	"net/url"
 	"os"
 	"strconv"
@@ -87,6 +88,7 @@ func (p *Parser) parseLines(lines []string) (err error) {
 				p.opts.logFormat = parsedValue
 			}
 		case "DEBUG":
+			slog.Warn("The DEBUG environment variable is deprecated, use LOG_LEVEL instead")
 			parsedValue := parseBool(value, defaultDebug)
 			if parsedValue {
 				p.opts.logLevel = "debug"
@@ -112,6 +114,8 @@ func (p *Parser) parseLines(lines []string) (err error) {
 			p.opts.databaseMinConns = parseInt(value, defaultDatabaseMinConns)
 		case "DATABASE_CONNECTION_LIFETIME":
 			p.opts.databaseConnectionLifetime = parseInt(value, defaultDatabaseConnectionLifetime)
+		case "FILTER_ENTRY_MAX_AGE_DAYS":
+			p.opts.filterEntryMaxAgeDays = parseInt(value, defaultFilterEntryMaxAgeDays)
 		case "RUN_MIGRATIONS":
 			p.opts.runMigrations = parseBool(value, defaultRunMigrations)
 		case "DISABLE_HSTS":
@@ -158,20 +162,41 @@ func (p *Parser) parseLines(lines []string) (err error) {
 			p.opts.schedulerRoundRobinMinInterval = parseInt(value, defaultSchedulerRoundRobinMinInterval)
 		case "POLLING_PARSING_ERROR_LIMIT":
 			p.opts.pollingParsingErrorLimit = parseInt(value, defaultPollingParsingErrorLimit)
-		// kept for compatibility purpose
 		case "PROXY_IMAGES":
-			p.opts.proxyOption = parseString(value, defaultProxyOption)
+			slog.Warn("The PROXY_IMAGES environment variable is deprecated, use MEDIA_PROXY_MODE instead")
+			p.opts.mediaProxyMode = parseString(value, defaultMediaProxyMode)
 		case "PROXY_HTTP_CLIENT_TIMEOUT":
-			p.opts.proxyHTTPClientTimeout = parseInt(value, defaultProxyHTTPClientTimeout)
+			slog.Warn("The PROXY_HTTP_CLIENT_TIMEOUT environment variable is deprecated, use MEDIA_PROXY_HTTP_CLIENT_TIMEOUT instead")
+			p.opts.mediaProxyHTTPClientTimeout = parseInt(value, defaultMediaProxyHTTPClientTimeout)
+		case "MEDIA_PROXY_HTTP_CLIENT_TIMEOUT":
+			p.opts.mediaProxyHTTPClientTimeout = parseInt(value, defaultMediaProxyHTTPClientTimeout)
 		case "PROXY_OPTION":
-			p.opts.proxyOption = parseString(value, defaultProxyOption)
+			slog.Warn("The PROXY_OPTION environment variable is deprecated, use MEDIA_PROXY_MODE instead")
+			p.opts.mediaProxyMode = parseString(value, defaultMediaProxyMode)
+		case "MEDIA_PROXY_MODE":
+			p.opts.mediaProxyMode = parseString(value, defaultMediaProxyMode)
 		case "PROXY_MEDIA_TYPES":
-			p.opts.proxyMediaTypes = parseStringList(value, []string{defaultProxyMediaTypes})
-		// kept for compatibility purpose
+			slog.Warn("The PROXY_MEDIA_TYPES environment variable is deprecated, use MEDIA_PROXY_RESOURCE_TYPES instead")
+			p.opts.mediaProxyResourceTypes = parseStringList(value, []string{defaultMediaResourceTypes})
+		case "MEDIA_PROXY_RESOURCE_TYPES":
+			p.opts.mediaProxyResourceTypes = parseStringList(value, []string{defaultMediaResourceTypes})
 		case "PROXY_IMAGE_URL":
-			p.opts.proxyUrl = parseString(value, defaultProxyUrl)
+			slog.Warn("The PROXY_IMAGE_URL environment variable is deprecated, use MEDIA_PROXY_CUSTOM_URL instead")
+			p.opts.mediaProxyCustomURL = parseString(value, defaultMediaProxyURL)
 		case "PROXY_URL":
-			p.opts.proxyUrl = parseString(value, defaultProxyUrl)
+			slog.Warn("The PROXY_URL environment variable is deprecated, use MEDIA_PROXY_CUSTOM_URL instead")
+			p.opts.mediaProxyCustomURL = parseString(value, defaultMediaProxyURL)
+		case "PROXY_PRIVATE_KEY":
+			slog.Warn("The PROXY_PRIVATE_KEY environment variable is deprecated, use MEDIA_PROXY_PRIVATE_KEY instead")
+			randomKey := make([]byte, 16)
+			rand.Read(randomKey)
+			p.opts.mediaProxyPrivateKey = parseBytes(value, randomKey)
+		case "MEDIA_PROXY_PRIVATE_KEY":
+			randomKey := make([]byte, 16)
+			rand.Read(randomKey)
+			p.opts.mediaProxyPrivateKey = parseBytes(value, randomKey)
+		case "MEDIA_PROXY_CUSTOM_URL":
+			p.opts.mediaProxyCustomURL = parseString(value, defaultMediaProxyURL)
 		case "CREATE_ADMIN":
 			p.opts.createAdmin = parseBool(value, defaultCreateAdmin)
 		case "ADMIN_USERNAME":
@@ -244,10 +269,6 @@ func (p *Parser) parseLines(lines []string) (err error) {
 			p.opts.watchdog = parseBool(value, defaultWatchdog)
 		case "INVIDIOUS_INSTANCE":
 			p.opts.invidiousInstance = parseString(value, defaultInvidiousInstance)
-		case "PROXY_PRIVATE_KEY":
-			randomKey := make([]byte, 16)
-			rand.Read(randomKey)
-			p.opts.proxyPrivateKey = parseBytes(value, randomKey)
 		case "WEBAUTHN":
 			p.opts.webAuthn = parseBool(value, defaultWebAuthn)
 		}
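The parser changes above keep old PROXY_* deployments working: both the deprecated and the new variable feed the same internal option, with a warning logged for the old name. A minimal standalone sketch of that idea follows; readMediaProxyMode is our name, and the precedence is simplified (the real parser applies variables in the order they appear in the environment or config file).

package main

import (
	"log/slog"
	"os"
)

// readMediaProxyMode resolves the media proxy mode: the new variable wins
// when set, the deprecated one still works but logs a deprecation warning.
func readMediaProxyMode() string {
	if v := os.Getenv("MEDIA_PROXY_MODE"); v != "" {
		return v
	}
	if v := os.Getenv("PROXY_OPTION"); v != "" {
		slog.Warn("The PROXY_OPTION environment variable is deprecated, use MEDIA_PROXY_MODE instead")
		return v
	}
	return "http-only" // mirrors defaultMediaProxyMode
}

func main() {
	slog.Info("media proxy mode", slog.String("mode", readMediaProxyMode()))
}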
@@ -871,4 +871,15 @@ var migrations = []func(tx *sql.Tx) error{
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx) (err error) {
sql := `ALTER TABLE users ADD COLUMN media_playback_rate numeric default 1;`
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx) (err error) {
// the WHERE part speed-up the request a lot
sql := `UPDATE entries SET tags = array_remove(tags, '') WHERE '' = ANY(tags);`
_, err = tx.Exec(sql)
return err
},
}

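Each entry in the migrations slice is an anonymous func(tx *sql.Tx) error, so a migration is just SQL executed inside a transaction supplied by the runner. A sketch of how such a slice is typically driven (the migrate runner below is illustrative, not Miniflux's actual runner, and actually running it requires a registered database/sql driver):

    package main

    import (
    	"database/sql"
    	"fmt"
    )

    // migrations are applied in order; the index acts as the schema version.
    var migrations = []func(tx *sql.Tx) error{
    	func(tx *sql.Tx) (err error) {
    		_, err = tx.Exec(`ALTER TABLE users ADD COLUMN media_playback_rate numeric default 1;`)
    		return err
    	},
    	func(tx *sql.Tx) (err error) {
    		// The WHERE clause avoids rewriting rows that have no empty tag.
    		_, err = tx.Exec(`UPDATE entries SET tags = array_remove(tags, '') WHERE '' = ANY(tags);`)
    		return err
    	},
    }

    // migrate runs every migration after currentVersion, one transaction each.
    func migrate(db *sql.DB, currentVersion int) error {
    	for v := currentVersion; v < len(migrations); v++ {
    		tx, err := db.Begin()
    		if err != nil {
    			return err
    		}
    		if err := migrations[v](tx); err != nil {
    			tx.Rollback() // best effort; the original error is what matters
    			return fmt.Errorf("migration %d: %w", v+1, err)
    		}
    		if err := tx.Commit(); err != nil {
    			return err
    		}
    	}
    	return nil
    }

    func main() {
    	fmt.Printf("%d migrations registered\n", len(migrations))
    }
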
@@ -13,8 +13,8 @@ import (
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/integration"
"miniflux.app/v2/internal/mediaproxy"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/proxy"
"miniflux.app/v2/internal/storage"

"github.com/gorilla/mux"

@@ -324,7 +324,7 @@ func (h *handler) handleItems(w http.ResponseWriter, r *http.Request) {
FeedID: entry.FeedID,
Title: entry.Title,
Author: entry.Author,
HTML: proxy.AbsoluteProxyRewriter(h.router, r.Host, entry.Content),
HTML: mediaproxy.RewriteDocumentWithAbsoluteProxyURL(h.router, r.Host, entry.Content),
URL: entry.URL,
IsSaved: isSaved,
IsRead: isRead,

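The old proxy package becomes mediaproxy, and AbsoluteProxyRewriter becomes RewriteDocumentWithAbsoluteProxyURL: entry HTML is rewritten so media URLs point at the /proxy/{encodedDigest}/{encodedURL} route on the Miniflux host. A rough sketch of what one proxified URL boils down to, assuming an HMAC-SHA256 digest (which matches the 32-byte, base64-encoded digests in the test expectations at the end of this diff); the actual construction lives in internal/mediaproxy:

    package main

    import (
    	"crypto/hmac"
    	"crypto/sha256"
    	"encoding/base64"
    	"fmt"
    )

    // proxifyURL sketches the /proxy/{encodedDigest}/{encodedURL} scheme:
    // an HMAC digest of the target URL plus the base64-encoded URL itself.
    func proxifyURL(host, mediaURL string, privateKey []byte) string {
    	mac := hmac.New(sha256.New, privateKey)
    	mac.Write([]byte(mediaURL))
    	digest := base64.URLEncoding.EncodeToString(mac.Sum(nil))
    	encodedURL := base64.URLEncoding.EncodeToString([]byte(mediaURL))
    	return fmt.Sprintf("http://%s/proxy/%s/%s", host, digest, encodedURL)
    }

    func main() {
    	fmt.Println(proxifyURL("localhost", "http://website/folder/image.png", []byte("test")))
    }
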
@@ -18,8 +18,8 @@ import (
"miniflux.app/v2/internal/http/response/json"
"miniflux.app/v2/internal/http/route"
"miniflux.app/v2/internal/integration"
"miniflux.app/v2/internal/mediaproxy"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/proxy"
"miniflux.app/v2/internal/reader/fetcher"
mff "miniflux.app/v2/internal/reader/handler"
mfs "miniflux.app/v2/internal/reader/subscription"

@@ -265,9 +265,10 @@ func getStreamFilterModifiers(r *http.Request) (RequestModifiers, error) {
}

func getStream(streamID string, userID int64) (Stream, error) {
if strings.HasPrefix(streamID, FeedPrefix) {
switch {
case strings.HasPrefix(streamID, FeedPrefix):
return Stream{Type: FeedStream, ID: strings.TrimPrefix(streamID, FeedPrefix)}, nil
} else if strings.HasPrefix(streamID, fmt.Sprintf(UserStreamPrefix, userID)) || strings.HasPrefix(streamID, StreamPrefix) {
case strings.HasPrefix(streamID, fmt.Sprintf(UserStreamPrefix, userID)) || strings.HasPrefix(streamID, StreamPrefix):
id := strings.TrimPrefix(streamID, fmt.Sprintf(UserStreamPrefix, userID))
id = strings.TrimPrefix(id, StreamPrefix)
switch id {

@@ -288,15 +289,15 @@ func getStream(streamID string, userID int64) (Stream, error) {
default:
return Stream{NoStream, ""}, fmt.Errorf("googlereader: unknown stream with id: %s", id)
}
} else if strings.HasPrefix(streamID, fmt.Sprintf(UserLabelPrefix, userID)) || strings.HasPrefix(streamID, LabelPrefix) {
case strings.HasPrefix(streamID, fmt.Sprintf(UserLabelPrefix, userID)) || strings.HasPrefix(streamID, LabelPrefix):
id := strings.TrimPrefix(streamID, fmt.Sprintf(UserLabelPrefix, userID))
id = strings.TrimPrefix(id, LabelPrefix)
return Stream{LabelStream, id}, nil
} else if streamID == "" {
case streamID == "":
return Stream{NoStream, ""}, nil
default:
return Stream{NoStream, ""}, fmt.Errorf("googlereader: unknown stream type: %s", streamID)
}

return Stream{NoStream, ""}, fmt.Errorf("googlereader: unknown stream type: %s", streamID)
}

func getStreams(streamIDs []string, userID int64) ([]Stream, error) {

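Replacing the if/else-if chain with a tagless switch flattens the prefix dispatch: each case returns directly, and the final return handles unknown stream types. A compilable sketch of the resulting shape, reduced to two prefixes for illustration (the types here are trimmed-down stand-ins for the real ones):

    package main

    import (
    	"fmt"
    	"strings"
    )

    type StreamType int

    const (
    	NoStream StreamType = iota
    	FeedStream
    	LabelStream
    )

    type Stream struct {
    	Type StreamType
    	ID   string
    }

    const (
    	FeedPrefix  = "feed/"
    	LabelPrefix = "user/-/label/"
    )

    // getStream mirrors the tagless-switch dispatch introduced in the diff.
    func getStream(streamID string) (Stream, error) {
    	switch {
    	case strings.HasPrefix(streamID, FeedPrefix):
    		return Stream{FeedStream, strings.TrimPrefix(streamID, FeedPrefix)}, nil
    	case strings.HasPrefix(streamID, LabelPrefix):
    		return Stream{LabelStream, strings.TrimPrefix(streamID, LabelPrefix)}, nil
    	case streamID == "":
    		return Stream{NoStream, ""}, nil
    	default:
    		return Stream{NoStream, ""}, fmt.Errorf("googlereader: unknown stream type: %s", streamID)
    	}
    }

    func main() {
    	fmt.Println(getStream("feed/42"))
    }
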
@@ -382,7 +383,7 @@ func getItemIDs(r *http.Request) ([]int64, error) {
return itemIDs, nil
}

func checkOutputFormat(w http.ResponseWriter, r *http.Request) error {
func checkOutputFormat(r *http.Request) error {
var output string
if r.Method == http.MethodPost {
err := r.ParseForm()

@@ -736,11 +737,12 @@ func getFeed(stream Stream, store *storage.Storage, userID int64) (*model.Feed,
}

func getOrCreateCategory(category Stream, store *storage.Storage, userID int64) (*model.Category, error) {
if category.ID == "" {
switch {
case category.ID == "":
return store.FirstCategory(userID)
} else if store.CategoryTitleExists(userID, category.ID) {
case store.CategoryTitleExists(userID, category.ID):
return store.CategoryByTitle(userID, category.ID)
} else {
default:
catRequest := model.CategoryRequest{
Title: category.ID,
}

@@ -764,7 +766,7 @@ func subscribe(newFeed Stream, category Stream, title string, store *storage.Sto
}

created, localizedError := mff.CreateFeed(store, userID, &feedRequest)
if err != nil {
if localizedError != nil {
return nil, localizedError.Error()
}

@@ -908,7 +910,7 @@ func (h *handler) streamItemContentsHandler(w http.ResponseWriter, r *http.Reque
slog.Int64("user_id", userID),
)

if err := checkOutputFormat(w, r); err != nil {
if err := checkOutputFormat(r); err != nil {
json.BadRequest(w, r, err)
return
}

@@ -1001,14 +1003,14 @@ func (h *handler) streamItemContentsHandler(w http.ResponseWriter, r *http.Reque
categories = append(categories, userStarred)
}

entry.Content = proxy.AbsoluteProxyRewriter(h.router, r.Host, entry.Content)
proxyOption := config.Opts.ProxyOption()
entry.Content = mediaproxy.RewriteDocumentWithAbsoluteProxyURL(h.router, r.Host, entry.Content)
proxyOption := config.Opts.MediaProxyMode()

for i := range entry.Enclosures {
if proxyOption == "all" || proxyOption != "none" && !urllib.IsHTTPS(entry.Enclosures[i].URL) {
for _, mediaType := range config.Opts.ProxyMediaTypes() {
for _, mediaType := range config.Opts.MediaProxyResourceTypes() {
if strings.HasPrefix(entry.Enclosures[i].MimeType, mediaType+"/") {
entry.Enclosures[i].URL = proxy.AbsoluteProxifyURL(h.router, r.Host, entry.Enclosures[i].URL)
entry.Enclosures[i].URL = mediaproxy.ProxifyAbsoluteURL(h.router, r.Host, entry.Enclosures[i].URL)
break
}
}

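The enclosure loop above only proxifies a URL when the configured mode requires it (always, or whenever the URL is plain HTTP) and the enclosure's MIME type matches one of the configured resource types. The decision can be read as a single predicate; a compilable sketch, where shouldProxify is a hypothetical helper and "http-only" is assumed as the third mode name alongside the "all" and "none" values visible in the handler:

    package main

    import (
    	"fmt"
    	"strings"
    )

    // shouldProxify reproduces the decision from the handler above:
    // proxy everything in "all" mode, otherwise only plain-HTTP URLs,
    // and in both cases only for configured media types ("image", "audio", ...).
    func shouldProxify(mode, enclosureURL, mimeType string, resourceTypes []string) bool {
    	if mode != "all" && (mode == "none" || strings.HasPrefix(enclosureURL, "https://")) {
    		return false
    	}
    	for _, mediaType := range resourceTypes {
    		if strings.HasPrefix(mimeType, mediaType+"/") {
    			return true
    		}
    	}
    	return false
    }

    func main() {
    	fmt.Println(shouldProxify("http-only", "http://example.org/a.mp3", "audio/mpeg", []string{"audio"}))
    }
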
@@ -1170,7 +1172,7 @@ func (h *handler) tagListHandler(w http.ResponseWriter, r *http.Request) {
slog.String("user_agent", r.UserAgent()),
)

if err := checkOutputFormat(w, r); err != nil {
if err := checkOutputFormat(r); err != nil {
json.BadRequest(w, r, err)
return
}

@@ -1205,7 +1207,7 @@ func (h *handler) subscriptionListHandler(w http.ResponseWriter, r *http.Request
slog.String("user_agent", r.UserAgent()),
)

if err := checkOutputFormat(w, r); err != nil {
if err := checkOutputFormat(r); err != nil {
json.BadRequest(w, r, err)
return
}

@@ -1224,7 +1226,7 @@ func (h *handler) subscriptionListHandler(w http.ResponseWriter, r *http.Request
URL: feed.FeedURL,
Categories: []subscriptionCategory{{fmt.Sprintf(UserLabelPrefix, userID) + feed.Category.Title, feed.Category.Title, "folder"}},
HTMLURL: feed.SiteURL,
IconURL: "", //TODO Icons are only base64 encode in DB yet
IconURL: "", // TODO: Icons are base64 encoded in the DB.
})
}
json.OK(w, r, result)

@@ -1251,7 +1253,7 @@ func (h *handler) userInfoHandler(w http.ResponseWriter, r *http.Request) {
slog.String("user_agent", r.UserAgent()),
)

if err := checkOutputFormat(w, r); err != nil {
if err := checkOutputFormat(r); err != nil {
json.BadRequest(w, r, err)
return
}

@@ -1276,7 +1278,7 @@ func (h *handler) streamItemIDsHandler(w http.ResponseWriter, r *http.Request) {
slog.Int64("user_id", userID),
)

if err := checkOutputFormat(w, r); err != nil {
if err := checkOutputFormat(r); err != nil {
json.BadRequest(w, r, err)
return
}

@@ -1477,8 +1479,7 @@ func (h *handler) handleFeedStreamHandler(w http.ResponseWriter, r *http.Request

if len(rm.ExcludeTargets) > 0 {
for _, s := range rm.ExcludeTargets {
switch s.Type {
case ReadStream:
if s.Type == ReadStream {
builder.WithoutStatus(model.EntryStatusRead)
}
}

@@ -37,14 +37,10 @@ const (

func WebAuthnSessionData(r *http.Request) *model.WebAuthnSession {
if v := r.Context().Value(WebAuthnDataContextKey); v != nil {
value, valid := v.(model.WebAuthnSession)
if !valid {
return nil
if value, valid := v.(model.WebAuthnSession); valid {
return &value
}

return &value
}

return nil
}

@@ -151,39 +147,27 @@ func ClientIP(r *http.Request) string {

func getContextStringValue(r *http.Request, key ContextKey) string {
if v := r.Context().Value(key); v != nil {
value, valid := v.(string)
if !valid {
return ""
if value, valid := v.(string); valid {
return value
}

return value
}

return ""
}

func getContextBoolValue(r *http.Request, key ContextKey) bool {
if v := r.Context().Value(key); v != nil {
value, valid := v.(bool)
if !valid {
return false
if value, valid := v.(bool); valid {
return value
}

return value
}

return false
}

func getContextInt64Value(r *http.Request, key ContextKey) int64 {
if v := r.Context().Value(key); v != nil {
value, valid := v.(int64)
if !valid {
return 0
if value, valid := v.(int64); valid {
return value
}

return value
}

return 0
}

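The three getContext*Value helpers share one shape: read the context value, type-assert, and fall back to a zero value. With Go 1.18+ generics they could collapse into a single function; a sketch of that alternative (this is not what the diff does, which deliberately keeps three concrete helpers):

    package main

    import (
    	"context"
    	"fmt"
    	"net/http"
    )

    type ContextKey int

    const UserIDContextKey ContextKey = iota

    // getContextValue is a generic version of the helpers above: it returns
    // the typed context value, or the type's zero value when absent or mistyped.
    func getContextValue[T any](r *http.Request, key ContextKey) T {
    	if value, valid := r.Context().Value(key).(T); valid {
    		return value
    	}
    	var zero T
    	return zero
    }

    func main() {
    	r, _ := http.NewRequest(http.MethodGet, "/", nil)
    	r = r.WithContext(context.WithValue(r.Context(), UserIDContextKey, int64(42)))
    	fmt.Println(getContextValue[int64](r, UserIDContextKey))  // 42
    	fmt.Println(getContextValue[string](r, UserIDContextKey)) // "" (wrong type)
    }
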
@@ -96,7 +96,6 @@ func (b *Builder) Write() {
}

func (b *Builder) writeHeaders() {
b.headers["X-XSS-Protection"] = "1; mode=block"
b.headers["X-Content-Type-Options"] = "nosniff"
b.headers["X-Frame-Options"] = "DENY"
b.headers["Referrer-Policy"] = "no-referrer"

@@ -28,7 +28,6 @@ func TestResponseHasCommonHeaders(t *testing.T) {
resp := w.Result()

headers := map[string]string{
"X-XSS-Protection": "1; mode=block",
"X-Content-Type-Options": "nosniff",
"X-Frame-Options": "DENY",
}

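X-XSS-Protection is dropped from both the builder and its test because the header is non-standard and ignored or deprecated by current browsers; the remaining defensive headers still apply. For reference, a minimal net/http middleware applying the same post-change header set (a sketch under that assumption, not the response builder's actual API):

    package main

    import "net/http"

    // secureHeaders applies the defensive headers the response builder sets
    // after this change; X-XSS-Protection is intentionally omitted.
    func secureHeaders(next http.Handler) http.Handler {
    	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
    		w.Header().Set("X-Content-Type-Options", "nosniff")
    		w.Header().Set("X-Frame-Options", "DENY")
    		w.Header().Set("Referrer-Policy", "no-referrer")
    		next.ServeHTTP(w, r)
    	})
    }

    func main() {
    	// Error intentionally ignored in this illustration.
    	http.ListenAndServe(":8080", secureHeaders(http.NotFoundHandler()))
    }
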
@@ -1,7 +1,7 @@
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package rssbridge // import "miniflux.app/integration/rssbridge"
package rssbridge // import "miniflux.app/v2/internal/integration/rssbridge"

import (
"encoding/json"

@@ -57,6 +57,7 @@ func (c *Client) SendSaveEntryWebhookEvent(entry *model.Entry) error {
ID: entry.Feed.ID,
UserID: entry.Feed.UserID,
CategoryID: entry.Feed.Category.ID,
Category: &WebhookCategory{ID: entry.Feed.Category.ID, Title: entry.Feed.Category.Title},
FeedURL: entry.Feed.FeedURL,
SiteURL: entry.Feed.SiteURL,
Title: entry.Feed.Title,

@@ -94,13 +95,13 @@ func (c *Client) SendNewEntriesWebhookEvent(feed *model.Feed, entries model.Entr
Tags: entry.Tags,
})
}

return c.makeRequest(NewEntriesEventType, &WebhookNewEntriesEvent{
EventType: NewEntriesEventType,
Feed: &WebhookFeed{
ID: feed.ID,
UserID: feed.UserID,
CategoryID: feed.Category.ID,
Category: &WebhookCategory{ID: feed.Category.ID, Title: feed.Category.Title},
FeedURL: feed.FeedURL,
SiteURL: feed.SiteURL,
Title: feed.Title,

@@ -145,13 +146,19 @@ func (c *Client) makeRequest(eventType string, payload any) error {
}

type WebhookFeed struct {
ID int64 `json:"id"`
UserID int64 `json:"user_id"`
CategoryID int64 `json:"category_id"`
FeedURL string `json:"feed_url"`
SiteURL string `json:"site_url"`
Title string `json:"title"`
CheckedAt time.Time `json:"checked_at"`
ID int64 `json:"id"`
UserID int64 `json:"user_id"`
CategoryID int64 `json:"category_id"`
Category *WebhookCategory `json:"category,omitempty"`
FeedURL string `json:"feed_url"`
SiteURL string `json:"site_url"`
Title string `json:"title"`
CheckedAt time.Time `json:"checked_at"`
}

type WebhookCategory struct {
ID int64 `json:"id"`
Title string `json:"title"`
}

type WebhookEntry struct {

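With the new Category pointer on WebhookFeed (serialized with omitempty), webhook consumers now receive the category id and title inline instead of only category_id. A self-contained sketch of the resulting JSON shape, using local copies of the structs from this diff and made-up values:

    package main

    import (
    	"encoding/json"
    	"os"
    	"time"
    )

    // Local copies of the structs above, trimmed to the feed part,
    // to show the JSON shape consumers receive after this change.
    type WebhookCategory struct {
    	ID    int64  `json:"id"`
    	Title string `json:"title"`
    }

    type WebhookFeed struct {
    	ID         int64            `json:"id"`
    	UserID     int64            `json:"user_id"`
    	CategoryID int64            `json:"category_id"`
    	Category   *WebhookCategory `json:"category,omitempty"`
    	FeedURL    string           `json:"feed_url"`
    	SiteURL    string           `json:"site_url"`
    	Title      string           `json:"title"`
    	CheckedAt  time.Time        `json:"checked_at"`
    }

    func main() {
    	feed := WebhookFeed{
    		ID:         1,
    		UserID:     1,
    		CategoryID: 7,
    		Category:   &WebhookCategory{ID: 7, Title: "News"},
    		FeedURL:    "https://example.org/feed.xml",
    		SiteURL:    "https://example.org",
    		Title:      "Example",
    		CheckedAt:  time.Now(),
    	}
    	enc := json.NewEncoder(os.Stdout)
    	enc.SetIndent("", "  ")
    	enc.Encode(feed) // emits "category": {"id": 7, "title": "News"}
    }
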
@@ -178,6 +178,8 @@
"page.keyboard_shortcuts.go_to_feed": "Zum Abonnement gehen",
"page.keyboard_shortcuts.go_to_previous_page": "Zur vorherigen Seite gehen",
"page.keyboard_shortcuts.go_to_next_page": "Zur nächsten Seite gehen",
"page.keyboard_shortcuts.go_to_bottom_item": "Gehen Sie zum untersten Element",
"page.keyboard_shortcuts.go_to_top_item": "Zum obersten Artikel gehen",
"page.keyboard_shortcuts.open_item": "Gewählten Artikel öffnen",
"page.keyboard_shortcuts.open_original": "Original-Artikel öffnen",
"page.keyboard_shortcuts.open_original_same_window": "Öffne den Original-Link in der aktuellen Registerkarte",

@@ -505,7 +507,7 @@
"error.http_body_read": "Der HTTP-Inhalt kann nicht gelesen werden: %v",
"error.http_empty_response_body": "Der Inhalt der HTTP-Antwort ist leer.",
"error.http_empty_response": "Die HTTP-Antwort ist leer. Vielleicht versucht die Webseite, sich vor Bots zu schützen?",
"error.tls_error": "TLS-Fehler: %v. Wenn Sie mögen, können Sie versuchen die TLS-Verifizierung in den Einstellungen des Abonnements zu deaktivieren.",
"error.tls_error": "TLS-Fehler: %q. Wenn Sie mögen, können Sie versuchen die TLS-Verifizierung in den Einstellungen des Abonnements zu deaktivieren.",
"error.network_operation": "Miniflux kann die Webseite aufgrund eines Netzwerk-Fehlers nicht erreichen: %v",
"error.network_timeout": "Die Webseite ist zu langsam und die Anfrage ist abgelaufen: %v.",
"error.http_client_error": "HTTP-Client-Fehler: %v.",

@@ -524,5 +526,7 @@
"error.unable_to_parse_feed": "Dieses Abonnement kann nicht gelesen werden: %v.",
"error.feed_not_found": "Dieses Abonnement existiert nicht oder gehört nicht zu diesem Benutzer.",
"error.unable_to_detect_rssbridge": "Abonnement kann nicht durch RSS-Bridge erkannt werden: %v.",
"error.feed_format_not_detected": "Das Format des Abonnements kann nicht erkannt werden: %v."
"error.feed_format_not_detected": "Das Format des Abonnements kann nicht erkannt werden: %v.",
"form.prefs.label.media_playback_rate": "Wiedergabegeschwindigkeit von Audio/Video",
"error.settings_media_playback_rate_range": "Die Wiedergabegeschwindigkeit liegt außerhalb des Bereichs"
}

@@ -178,6 +178,8 @@
"page.keyboard_shortcuts.go_to_feed": "Πηγαίνετε στη ροή",
"page.keyboard_shortcuts.go_to_previous_page": "Μετάβαση στην προηγούμενη σελίδα",
"page.keyboard_shortcuts.go_to_next_page": "Μετάβαση στην επόμενη σελίδα",
"page.keyboard_shortcuts.go_to_bottom_item": "Μετάβαση στο κάτω στοιχείο",
"page.keyboard_shortcuts.go_to_top_item": "Μετάβαση στο επάνω στοιχείο",
"page.keyboard_shortcuts.open_item": "Άνοιγμα επιλεγμένου στοιχείου",
"page.keyboard_shortcuts.open_original": "Άνοιγμα αρχικού συνδέσμου",
"page.keyboard_shortcuts.open_original_same_window": "Άνοιγμα αρχικού συνδέσμου στην τρέχουσα καρτέλα",

@@ -505,7 +507,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -524,5 +526,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "Ταχύτητα αναπαραγωγής του ήχου/βίντεο",
"error.settings_media_playback_rate_range": "Η ταχύτητα αναπαραγωγής είναι εκτός εύρους"
}

@@ -176,6 +176,8 @@
"page.keyboard_shortcuts.go_to_previous_item": "Go to previous item",
"page.keyboard_shortcuts.go_to_next_item": "Go to next item",
"page.keyboard_shortcuts.go_to_feed": "Go to feed",
"page.keyboard_shortcuts.go_to_top_item": "Go to top item",
"page.keyboard_shortcuts.go_to_bottom_item": "Go to bottom item",
"page.keyboard_shortcuts.go_to_previous_page": "Go to previous page",
"page.keyboard_shortcuts.go_to_next_page": "Go to next page",
"page.keyboard_shortcuts.open_item": "Open selected item",

@@ -215,7 +217,7 @@
"page.settings.webauthn.last_seen_on": "Last Used",
"page.settings.webauthn.register": "Register passkey",
"page.settings.webauthn.register.error": "Unable to register passkey",
"page.settings.webauthn.delete" : [
"page.settings.webauthn.delete": [
"Remove %d passkey",
"Remove %d passkeys"
],

@@ -505,7 +507,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -524,5 +526,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "Playback speed of the audio/video",
"error.settings_media_playback_rate_range": "Playback speed is out of range"
}

@@ -178,6 +178,8 @@
"page.keyboard_shortcuts.go_to_feed": "Ir a la fuente",
"page.keyboard_shortcuts.go_to_previous_page": "Ir al página anterior",
"page.keyboard_shortcuts.go_to_next_page": "Ir al página siguiente",
"page.keyboard_shortcuts.go_to_bottom_item": "Ir al elemento inferior",
"page.keyboard_shortcuts.go_to_top_item": "Ir al elemento superior",
"page.keyboard_shortcuts.open_item": "Abrir el elemento seleccionado",
"page.keyboard_shortcuts.open_original": "Abrir el enlace original",
"page.keyboard_shortcuts.open_original_same_window": "Abrir enlace original en la pestaña actual",

@@ -505,7 +507,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -524,5 +526,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "Velocidad de reproducción del audio/vídeo",
"error.settings_media_playback_rate_range": "La velocidad de reproducción está fuera de rango"
}

@@ -178,6 +178,8 @@
"page.keyboard_shortcuts.go_to_feed": "Siirry syötteeseen",
"page.keyboard_shortcuts.go_to_previous_page": "Siirry edelliselle sivulle",
"page.keyboard_shortcuts.go_to_next_page": "Siirry seuraavalle sivulle",
"page.keyboard_shortcuts.go_to_bottom_item": "Siirry alimpaan kohtaan",
"page.keyboard_shortcuts.go_to_top_item": "Siirry alkuun",
"page.keyboard_shortcuts.open_item": "Avaa valittu kohde",
"page.keyboard_shortcuts.open_original": "Avaa alkuperäinen linkki",
"page.keyboard_shortcuts.open_original_same_window": "Avaa alkuperäinen linkki nykyisessä välilehdessä",

@@ -505,7 +507,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -524,5 +526,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "Äänen/videon toistonopeus",
"error.settings_media_playback_rate_range": "Toistonopeus on alueen ulkopuolella"
}

@@ -178,6 +178,8 @@
"page.keyboard_shortcuts.go_to_feed": "Voir abonnement",
"page.keyboard_shortcuts.go_to_previous_page": "Page précédente",
"page.keyboard_shortcuts.go_to_next_page": "Page suivante",
"page.keyboard_shortcuts.go_to_bottom_item": "Aller à l'élément du bas",
"page.keyboard_shortcuts.go_to_top_item": "Aller à l'élément supérieur",
"page.keyboard_shortcuts.open_item": "Ouvrir élément sélectionné",
"page.keyboard_shortcuts.open_original": "Ouvrir le lien original",
"page.keyboard_shortcuts.open_original_same_window": "Ouvrir le lien original dans l'onglet en cours",

@@ -215,7 +217,7 @@
"page.settings.webauthn.last_seen_on": "Dernière utilisation",
"page.settings.webauthn.register": "Enregister une nouvelle clé d’accès",
"page.settings.webauthn.register.error": "Impossible d'enregistrer la clé d’accès",
"page.settings.webauthn.delete" : [
"page.settings.webauthn.delete": [
"Supprimer %d clé d’accès",
"Supprimer %d clés d’accès"
],

@@ -505,7 +507,7 @@
"error.http_body_read": "Impossible de lire le corps de la réponse HTTP : %v.",
"error.http_empty_response_body": "Le corps de la réponse HTTP est vide.",
"error.http_empty_response": "La réponse HTTP est vide. Peut-être que ce site web bloque Miniflux avec une protection anti-bot ?",
"error.tls_error": "Erreur TLS : %v. Vous pouvez désactiver la vérification TLS dans les paramètres de l'abonnement.",
"error.tls_error": "Erreur TLS : %q. Vous pouvez désactiver la vérification TLS dans les paramètres de l'abonnement.",
"error.network_operation": "Miniflux n'est pas en mesure de se connecter à ce site web à cause d'un problème réseau : %v.",
"error.network_timeout": "Ce site web est trop lent à répondre : %v.",
"error.http_client_error": "Erreur du client HTTP : %v.",

@@ -524,5 +526,7 @@
"error.unable_to_parse_feed": "Impossible d'analyser ce flux : %v.",
"error.feed_not_found": "Impossible de trouver ce flux.",
"error.unable_to_detect_rssbridge": "Impossible de détecter un flux RSS en utilisant RSS-Bridge: %v.",
"error.feed_format_not_detected": "Impossible de détecter le format du flux : %v."
"error.feed_format_not_detected": "Impossible de détecter le format du flux : %v.",
"form.prefs.label.media_playback_rate": "Vitesse de lecture de l'audio/vidéo",
"error.settings_media_playback_rate_range": "La vitesse de lecture est hors limites"
}

@@ -178,6 +178,8 @@
"page.keyboard_shortcuts.go_to_feed": "फ़ीड पर जाएं",
"page.keyboard_shortcuts.go_to_previous_page": "पिछले पृष्ठ पर जाएं",
"page.keyboard_shortcuts.go_to_next_page": "अगले पेज पर जाएं",
"page.keyboard_shortcuts.go_to_bottom_item": "निचले आइटम पर जाएँ",
"page.keyboard_shortcuts.go_to_top_item": "शीर्ष आइटम पर जाएँ",
"page.keyboard_shortcuts.open_item": "चयनित आइटम खोलें",
"page.keyboard_shortcuts.open_original": "मूल लिंक खोलें",
"page.keyboard_shortcuts.open_original_same_window": "वर्तमान टैब में मूल लिंक खोलें",

@@ -505,7 +507,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -524,5 +526,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "ऑडियो/वीडियो की प्लेबैक गति",
"error.settings_media_playback_rate_range": "प्लेबैक गति सीमा से बाहर है"
}

@@ -169,6 +169,8 @@
"page.keyboard_shortcuts.go_to_feed": "Ke umpan",
"page.keyboard_shortcuts.go_to_previous_page": "Ke halaman sebelumnya",
"page.keyboard_shortcuts.go_to_next_page": "Ke halaman berikutnya",
"page.keyboard_shortcuts.go_to_bottom_item": "Pergi ke item paling bawah",
"page.keyboard_shortcuts.go_to_top_item": "Pergi ke item teratas",
"page.keyboard_shortcuts.open_item": "Buka entri yang dipilih",
"page.keyboard_shortcuts.open_original": "Buka tautan asli",
"page.keyboard_shortcuts.open_original_same_window": "Buka tautan asli di bilah saat ini",

@@ -488,7 +490,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -507,5 +509,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "Kecepatan pemutaran audio/video",
"error.settings_media_playback_rate_range": "Kecepatan pemutaran di luar jangkauan"
}

@@ -178,6 +178,8 @@
"page.keyboard_shortcuts.go_to_feed": "Mostra il feed",
"page.keyboard_shortcuts.go_to_previous_page": "Mostra la pagina precedente",
"page.keyboard_shortcuts.go_to_next_page": "Mostra la pagina successiva",
"page.keyboard_shortcuts.go_to_bottom_item": "Vai all'elemento in fondo",
"page.keyboard_shortcuts.go_to_top_item": "Vai all'elemento principale",
"page.keyboard_shortcuts.open_item": "Apri l'articolo selezionato",
"page.keyboard_shortcuts.open_original": "Apri la pagina web originale",
"page.keyboard_shortcuts.open_original_same_window": "Apri il link originale nella scheda corrente",

@@ -505,7 +507,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -524,5 +526,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "Velocità di riproduzione dell'audio/video",
"error.settings_media_playback_rate_range": "La velocità di riproduzione non rientra nell'intervallo"
}

@@ -169,6 +169,8 @@
"page.keyboard_shortcuts.go_to_feed": "フィード",
"page.keyboard_shortcuts.go_to_previous_page": "前のページ",
"page.keyboard_shortcuts.go_to_next_page": "次のページ",
"page.keyboard_shortcuts.go_to_bottom_item": "一番下の項目に移動",
"page.keyboard_shortcuts.go_to_top_item": "先頭の項目に移動",
"page.keyboard_shortcuts.open_item": "選択されたアイテムを開く",
"page.keyboard_shortcuts.open_original": "オリジナルのリンクを開く",
"page.keyboard_shortcuts.open_original_same_window": "現在のタブでオリジナルのリンクを開く",

@@ -488,7 +490,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -507,5 +509,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "オーディオ/ビデオの再生速度",
"error.settings_media_playback_rate_range": "再生速度が範囲外"
}

@@ -179,6 +179,8 @@
"page.keyboard_shortcuts.go_to_feed": "Ga naar feed",
"page.keyboard_shortcuts.go_to_previous_page": "Vorige pagina",
"page.keyboard_shortcuts.go_to_next_page": "Volgende pagina",
"page.keyboard_shortcuts.go_to_bottom_item": "Ga naar het onderste item",
"page.keyboard_shortcuts.go_to_top_item": "Ga naar het bovenste item",
"page.keyboard_shortcuts.open_item": "Open geselecteerde link",
"page.keyboard_shortcuts.open_original": "Open originele link",
"page.keyboard_shortcuts.open_original_same_window": "Oorspronkelijke koppeling op huidig tabblad openen",

@@ -505,7 +507,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -524,5 +526,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "Afspeelsnelheid van de audio/video",
"error.settings_media_playback_rate_range": "Afspeelsnelheid is buiten bereik"
}

@@ -187,6 +187,8 @@
"page.keyboard_shortcuts.go_to_feed": "Przejdź do subskrypcji",
"page.keyboard_shortcuts.go_to_previous_page": "Przejdź do poprzedniej strony",
"page.keyboard_shortcuts.go_to_next_page": "Przejdź do następnej strony",
"page.keyboard_shortcuts.go_to_bottom_item": "Przejdź do dolnego elementu",
"page.keyboard_shortcuts.go_to_top_item": "Przejdź do najwyższego elementu",
"page.keyboard_shortcuts.open_item": "Otwórz zaznaczony artykuł",
"page.keyboard_shortcuts.open_original": "Otwórz oryginalny artykuł",
"page.keyboard_shortcuts.open_original_same_window": "Otwórz oryginalny link w bieżącej karcie",

@@ -522,7 +524,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -541,5 +543,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "Prędkość odtwarzania audio/wideo",
"error.settings_media_playback_rate_range": "Prędkość odtwarzania jest poza zakresem"
}

@@ -178,6 +178,8 @@
"page.keyboard_shortcuts.go_to_feed": "Ir a fonte",
"page.keyboard_shortcuts.go_to_previous_page": "Ir a página anterior",
"page.keyboard_shortcuts.go_to_next_page": "Ir a página seguinte",
"page.keyboard_shortcuts.go_to_bottom_item": "Ir para o item inferior",
"page.keyboard_shortcuts.go_to_top_item": "Ir para o item superior",
"page.keyboard_shortcuts.open_item": "Abrir o item selecionado",
"page.keyboard_shortcuts.open_original": "Abrir o conteúdo original",
"page.keyboard_shortcuts.open_original_same_window": "Abrir o conteúdo original na janela atual",

@@ -505,7 +507,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -524,5 +526,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "Velocidade de reprodução do áudio/vídeo",
"error.settings_media_playback_rate_range": "A velocidade de reprodução está fora do intervalo"
}

@@ -187,6 +187,8 @@
"page.keyboard_shortcuts.go_to_feed": "Перейти к подписке",
"page.keyboard_shortcuts.go_to_previous_page": "Перейти к предыдущей странице",
"page.keyboard_shortcuts.go_to_next_page": "Перейти к следующей странице",
"page.keyboard_shortcuts.go_to_bottom_item": "Перейти к нижнему элементу",
"page.keyboard_shortcuts.go_to_top_item": "Перейти к верхнему элементу",
"page.keyboard_shortcuts.open_item": "Открыть выбранный элемент",
"page.keyboard_shortcuts.open_original_same_window": "Открыть оригинальную ссылку в текущей вкладке",
"page.keyboard_shortcuts.open_original": "Открыть оригинальную ссылку",

@@ -522,7 +524,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -541,5 +543,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "Скорость воспроизведения аудио/видео",
"error.settings_media_playback_rate_range": "Скорость воспроизведения выходит за пределы диапазона"
}

[File diff suppressed because it is too large]

@@ -187,6 +187,8 @@
"page.keyboard_shortcuts.go_to_feed": "Перейти до стрічки",
"page.keyboard_shortcuts.go_to_previous_page": "Перейти до попередньої сторінки",
"page.keyboard_shortcuts.go_to_next_page": "Перейти до наступної сторінки",
"page.keyboard_shortcuts.go_to_bottom_item": "Перейти до нижнього пункту",
"page.keyboard_shortcuts.go_to_top_item": "Перейти до верхнього пункту",
"page.keyboard_shortcuts.open_item": "Відкрити виділений запис",
"page.keyboard_shortcuts.open_original": "Відкрити оригінальне посилання",
"page.keyboard_shortcuts.open_original_same_window": "Відкрити оригінальне посилання в поточній вкладці",

@@ -522,7 +524,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -541,5 +543,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "Швидкість відтворення аудіо/відео",
"error.settings_media_playback_rate_range": "Швидкість відтворення виходить за межі діапазону"
}

@@ -169,6 +169,8 @@
"page.keyboard_shortcuts.go_to_feed": "转到源页面",
"page.keyboard_shortcuts.go_to_previous_page": "上一页",
"page.keyboard_shortcuts.go_to_next_page": "下一页",
"page.keyboard_shortcuts.go_to_bottom_item": "转到底部项目",
"page.keyboard_shortcuts.go_to_top_item": "转到顶部项目",
"page.keyboard_shortcuts.open_item": "打开选定的文章",
"page.keyboard_shortcuts.open_original": "打开原始链接",
"page.keyboard_shortcuts.open_original_same_window": "在当前标签页中打开原始链接",

@@ -406,9 +408,9 @@
"form.integration.omnivore_activate": "保存文章到 Omnivore",
"form.integration.omnivore_url": "Omnivore API 端点",
"form.integration.omnivore_api_key": "Omnivore API 密钥",
"form.integration.espial_activate": "保存文章到 Espial",
"form.integration.espial_endpoint": "Espial API 端点",
"form.integration.espial_api_key": "Espial API 密钥",
"form.integration.espial_activate": "保存文章到 Espial",
"form.integration.espial_endpoint": "Espial API 端点",
"form.integration.espial_api_key": "Espial API 密钥",
"form.integration.espial_tags": "Espial 标签",
"form.integration.readwise_activate": "保存文章到 Readwise Reader",
"form.integration.readwise_api_key": "Readwise Reader Access Token",

@@ -488,7 +490,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -507,5 +509,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "音频/视频的播放速度",
"error.settings_media_playback_rate_range": "播放速度超出范围"
}

@@ -169,6 +169,8 @@
"page.keyboard_shortcuts.go_to_feed": "轉到Feed頁面",
"page.keyboard_shortcuts.go_to_previous_page": "上一頁",
"page.keyboard_shortcuts.go_to_next_page": "下一頁",
"page.keyboard_shortcuts.go_to_bottom_item": "转到底部项目",
"page.keyboard_shortcuts.go_to_top_item": "转到顶部项目",
"page.keyboard_shortcuts.open_item": "開啟選定的文章",
"page.keyboard_shortcuts.open_original": "開啟原始連結",
"page.keyboard_shortcuts.open_original_same_window": "在當前標籤頁中開啟原始連結",

@@ -488,7 +490,7 @@
"error.http_body_read": "Unable to read the HTTP body: %v.",
"error.http_empty_response_body": "The HTTP response body is empty.",
"error.http_empty_response": "The HTTP response is empty. Perhaps, this website is using a bot protection mechanism?",
"error.tls_error": "TLS error: %v. You could disable TLS verification in the feed settings if you would like.",
"error.tls_error": "TLS error: %q. You could disable TLS verification in the feed settings if you would like.",
"error.network_operation": "Miniflux is not able to reach this website due to a network error: %v.",
"error.network_timeout": "This website is too slow and the request timed out: %v",
"error.http_client_error": "HTTP client error: %v.",

@@ -507,5 +509,7 @@
"error.unable_to_parse_feed": "Unable to parse this feed: %v.",
"error.feed_not_found": "This feed does not exist or does not belong to this user.",
"error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
"error.feed_format_not_detected": "Unable to detect feed format: %v."
"error.feed_format_not_detected": "Unable to detect feed format: %v.",
"form.prefs.label.media_playback_rate": "音訊/視訊的播放速度",
"error.settings_media_playback_rate_range": "播放速度超出範圍"
}

@ -1,7 +1,7 @@
|
|||
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package proxy // import "miniflux.app/v2/internal/proxy"
|
||||
package mediaproxy // import "miniflux.app/v2/internal/mediaproxy"
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
@ -29,11 +29,11 @@ func TestProxyFilterWithHttpDefault(t *testing.T) {
|
|||
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
|
||||
|
||||
input := `<p><img src="http://website/folder/image.png" alt="Test"/></p>`
|
||||
output := ProxyRewriter(r, input)
|
||||
output := RewriteDocumentWithRelativeProxyURL(r, input)
|
||||
expected := `<p><img src="/proxy/okK5PsdNY8F082UMQEAbLPeUFfbe2WnNfInNmR9T4WA=/aHR0cDovL3dlYnNpdGUvZm9sZGVyL2ltYWdlLnBuZw==" alt="Test"/></p>`
|
||||
|
||||
if expected != output {
|
||||
t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected)
|
||||
t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -53,11 +53,11 @@ func TestProxyFilterWithHttpsDefault(t *testing.T) {
|
|||
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
|
||||
|
||||
input := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`
|
||||
output := ProxyRewriter(r, input)
|
||||
output := RewriteDocumentWithRelativeProxyURL(r, input)
|
||||
expected := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`
|
||||
|
||||
if expected != output {
|
||||
t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected)
|
||||
t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -76,11 +76,11 @@ func TestProxyFilterWithHttpNever(t *testing.T) {
|
|||
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
|
||||
|
||||
input := `<p><img src="http://website/folder/image.png" alt="Test"/></p>`
|
||||
output := ProxyRewriter(r, input)
|
||||
output := RewriteDocumentWithRelativeProxyURL(r, input)
|
||||
expected := input
|
||||
|
||||
if expected != output {
|
||||
t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected)
|
||||
t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -99,11 +99,11 @@ func TestProxyFilterWithHttpsNever(t *testing.T) {
|
|||
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
|
||||
|
||||
input := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`
|
||||
output := ProxyRewriter(r, input)
|
||||
output := RewriteDocumentWithRelativeProxyURL(r, input)
|
||||
expected := input
|
||||
|
||||
if expected != output {
|
||||
t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected)
|
||||
t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -124,11 +124,11 @@ func TestProxyFilterWithHttpAlways(t *testing.T) {
|
|||
r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
|
||||
|
||||
input := `<p><img src="http://website/folder/image.png" alt="Test"/></p>`
|
||||
output := ProxyRewriter(r, input)
|
||||
output := RewriteDocumentWithRelativeProxyURL(r, input)
|
||||
expected := `<p><img src="/proxy/okK5PsdNY8F082UMQEAbLPeUFfbe2WnNfInNmR9T4WA=/aHR0cDovL3dlYnNpdGUvZm9sZGVyL2ltYWdlLnBuZw==" alt="Test"/></p>`
|
||||
|
||||
if expected != output {
|
||||
t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected)
|
||||
t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -149,11 +149,87 @@ func TestProxyFilterWithHttpsAlways(t *testing.T) {
 	r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")

 	input := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)
 	expected := `<p><img src="/proxy/LdPNR1GBDigeeNp2ArUQRyZsVqT_PWLfHGjYFrrWWIY=/aHR0cHM6Ly93ZWJzaXRlL2ZvbGRlci9pbWFnZS5wbmc=" alt="Test"/></p>`

 	if expected != output {
-		t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected)
+		t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
 	}
 }
+
+func TestAbsoluteProxyFilterWithHttpsAlways(t *testing.T) {
+	os.Clearenv()
+	os.Setenv("PROXY_OPTION", "all")
+	os.Setenv("PROXY_MEDIA_TYPES", "image")
+	os.Setenv("PROXY_PRIVATE_KEY", "test")
+
+	var err error
+	parser := config.NewParser()
+	config.Opts, err = parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+
+	r := mux.NewRouter()
+	r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
+
+	input := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`
+	output := RewriteDocumentWithAbsoluteProxyURL(r, "localhost", input)
+	expected := `<p><img src="http://localhost/proxy/LdPNR1GBDigeeNp2ArUQRyZsVqT_PWLfHGjYFrrWWIY=/aHR0cHM6Ly93ZWJzaXRlL2ZvbGRlci9pbWFnZS5wbmc=" alt="Test"/></p>`
+
+	if expected != output {
+		t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
+	}
+}
+
+func TestAbsoluteProxyFilterWithHttpsScheme(t *testing.T) {
+	os.Clearenv()
+	os.Setenv("PROXY_OPTION", "all")
+	os.Setenv("PROXY_MEDIA_TYPES", "image")
+	os.Setenv("PROXY_PRIVATE_KEY", "test")
+	os.Setenv("HTTPS", "1")
+
+	var err error
+	parser := config.NewParser()
+	config.Opts, err = parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+
+	r := mux.NewRouter()
+	r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
+
+	input := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`
+	output := RewriteDocumentWithAbsoluteProxyURL(r, "localhost", input)
+	expected := `<p><img src="https://localhost/proxy/LdPNR1GBDigeeNp2ArUQRyZsVqT_PWLfHGjYFrrWWIY=/aHR0cHM6Ly93ZWJzaXRlL2ZvbGRlci9pbWFnZS5wbmc=" alt="Test"/></p>`
+
+	if expected != output {
+		t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
+	}
+}
+
+func TestAbsoluteProxyFilterWithHttpsAlwaysAndAudioTag(t *testing.T) {
+	os.Clearenv()
+	os.Setenv("PROXY_OPTION", "all")
+	os.Setenv("PROXY_MEDIA_TYPES", "audio")
+	os.Setenv("PROXY_PRIVATE_KEY", "test")
+
+	var err error
+	parser := config.NewParser()
+	config.Opts, err = parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+
+	r := mux.NewRouter()
+	r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
+
+	input := `<audio src="https://website/folder/audio.mp3"></audio>`
+	output := RewriteDocumentWithAbsoluteProxyURL(r, "localhost", input)
+	expected := `<audio src="http://localhost/proxy/EmBTvmU5B17wGuONkeknkptYopW_Tl6Y6_W8oYbN_Xs=/aHR0cHM6Ly93ZWJzaXRlL2ZvbGRlci9hdWRpby5tcDM="></audio>`
+
+	if expected != output {
+		t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
+	}
+}
@@ -174,11 +250,61 @@ func TestProxyFilterWithHttpsAlwaysAndCustomProxyServer(t *testing.T) {
 	r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")

 	input := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)
 	expected := `<p><img src="https://proxy-example/proxy/aHR0cHM6Ly93ZWJzaXRlL2ZvbGRlci9pbWFnZS5wbmc=" alt="Test"/></p>`

 	if expected != output {
-		t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected)
+		t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
 	}
 }
+
+func TestProxyFilterWithHttpsAlwaysAndIncorrectCustomProxyServer(t *testing.T) {
+	os.Clearenv()
+	os.Setenv("PROXY_OPTION", "all")
+	os.Setenv("PROXY_MEDIA_TYPES", "image")
+	os.Setenv("PROXY_URL", "http://:8080example.com")
+
+	var err error
+	parser := config.NewParser()
+	config.Opts, err = parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+
+	r := mux.NewRouter()
+	r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
+
+	input := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`
+	output := RewriteDocumentWithRelativeProxyURL(r, input)
+	expected := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`
+
+	if expected != output {
+		t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
+	}
+}
+
+func TestAbsoluteProxyFilterWithHttpsAlwaysAndCustomProxyServer(t *testing.T) {
+	os.Clearenv()
+	os.Setenv("MEDIA_PROXY_MODE", "all")
+	os.Setenv("MEDIA_PROXY_RESOURCE_TYPES", "image")
+	os.Setenv("MEDIA_PROXY_CUSTOM_URL", "https://proxy-example/proxy")
+
+	var err error
+	parser := config.NewParser()
+	config.Opts, err = parser.ParseEnvironmentVariables()
+	if err != nil {
+		t.Fatalf(`Parsing failure: %v`, err)
+	}
+
+	r := mux.NewRouter()
+	r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")
+
+	input := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`
+	output := RewriteDocumentWithAbsoluteProxyURL(r, "localhost", input)
+	expected := `<p><img src="https://proxy-example/proxy/aHR0cHM6Ly93ZWJzaXRlL2ZvbGRlci9pbWFnZS5wbmc=" alt="Test"/></p>`
+
+	if expected != output {
+		t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
+	}
+}
@@ -198,11 +324,11 @@ func TestProxyFilterWithHttpInvalid(t *testing.T) {
 	r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")

 	input := `<p><img src="http://website/folder/image.png" alt="Test"/></p>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)
 	expected := `<p><img src="/proxy/okK5PsdNY8F082UMQEAbLPeUFfbe2WnNfInNmR9T4WA=/aHR0cDovL3dlYnNpdGUvZm9sZGVyL2ltYWdlLnBuZw==" alt="Test"/></p>`

 	if expected != output {
-		t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected)
+		t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
 	}
 }
@@ -222,11 +348,11 @@ func TestProxyFilterWithHttpsInvalid(t *testing.T) {
 	r.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")

 	input := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)
 	expected := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`

 	if expected != output {
-		t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected)
+		t.Errorf(`Not expected output: got %q instead of %q`, output, expected)
 	}
 }
@@ -248,7 +374,7 @@ func TestProxyFilterWithSrcset(t *testing.T) {

 	input := `<p><img src="http://website/folder/image.png" srcset="http://website/folder/image2.png 656w, http://website/folder/image3.png 360w" alt="test"></p>`
 	expected := `<p><img src="/proxy/okK5PsdNY8F082UMQEAbLPeUFfbe2WnNfInNmR9T4WA=/aHR0cDovL3dlYnNpdGUvZm9sZGVyL2ltYWdlLnBuZw==" srcset="/proxy/aY5Hb4urDnUCly2vTJ7ExQeeaVS-52O7kjUr2v9VrAs=/aHR0cDovL3dlYnNpdGUvZm9sZGVyL2ltYWdlMi5wbmc= 656w, /proxy/QgAmrJWiAud_nNAsz3F8OTxaIofwAiO36EDzH_YfMzo=/aHR0cDovL3dlYnNpdGUvZm9sZGVyL2ltYWdlMy5wbmc= 360w" alt="test"/></p>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)

 	if expected != output {
 		t.Errorf(`Not expected output: got %s`, output)
@@ -273,7 +399,7 @@ func TestProxyFilterWithEmptySrcset(t *testing.T) {

 	input := `<p><img src="http://website/folder/image.png" srcset="" alt="test"></p>`
 	expected := `<p><img src="/proxy/okK5PsdNY8F082UMQEAbLPeUFfbe2WnNfInNmR9T4WA=/aHR0cDovL3dlYnNpdGUvZm9sZGVyL2ltYWdlLnBuZw==" srcset="" alt="test"/></p>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)

 	if expected != output {
 		t.Errorf(`Not expected output: got %s`, output)
@@ -298,7 +424,7 @@ func TestProxyFilterWithPictureSource(t *testing.T) {

 	input := `<picture><source srcset="http://website/folder/image2.png 656w, http://website/folder/image3.png 360w, https://website/some,image.png 2x"></picture>`
 	expected := `<picture><source srcset="/proxy/aY5Hb4urDnUCly2vTJ7ExQeeaVS-52O7kjUr2v9VrAs=/aHR0cDovL3dlYnNpdGUvZm9sZGVyL2ltYWdlMi5wbmc= 656w, /proxy/QgAmrJWiAud_nNAsz3F8OTxaIofwAiO36EDzH_YfMzo=/aHR0cDovL3dlYnNpdGUvZm9sZGVyL2ltYWdlMy5wbmc= 360w, /proxy/ZIw0hv8WhSTls5aSqhnFaCXlUrKIqTnBRaY0-NaLnds=/aHR0cHM6Ly93ZWJzaXRlL3NvbWUsaW1hZ2UucG5n 2x"/></picture>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)

 	if expected != output {
 		t.Errorf(`Not expected output: got %s`, output)
@@ -323,7 +449,7 @@ func TestProxyFilterOnlyNonHTTPWithPictureSource(t *testing.T) {

 	input := `<picture><source srcset="http://website/folder/image2.png 656w, https://website/some,image.png 2x"></picture>`
 	expected := `<picture><source srcset="/proxy/aY5Hb4urDnUCly2vTJ7ExQeeaVS-52O7kjUr2v9VrAs=/aHR0cDovL3dlYnNpdGUvZm9sZGVyL2ltYWdlMi5wbmc= 656w, https://website/some,image.png 2x"/></picture>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)

 	if expected != output {
 		t.Errorf(`Not expected output: got %s`, output)
@@ -347,7 +473,7 @@ func TestProxyWithImageDataURL(t *testing.T) {

 	input := `<img src="data:image/gif;base64,test">`
 	expected := `<img src="data:image/gif;base64,test"/>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)

 	if expected != output {
 		t.Errorf(`Not expected output: got %s`, output)
@@ -371,7 +497,7 @@ func TestProxyWithImageSourceDataURL(t *testing.T) {

 	input := `<picture><source srcset="data:image/gif;base64,test"/></picture>`
 	expected := `<picture><source srcset="data:image/gif;base64,test"/></picture>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)

 	if expected != output {
 		t.Errorf(`Not expected output: got %s`, output)
@@ -396,7 +522,7 @@ func TestProxyFilterWithVideo(t *testing.T) {

 	input := `<video poster="https://example.com/img.png" src="https://example.com/video.mp4"></video>`
 	expected := `<video poster="/proxy/aDFfroYL57q5XsojIzATT6OYUCkuVSPXYJQAVrotnLw=/aHR0cHM6Ly9leGFtcGxlLmNvbS9pbWcucG5n" src="/proxy/0y3LR8zlx8S8qJkj1qWFOO6x3a-5yf2gLWjGIJV5yyc=/aHR0cHM6Ly9leGFtcGxlLmNvbS92aWRlby5tcDQ="></video>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)

 	if expected != output {
 		t.Errorf(`Not expected output: got %s`, output)
@@ -421,7 +547,7 @@ func TestProxyFilterVideoPoster(t *testing.T) {

 	input := `<video poster="https://example.com/img.png" src="https://example.com/video.mp4"></video>`
 	expected := `<video poster="/proxy/aDFfroYL57q5XsojIzATT6OYUCkuVSPXYJQAVrotnLw=/aHR0cHM6Ly9leGFtcGxlLmNvbS9pbWcucG5n" src="https://example.com/video.mp4"></video>`
-	output := ProxyRewriter(r, input)
+	output := RewriteDocumentWithRelativeProxyURL(r, input)

 	if expected != output {
 		t.Errorf(`Not expected output: got %s`, output)

@@ -1,7 +1,7 @@
 // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
 // SPDX-License-Identifier: Apache-2.0

-package proxy // import "miniflux.app/v2/internal/proxy"
+package mediaproxy // import "miniflux.app/v2/internal/mediaproxy"

 import (
 	"strings"
@@ -16,31 +16,29 @@ import (

 type urlProxyRewriter func(router *mux.Router, url string) string

-// ProxyRewriter replaces media URLs with internal proxy URLs.
-func ProxyRewriter(router *mux.Router, data string) string {
-	return genericProxyRewriter(router, ProxifyURL, data)
+func RewriteDocumentWithRelativeProxyURL(router *mux.Router, htmlDocument string) string {
+	return genericProxyRewriter(router, ProxifyRelativeURL, htmlDocument)
 }

-// AbsoluteProxyRewriter do the same as ProxyRewriter except it uses absolute URLs.
-func AbsoluteProxyRewriter(router *mux.Router, host, data string) string {
+func RewriteDocumentWithAbsoluteProxyURL(router *mux.Router, host, htmlDocument string) string {
 	proxifyFunction := func(router *mux.Router, url string) string {
-		return AbsoluteProxifyURL(router, host, url)
+		return ProxifyAbsoluteURL(router, host, url)
 	}
-	return genericProxyRewriter(router, proxifyFunction, data)
+	return genericProxyRewriter(router, proxifyFunction, htmlDocument)
 }

-func genericProxyRewriter(router *mux.Router, proxifyFunction urlProxyRewriter, data string) string {
-	proxyOption := config.Opts.ProxyOption()
+func genericProxyRewriter(router *mux.Router, proxifyFunction urlProxyRewriter, htmlDocument string) string {
+	proxyOption := config.Opts.MediaProxyMode()
 	if proxyOption == "none" {
-		return data
+		return htmlDocument
 	}

-	doc, err := goquery.NewDocumentFromReader(strings.NewReader(data))
+	doc, err := goquery.NewDocumentFromReader(strings.NewReader(htmlDocument))
 	if err != nil {
-		return data
+		return htmlDocument
 	}

-	for _, mediaType := range config.Opts.ProxyMediaTypes() {
+	for _, mediaType := range config.Opts.MediaProxyResourceTypes() {
 		switch mediaType {
 		case "image":
 			doc.Find("img, picture source").Each(func(i int, img *goquery.Selection) {
@@ -91,7 +89,7 @@ func genericProxyRewriter(router *mux.Router, proxifyFunction urlProxyRewriter,

 	output, err := doc.Find("body").First().Html()
 	if err != nil {
-		return data
+		return htmlDocument
 	}

 	return output

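// Editor's note: a minimal usage sketch, not part of the diff, contrasting the
// two renamed rewriters above. It assumes config.Opts was initialized from the
// environment (as the tests earlier in this diff do) and that a route named
// "proxy" exists; mediaproxy is an internal package, so this only compiles
// inside the miniflux module.
package main

import (
	"fmt"
	"net/http"

	"github.com/gorilla/mux"

	"miniflux.app/v2/internal/mediaproxy"
)

func main() {
	router := mux.NewRouter()
	router.HandleFunc("/proxy/{encodedDigest}/{encodedURL}", func(w http.ResponseWriter, r *http.Request) {}).Name("proxy")

	input := `<p><img src="https://website/folder/image.png" alt="Test"/></p>`

	// Relative variant: src is rewritten to "/proxy/<digest>/<encoded-url>".
	fmt.Println(mediaproxy.RewriteDocumentWithRelativeProxyURL(router, input))

	// Absolute variant: the same path prefixed with "http(s)://<host>", the
	// scheme depending on the HTTPS config option.
	fmt.Println(mediaproxy.RewriteDocumentWithAbsoluteProxyURL(router, "localhost", input))
}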
@@ -0,0 +1,70 @@
+// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+package mediaproxy // import "miniflux.app/v2/internal/mediaproxy"
+
+import (
+	"crypto/hmac"
+	"crypto/sha256"
+	"encoding/base64"
+	"log/slog"
+	"net/url"
+	"path"
+
+	"miniflux.app/v2/internal/http/route"
+
+	"github.com/gorilla/mux"
+
+	"miniflux.app/v2/internal/config"
+)
+
+func ProxifyRelativeURL(router *mux.Router, mediaURL string) string {
+	if mediaURL == "" {
+		return ""
+	}
+
+	if customProxyURL := config.Opts.MediaCustomProxyURL(); customProxyURL != "" {
+		return proxifyURLWithCustomProxy(mediaURL, customProxyURL)
+	}
+
+	mac := hmac.New(sha256.New, config.Opts.MediaProxyPrivateKey())
+	mac.Write([]byte(mediaURL))
+	digest := mac.Sum(nil)
+	return route.Path(router, "proxy", "encodedDigest", base64.URLEncoding.EncodeToString(digest), "encodedURL", base64.URLEncoding.EncodeToString([]byte(mediaURL)))
+}
+
+func ProxifyAbsoluteURL(router *mux.Router, host, mediaURL string) string {
+	if mediaURL == "" {
+		return ""
+	}
+
+	if customProxyURL := config.Opts.MediaCustomProxyURL(); customProxyURL != "" {
+		return proxifyURLWithCustomProxy(mediaURL, customProxyURL)
+	}
+
+	proxifiedUrl := ProxifyRelativeURL(router, mediaURL)
+	scheme := "http"
+	if config.Opts.HTTPS {
+		scheme = "https"
+	}
+
+	return scheme + "://" + host + proxifiedUrl
+}
+
+func proxifyURLWithCustomProxy(mediaURL, customProxyURL string) string {
+	if customProxyURL == "" {
+		return mediaURL
+	}
+
+	proxyUrl, err := url.Parse(customProxyURL)
+	if err != nil {
+		slog.Error("Incorrect custom media proxy URL",
+			slog.String("custom_proxy_url", customProxyURL),
+			slog.Any("error", err),
+		)
+		return mediaURL
+	}
+
+	proxyUrl.Path = path.Join(proxyUrl.Path, base64.URLEncoding.EncodeToString([]byte(mediaURL)))
+	return proxyUrl.String()
+}

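// Editor's note: a self-contained sketch, not part of the diff, of the URL
// scheme implemented above: the proxy path carries an HMAC-SHA256 digest of
// the media URL plus the base64url-encoded URL itself, so the handler can
// verify the signature before fetching. Key and URL are taken from the tests.
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

func main() {
	key := []byte("test") // the private key set via the tests' environment
	mediaURL := "https://website/folder/image.png"

	mac := hmac.New(sha256.New, key)
	mac.Write([]byte(mediaURL))
	digest := base64.URLEncoding.EncodeToString(mac.Sum(nil))
	encodedURL := base64.URLEncoding.EncodeToString([]byte(mediaURL))

	// Prints the same path the tests expect:
	// /proxy/LdPNR1GBDigeeNp2ArUQRyZsVqT_PWLfHGjYFrrWWIY=/aHR0cHM6Ly93ZWJzaXRlL2ZvbGRlci9pbWFnZS5wbmc=
	fmt.Printf("/proxy/%s/%s\n", digest, encodedURL)
}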
@@ -2,7 +2,6 @@
 // SPDX-License-Identifier: Apache-2.0

 package model // import "miniflux.app/v2/internal/model"
-import "strings"

 // Enclosure represents an attachment.
 type Enclosure struct {
@@ -17,15 +16,8 @@ type Enclosure struct {

 // Html5MimeType will modify the actual MimeType to allow direct playback from HTML5 player for some kind of MimeType
 func (e Enclosure) Html5MimeType() string {
-	if strings.HasPrefix(e.MimeType, "video") {
-		switch e.MimeType {
-		// Solution from this stackoverflow discussion:
-		// https://stackoverflow.com/questions/15277147/m4v-mimetype-video-mp4-or-video-m4v/66945470#66945470
-		// tested at the time of this commit (06/2023) on latest Firefox & Vivaldi on this feed
-		// https://www.florenceporcel.com/podcast/lfhdu.xml
-		case "video/m4v":
-			return "video/x-m4v"
-		}
+	if e.MimeType == "video/m4v" {
+		return "video/x-m4v"
 	}
 	return e.MimeType
 }
@@ -159,25 +159,7 @@ type FeedCreationRequestFromSubscriptionDiscovery struct {
 	ETag         string
 	LastModified string

-	FeedURL                     string `json:"feed_url"`
-	CategoryID                  int64  `json:"category_id"`
-	UserAgent                   string `json:"user_agent"`
-	Cookie                      string `json:"cookie"`
-	Username                    string `json:"username"`
-	Password                    string `json:"password"`
-	Crawler                     bool   `json:"crawler"`
-	Disabled                    bool   `json:"disabled"`
-	NoMediaPlayer               bool   `json:"no_media_player"`
-	IgnoreHTTPCache             bool   `json:"ignore_http_cache"`
-	AllowSelfSignedCertificates bool   `json:"allow_self_signed_certificates"`
-	FetchViaProxy               bool   `json:"fetch_via_proxy"`
-	ScraperRules                string `json:"scraper_rules"`
-	RewriteRules                string `json:"rewrite_rules"`
-	BlocklistRules              string `json:"blocklist_rules"`
-	KeeplistRules               string `json:"keeplist_rules"`
-	HideGlobally                bool   `json:"hide_globally"`
-	UrlRewriteRules             string `json:"urlrewrite_rules"`
-	DisableHTTP2                bool   `json:"disable_http2"`
+	FeedCreationRequest
 }

 // FeedModificationRequest represents the request to update a feed.

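// Editor's note: a hedged sketch, not part of the diff, of the struct-embedding
// pattern applied above. Field names are illustrative; the point is that the
// embedded struct's fields and JSON tags are promoted, so the duplicated field
// list could be deleted.
package main

import (
	"encoding/json"
	"fmt"
)

type FeedCreationRequest struct {
	FeedURL    string `json:"feed_url"`
	CategoryID int64  `json:"category_id"`
}

type FeedCreationRequestFromSubscriptionDiscovery struct {
	Content string // populated from the discovery response, not from JSON
	FeedCreationRequest
}

func main() {
	var req FeedCreationRequestFromSubscriptionDiscovery
	_ = json.Unmarshal([]byte(`{"feed_url":"https://example.org/feed.xml","category_id":1}`), &req)

	// The embedded fields are promoted and can be used directly.
	fmt.Println(req.FeedURL, req.CategoryID) // "https://example.org/feed.xml 1"
}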
@@ -3,26 +3,20 @@

 package model // import "miniflux.app/v2/internal/model"

-// OptionalString populates an optional string field.
+type Number interface {
+	int | int64 | float64
+}
+
+func OptionalNumber[T Number](value T) *T {
+	if value > 0 {
+		return &value
+	}
+	return nil
+}
+
 func OptionalString(value string) *string {
 	if value != "" {
 		return &value
 	}
 	return nil
 }
-
-// OptionalInt populates an optional int field.
-func OptionalInt(value int) *int {
-	if value > 0 {
-		return &value
-	}
-	return nil
-}
-
-// OptionalInt64 populates an optional int64 field.
-func OptionalInt64(value int64) *int64 {
-	if value > 0 {
-		return &value
-	}
-	return nil
-}

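// Editor's note: a short sketch, not part of the diff, exercising the generic
// OptionalNumber shown above: one type-parameterized function replaces the
// separate OptionalInt, OptionalInt64 and OptionalFloat64 helpers, returning
// nil for non-positive values. The types are reproduced here so the snippet
// runs standalone.
package main

import "fmt"

type Number interface {
	int | int64 | float64
}

func OptionalNumber[T Number](value T) *T {
	if value > 0 {
		return &value
	}
	return nil
}

func main() {
	fmt.Println(*OptionalNumber(42))      // 42, inferred as *int
	fmt.Println(OptionalNumber(int64(0))) // nil: zero is treated as "unset"
	fmt.Println(*OptionalNumber(1.25))    // 1.25, inferred as *float64
}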
@@ -35,6 +35,7 @@ type User struct {
 	DefaultHomePage        string  `json:"default_home_page"`
 	CategoriesSortingOrder string  `json:"categories_sorting_order"`
 	MarkReadOnView         bool    `json:"mark_read_on_view"`
+	MediaPlaybackRate      float64 `json:"media_playback_rate"`
 }

 // UserCreationRequest represents the request to create a user.
@@ -48,28 +49,29 @@ type UserCreationRequest struct {

 // UserModificationRequest represents the request to update a user.
 type UserModificationRequest struct {
-	Username               *string `json:"username"`
-	Password               *string `json:"password"`
-	Theme                  *string `json:"theme"`
-	Language               *string `json:"language"`
-	Timezone               *string `json:"timezone"`
-	EntryDirection         *string `json:"entry_sorting_direction"`
-	EntryOrder             *string `json:"entry_sorting_order"`
-	Stylesheet             *string `json:"stylesheet"`
-	GoogleID               *string `json:"google_id"`
-	OpenIDConnectID        *string `json:"openid_connect_id"`
-	EntriesPerPage         *int    `json:"entries_per_page"`
-	IsAdmin                *bool   `json:"is_admin"`
-	KeyboardShortcuts      *bool   `json:"keyboard_shortcuts"`
-	ShowReadingTime        *bool   `json:"show_reading_time"`
-	EntrySwipe             *bool   `json:"entry_swipe"`
-	GestureNav             *string `json:"gesture_nav"`
-	DisplayMode            *string `json:"display_mode"`
-	DefaultReadingSpeed    *int    `json:"default_reading_speed"`
-	CJKReadingSpeed        *int    `json:"cjk_reading_speed"`
-	DefaultHomePage        *string `json:"default_home_page"`
-	CategoriesSortingOrder *string `json:"categories_sorting_order"`
-	MarkReadOnView         *bool   `json:"mark_read_on_view"`
+	Username               *string  `json:"username"`
+	Password               *string  `json:"password"`
+	Theme                  *string  `json:"theme"`
+	Language               *string  `json:"language"`
+	Timezone               *string  `json:"timezone"`
+	EntryDirection         *string  `json:"entry_sorting_direction"`
+	EntryOrder             *string  `json:"entry_sorting_order"`
+	Stylesheet             *string  `json:"stylesheet"`
+	GoogleID               *string  `json:"google_id"`
+	OpenIDConnectID        *string  `json:"openid_connect_id"`
+	EntriesPerPage         *int     `json:"entries_per_page"`
+	IsAdmin                *bool    `json:"is_admin"`
+	KeyboardShortcuts      *bool    `json:"keyboard_shortcuts"`
+	ShowReadingTime        *bool    `json:"show_reading_time"`
+	EntrySwipe             *bool    `json:"entry_swipe"`
+	GestureNav             *string  `json:"gesture_nav"`
+	DisplayMode            *string  `json:"display_mode"`
+	DefaultReadingSpeed    *int     `json:"default_reading_speed"`
+	CJKReadingSpeed        *int     `json:"cjk_reading_speed"`
+	DefaultHomePage        *string  `json:"default_home_page"`
+	CategoriesSortingOrder *string  `json:"categories_sorting_order"`
+	MarkReadOnView         *bool    `json:"mark_read_on_view"`
+	MediaPlaybackRate      *float64 `json:"media_playback_rate"`
 }

 // Patch updates the User object with the modification request.
@@ -161,6 +163,10 @@ func (u *UserModificationRequest) Patch(user *User) {
 	if u.MarkReadOnView != nil {
 		user.MarkReadOnView = *u.MarkReadOnView
 	}
+
+	if u.MediaPlaybackRate != nil {
+		user.MediaPlaybackRate = *u.MediaPlaybackRate
+	}
 }

 // UseTimezone converts last login date to the given timezone.

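// Editor's note: a minimal sketch, not part of the diff, of the pointer-field
// patch semantics used by UserModificationRequest: a nil pointer means "leave
// unchanged" and a non-nil pointer overwrites, which is why the new
// MediaPlaybackRate field is *float64 in the request but plain float64 on User.
// The types are trimmed to the one field under discussion.
package main

import "fmt"

type User struct {
	MediaPlaybackRate float64
}

type UserModificationRequest struct {
	MediaPlaybackRate *float64
}

func (u *UserModificationRequest) Patch(user *User) {
	if u.MediaPlaybackRate != nil {
		user.MediaPlaybackRate = *u.MediaPlaybackRate
	}
}

func main() {
	user := User{MediaPlaybackRate: 1.0}

	(&UserModificationRequest{}).Patch(&user)
	fmt.Println(user.MediaPlaybackRate) // 1.0: nil field left the value alone

	rate := 1.5
	(&UserModificationRequest{MediaPlaybackRate: &rate}).Patch(&user)
	fmt.Println(user.MediaPlaybackRate) // 1.5: non-nil field overwrote it
}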
@@ -1,52 +0,0 @@
-// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-package proxy // import "miniflux.app/v2/internal/proxy"
-
-import (
-	"crypto/hmac"
-	"crypto/sha256"
-	"encoding/base64"
-	"net/url"
-	"path"
-
-	"miniflux.app/v2/internal/http/route"
-
-	"github.com/gorilla/mux"
-
-	"miniflux.app/v2/internal/config"
-)
-
-// ProxifyURL generates a relative URL for a proxified resource.
-func ProxifyURL(router *mux.Router, link string) string {
-	if link == "" {
-		return ""
-	}
-
-	if proxyImageUrl := config.Opts.ProxyUrl(); proxyImageUrl != "" {
-		proxyUrl, err := url.Parse(proxyImageUrl)
-		if err != nil {
-			return ""
-		}
-		proxyUrl.Path = path.Join(proxyUrl.Path, base64.URLEncoding.EncodeToString([]byte(link)))
-		return proxyUrl.String()
-	}
-
-	mac := hmac.New(sha256.New, config.Opts.ProxyPrivateKey())
-	mac.Write([]byte(link))
-	digest := mac.Sum(nil)
-	return route.Path(router, "proxy", "encodedDigest", base64.URLEncoding.EncodeToString(digest), "encodedURL", base64.URLEncoding.EncodeToString([]byte(link)))
-}
-
-// AbsoluteProxifyURL generates an absolute URL for a proxified resource.
-func AbsoluteProxifyURL(router *mux.Router, host, link string) string {
-	proxifiedUrl := ProxifyURL(router, link)
-
-	if config.Opts.ProxyUrl() == "" {
-		return proxifiedUrl
-	}
-	if config.Opts.HTTPS {
-		return "https://" + host + proxifiedUrl
-	}
-	return "http://" + host + proxifiedUrl
-}

@@ -6,158 +6,114 @@ package atom // import "miniflux.app/v2/internal/reader/atom"
 import (
 	"encoding/base64"
 	"html"
-	"log/slog"
 	"strings"
-	"time"
-
-	"miniflux.app/v2/internal/crypto"
-	"miniflux.app/v2/internal/model"
-	"miniflux.app/v2/internal/reader/date"
-	"miniflux.app/v2/internal/reader/sanitizer"
-	"miniflux.app/v2/internal/urllib"
 )

 // Specs: http://web.archive.org/web/20060811235523/http://www.mnot.net/drafts/draft-nottingham-atom-format-02.html
-type atom03Feed struct {
-	ID      string        `xml:"id"`
-	Title   atom03Text    `xml:"title"`
-	Author  atomPerson    `xml:"author"`
-	Links   atomLinks     `xml:"link"`
-	Entries []atom03Entry `xml:"entry"`
-}
-
-func (a *atom03Feed) Transform(baseURL string) *model.Feed {
-	var err error
-
-	feed := new(model.Feed)
-
-	feedURL := a.Links.firstLinkWithRelation("self")
-	feed.FeedURL, err = urllib.AbsoluteURL(baseURL, feedURL)
-	if err != nil {
-		feed.FeedURL = feedURL
-	}
-
-	siteURL := a.Links.originalLink()
-	feed.SiteURL, err = urllib.AbsoluteURL(baseURL, siteURL)
-	if err != nil {
-		feed.SiteURL = siteURL
-	}
-
-	feed.Title = a.Title.String()
-	if feed.Title == "" {
-		feed.Title = feed.SiteURL
-	}
-
-	for _, entry := range a.Entries {
-		item := entry.Transform()
-		entryURL, err := urllib.AbsoluteURL(feed.SiteURL, item.URL)
-		if err == nil {
-			item.URL = entryURL
-		}
-
-		if item.Author == "" {
-			item.Author = a.Author.String()
-		}
-
-		if item.Title == "" {
-			item.Title = sanitizer.TruncateHTML(item.Content, 100)
-		}
-
-		if item.Title == "" {
-			item.Title = item.URL
-		}
-
-		feed.Entries = append(feed.Entries, item)
-	}
-
-	return feed
-}
-
-type atom03Entry struct {
-	ID       string     `xml:"id"`
-	Title    atom03Text `xml:"title"`
-	Modified string     `xml:"modified"`
-	Issued   string     `xml:"issued"`
-	Created  string     `xml:"created"`
-	Links    atomLinks  `xml:"link"`
-	Summary  atom03Text `xml:"summary"`
-	Content  atom03Text `xml:"content"`
-	Author   atomPerson `xml:"author"`
-}
-
-func (a *atom03Entry) Transform() *model.Entry {
-	entry := model.NewEntry()
-	entry.URL = a.Links.originalLink()
-	entry.Date = a.entryDate()
-	entry.Author = a.Author.String()
-	entry.Hash = a.entryHash()
-	entry.Content = a.entryContent()
-	entry.Title = a.entryTitle()
-	return entry
-}
-
-func (a *atom03Entry) entryTitle() string {
-	return sanitizer.StripTags(a.Title.String())
-}
-
-func (a *atom03Entry) entryContent() string {
-	content := a.Content.String()
-	if content != "" {
-		return content
-	}
-
-	summary := a.Summary.String()
-	if summary != "" {
-		return summary
-	}
-
-	return ""
-}
-
-func (a *atom03Entry) entryDate() time.Time {
-	dateText := ""
-	for _, value := range []string{a.Issued, a.Modified, a.Created} {
-		if value != "" {
-			dateText = value
-			break
-		}
-	}
-
-	if dateText != "" {
-		result, err := date.Parse(dateText)
-		if err != nil {
-			slog.Debug("Unable to parse date from Atom 0.3 feed",
-				slog.String("date", dateText),
-				slog.String("id", a.ID),
-				slog.Any("error", err),
-			)
-			return time.Now()
-		}
-
-		return result
-	}
-
-	return time.Now()
-}
-
-func (a *atom03Entry) entryHash() string {
-	for _, value := range []string{a.ID, a.Links.originalLink()} {
-		if value != "" {
-			return crypto.Hash(value)
-		}
-	}
-
-	return ""
-}
-
-type atom03Text struct {
-	Type     string `xml:"type,attr"`
-	Mode     string `xml:"mode,attr"`
-	CharData string `xml:",chardata"`
-	InnerXML string `xml:",innerxml"`
-}
-
-func (a *atom03Text) String() string {
+type Atom03Feed struct {
+	Version string `xml:"version,attr"`
+
+	// The "atom:id" element's content conveys a permanent, globally unique identifier for the feed.
+	// It MUST NOT change over time, even if the feed is relocated. atom:feed elements MAY contain an atom:id element,
+	// but MUST NOT contain more than one. The content of this element, when present, MUST be a URI.
+	ID string `xml:"http://purl.org/atom/ns# id"`
+
+	// The "atom:title" element is a Content construct that conveys a human-readable title for the feed.
+	// atom:feed elements MUST contain exactly one atom:title element.
+	// If the feed describes a Web resource, its content SHOULD be the same as that resource's title.
+	Title Atom03Content `xml:"http://purl.org/atom/ns# title"`
+
+	// The "atom:link" element is a Link construct that conveys a URI associated with the feed.
+	// The nature of the relationship as well as the link itself is determined by the element's content.
+	// atom:feed elements MUST contain at least one atom:link element with a rel attribute value of "alternate".
+	// atom:feed elements MUST NOT contain more than one atom:link element with a rel attribute value of "alternate" that has the same type attribute value.
+	// atom:feed elements MAY contain additional atom:link elements beyond those described above.
+	Links AtomLinks `xml:"http://purl.org/atom/ns# link"`
+
+	// The "atom:author" element is a Person construct that indicates the default author of the feed.
+	// atom:feed elements MUST contain exactly one atom:author element,
+	// UNLESS all of the atom:feed element's child atom:entry elements contain an atom:author element.
+	// atom:feed elements MUST NOT contain more than one atom:author element.
+	Author AtomPerson `xml:"http://purl.org/atom/ns# author"`
+
+	// The "atom:entry" element's represents an individual entry that is contained by the feed.
+	// atom:feed elements MAY contain one or more atom:entry elements.
+	Entries []Atom03Entry `xml:"http://purl.org/atom/ns# entry"`
+}
+
+type Atom03Entry struct {
+	// The "atom:id" element's content conveys a permanent, globally unique identifier for the entry.
+	// It MUST NOT change over time, even if other representations of the entry (such as a web representation pointed to by the entry's atom:link element) are relocated.
+	// If the same entry is syndicated in two atom:feeds published by the same entity, the entry's atom:id MUST be the same in both feeds.
+	ID string `xml:"id"`
+
+	// The "atom:title" element is a Content construct that conveys a human-readable title for the entry.
+	// atom:entry elements MUST have exactly one "atom:title" element.
+	// If an entry describes a Web resource, its content SHOULD be the same as that resource's title.
+	Title Atom03Content `xml:"title"`
+
+	// The "atom:modified" element is a Date construct that indicates the time that the entry was last modified.
+	// atom:entry elements MUST contain an atom:modified element, but MUST NOT contain more than one.
+	// The content of an atom:modified element MUST have a time zone whose value SHOULD be "UTC".
+	Modified string `xml:"modified"`
+
+	// The "atom:issued" element is a Date construct that indicates the time that the entry was issued.
+	// atom:entry elements MUST contain an atom:issued element, but MUST NOT contain more than one.
+	// The content of an atom:issued element MAY omit a time zone.
+	Issued string `xml:"issued"`
+
+	// The "atom:created" element is a Date construct that indicates the time that the entry was created.
+	// atom:entry elements MAY contain an atom:created element, but MUST NOT contain more than one.
+	// The content of an atom:created element MUST have a time zone whose value SHOULD be "UTC".
+	// If atom:created is not present, its content MUST considered to be the same as that of atom:modified.
+	Created string `xml:"created"`
+
+	// The "atom:link" element is a Link construct that conveys a URI associated with the entry.
+	// The nature of the relationship as well as the link itself is determined by the element's content.
+	// atom:entry elements MUST contain at least one atom:link element with a rel attribute value of "alternate".
+	// atom:entry elements MUST NOT contain more than one atom:link element with a rel attribute value of "alternate" that has the same type attribute value.
+	// atom:entry elements MAY contain additional atom:link elements beyond those described above.
+	Links AtomLinks `xml:"link"`
+
+	// The "atom:summary" element is a Content construct that conveys a short summary, abstract or excerpt of the entry.
+	// atom:entry elements MAY contain an atom:created element, but MUST NOT contain more than one.
+	Summary Atom03Content `xml:"summary"`
+
+	// The "atom:content" element is a Content construct that conveys the content of the entry.
+	// atom:entry elements MAY contain one or more atom:content elements.
+	Content Atom03Content `xml:"content"`
+
+	// The "atom:author" element is a Person construct that indicates the default author of the entry.
+	// atom:entry elements MUST contain exactly one atom:author element,
+	// UNLESS the atom:feed element containing them contains an atom:author element itself.
+	// atom:entry elements MUST NOT contain more than one atom:author element.
+	Author AtomPerson `xml:"author"`
+}
+
+type Atom03Content struct {
+	// Content constructs MAY have a "type" attribute, whose value indicates the media type of the content.
+	// When present, this attribute's value MUST be a registered media type [RFC2045].
+	// If not present, its value MUST be considered to be "text/plain".
+	Type string `xml:"type,attr"`
+
+	// Content constructs MAY have a "mode" attribute, whose value indicates the method used to encode the content.
+	// When present, this attribute's value MUST be listed below.
+	// If not present, its value MUST be considered to be "xml".
+	//
+	// "xml": A mode attribute with the value "xml" indicates that the element's content is inline xml (for example, namespace-qualified XHTML).
+	//
+	// "escaped": A mode attribute with the value "escaped" indicates that the element's content is an escaped string.
+	// Processors MUST unescape the element's content before considering it as content of the indicated media type.
+	//
+	// "base64": A mode attribute with the value "base64" indicates that the element's content is base64-encoded [RFC2045].
+	// Processors MUST decode the element's content before considering it as content of the the indicated media type.
+	Mode string `xml:"mode,attr"`
+
+	CharData string `xml:",chardata"`
+	InnerXML string `xml:",innerxml"`
+}
+
+func (a *Atom03Content) Content() string {
 	content := ""

 	switch {

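// Editor's note: the body of Atom03Content.Content() is truncated in the hunk
// above, so here is a hedged, self-contained sketch of the three Atom 0.3
// content modes documented in the struct comments; decodeAtom03 is a
// hypothetical stand-in for the real method, and the inputs are illustrative
// raw strings rather than XML-decoded values.
package main

import (
	"encoding/base64"
	"fmt"
	"html"
)

func decodeAtom03(mode, charData, innerXML string) string {
	switch mode {
	case "base64":
		// Content is base64-encoded per RFC 2045.
		if decoded, err := base64.StdEncoding.DecodeString(charData); err == nil {
			return string(decoded)
		}
		return ""
	case "escaped":
		// Content is an escaped string that must be unescaped first.
		return html.UnescapeString(charData)
	default:
		// Mode defaults to "xml": the content is inline XML.
		return innerXML
	}
}

func main() {
	fmt.Println(decodeAtom03("base64", "PGI+aGk8L2I+", ""))          // <b>hi</b>
	fmt.Println(decodeAtom03("escaped", "&lt;b&gt;hi&lt;/b&gt;", "")) // <b>hi</b>
	fmt.Println(decodeAtom03("", "", "<b>hi</b>"))                    // <b>hi</b>
}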
@@ -0,0 +1,115 @@
+// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+package atom // import "miniflux.app/v2/internal/reader/atom"
+
+import (
+	"log/slog"
+	"time"
+
+	"miniflux.app/v2/internal/crypto"
+	"miniflux.app/v2/internal/model"
+	"miniflux.app/v2/internal/reader/date"
+	"miniflux.app/v2/internal/reader/sanitizer"
+	"miniflux.app/v2/internal/urllib"
+)
+
+type Atom03Adapter struct {
+	atomFeed *Atom03Feed
+}
+
+func NewAtom03Adapter(atomFeed *Atom03Feed) *Atom03Adapter {
+	return &Atom03Adapter{atomFeed}
+}
+
+func (a *Atom03Adapter) BuildFeed(baseURL string) *model.Feed {
+	feed := new(model.Feed)
+
+	// Populate the feed URL.
+	feedURL := a.atomFeed.Links.firstLinkWithRelation("self")
+	if feedURL != "" {
+		if absoluteFeedURL, err := urllib.AbsoluteURL(baseURL, feedURL); err == nil {
+			feed.FeedURL = absoluteFeedURL
+		}
+	} else {
+		feed.FeedURL = baseURL
+	}
+
+	// Populate the site URL.
+	siteURL := a.atomFeed.Links.OriginalLink()
+	if siteURL != "" {
+		if absoluteSiteURL, err := urllib.AbsoluteURL(baseURL, siteURL); err == nil {
+			feed.SiteURL = absoluteSiteURL
+		}
+	} else {
+		feed.SiteURL = baseURL
+	}
+
+	// Populate the feed title.
+	feed.Title = a.atomFeed.Title.Content()
+	if feed.Title == "" {
+		feed.Title = feed.SiteURL
+	}
+
+	for _, atomEntry := range a.atomFeed.Entries {
+		entry := model.NewEntry()
+
+		// Populate the entry URL.
+		entry.URL = atomEntry.Links.OriginalLink()
+		if entry.URL != "" {
+			if absoluteEntryURL, err := urllib.AbsoluteURL(feed.SiteURL, entry.URL); err == nil {
+				entry.URL = absoluteEntryURL
+			}
+		}
+
+		// Populate the entry content.
+		entry.Content = atomEntry.Content.Content()
+		if entry.Content == "" {
+			entry.Content = atomEntry.Summary.Content()
+		}
+
+		// Populate the entry title.
+		entry.Title = atomEntry.Title.Content()
+		if entry.Title == "" {
+			entry.Title = sanitizer.TruncateHTML(entry.Content, 100)
+		}
+		if entry.Title == "" {
+			entry.Title = entry.URL
+		}
+
+		// Populate the entry author.
+		entry.Author = atomEntry.Author.PersonName()
+		if entry.Author == "" {
+			entry.Author = a.atomFeed.Author.PersonName()
+		}
+
+		// Populate the entry date.
+		for _, value := range []string{atomEntry.Issued, atomEntry.Modified, atomEntry.Created} {
+			if parsedDate, err := date.Parse(value); err == nil {
+				entry.Date = parsedDate
+				break
+			} else {
+				slog.Debug("Unable to parse date from Atom 0.3 feed",
+					slog.String("date", value),
+					slog.String("id", atomEntry.ID),
+					slog.Any("error", err),
+				)
+			}
+		}
+		if entry.Date.IsZero() {
+			entry.Date = time.Now()
+		}
+
+		// Generate the entry hash.
+		for _, value := range []string{atomEntry.ID, atomEntry.Links.OriginalLink()} {
+			if value != "" {
+				entry.Hash = crypto.Hash(value)
+				break
+			}
+		}
+
+		feed.Entries = append(feed.Entries, entry)
+	}
+
+	return feed
+}

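// Editor's note: a hedged sketch, not part of the diff, of the adapter flow
// introduced above, written as if it lived in package atom: the XML layer
// unmarshals into Atom03Feed, and the adapter converts it to the internal
// model. TestAtom03AdapterSketch is a hypothetical test name.
package atom

import (
	"encoding/xml"
	"strings"
	"testing"
)

func TestAtom03AdapterSketch(t *testing.T) {
	data := `<?xml version="1.0" encoding="utf-8"?>
	<feed version="0.3" xmlns="http://purl.org/atom/ns#">
		<title>dive into mark</title>
	</feed>`

	var atomFeed Atom03Feed
	if err := xml.NewDecoder(strings.NewReader(data)).Decode(&atomFeed); err != nil {
		t.Fatal(err)
	}

	// BuildFeed resolves URLs against the base URL and falls back to it when
	// the feed declares no self link or site link.
	feed := NewAtom03Adapter(&atomFeed).BuildFeed("http://diveintomark.org/atom.xml")
	if feed.Title != "dive into mark" {
		t.Errorf("Incorrect title, got: %s", feed.Title)
	}
}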
@@ -27,7 +27,7 @@ func TestParseAtom03(t *testing.T) {
 	</entry>
 	</feed>`

-	feed, err := Parse("http://diveintomark.org/", bytes.NewReader([]byte(data)), "0.3")
+	feed, err := Parse("http://diveintomark.org/atom.xml", bytes.NewReader([]byte(data)), "0.3")
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -36,7 +36,7 @@ func TestParseAtom03(t *testing.T) {
 		t.Errorf("Incorrect title, got: %s", feed.Title)
 	}

-	if feed.FeedURL != "http://diveintomark.org/" {
+	if feed.FeedURL != "http://diveintomark.org/atom.xml" {
 		t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL)
 	}

@@ -74,6 +74,28 @@ func TestParseAtom03(t *testing.T) {
 	}
 }

+func TestParseAtom03WithoutSiteURL(t *testing.T) {
+	data := `<?xml version="1.0" encoding="utf-8"?>
+	<feed version="0.3" xmlns="http://purl.org/atom/ns#">
+		<modified>2003-12-13T18:30:02Z</modified>
+		<author><name>Mark Pilgrim</name></author>
+		<entry>
+			<title>Atom 0.3 snapshot</title>
+			<link rel="alternate" type="text/html" href="http://diveintomark.org/2003/12/13/atom03"/>
+			<id>tag:diveintomark.org,2003:3.2397</id>
+		</entry>
+	</feed>`
+
+	feed, err := Parse("http://diveintomark.org/atom.xml", bytes.NewReader([]byte(data)), "0.3")
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	if feed.SiteURL != "http://diveintomark.org/atom.xml" {
+		t.Errorf("Incorrect site URL, got: %s", feed.SiteURL)
+	}
+}
+
 func TestParseAtom03WithoutFeedTitle(t *testing.T) {
 	data := `<?xml version="1.0" encoding="utf-8"?>
 	<feed version="0.3" xmlns="http://purl.org/atom/ns#">

@@ -6,286 +6,200 @@ package atom // import "miniflux.app/v2/internal/reader/atom"
 import (
 	"encoding/xml"
 	"html"
-	"log/slog"
-	"strconv"
 	"strings"
-	"time"
-
-	"miniflux.app/v2/internal/crypto"
-	"miniflux.app/v2/internal/model"
-	"miniflux.app/v2/internal/reader/date"
 	"miniflux.app/v2/internal/reader/media"
 	"miniflux.app/v2/internal/reader/sanitizer"
-	"miniflux.app/v2/internal/urllib"
 )

-type atom10Feed struct {
-	XMLName xml.Name      `xml:"http://www.w3.org/2005/Atom feed"`
-	ID      string        `xml:"id"`
-	Title   atom10Text    `xml:"title"`
-	Authors atomAuthors   `xml:"author"`
-	Icon    string        `xml:"icon"`
-	Links   atomLinks     `xml:"link"`
-	Entries []atom10Entry `xml:"entry"`
-}
-
-func (a *atom10Feed) Transform(baseURL string) *model.Feed {
-	var err error
-
-	feed := new(model.Feed)
-
-	feedURL := a.Links.firstLinkWithRelation("self")
-	feed.FeedURL, err = urllib.AbsoluteURL(baseURL, feedURL)
-	if err != nil {
-		feed.FeedURL = feedURL
-	}
-
-	siteURL := a.Links.originalLink()
-	feed.SiteURL, err = urllib.AbsoluteURL(baseURL, siteURL)
-	if err != nil {
-		feed.SiteURL = siteURL
-	}
-
-	feed.Title = html.UnescapeString(a.Title.String())
-	if feed.Title == "" {
-		feed.Title = feed.SiteURL
-	}
-
-	feed.IconURL = strings.TrimSpace(a.Icon)
-
-	for _, entry := range a.Entries {
-		item := entry.Transform()
-		entryURL, err := urllib.AbsoluteURL(feed.SiteURL, item.URL)
-		if err == nil {
-			item.URL = entryURL
-		}
-
-		if item.Author == "" {
-			item.Author = a.Authors.String()
-		}
-
-		if item.Title == "" {
-			item.Title = sanitizer.TruncateHTML(item.Content, 100)
-		}
-
-		if item.Title == "" {
-			item.Title = item.URL
-		}
-
-		feed.Entries = append(feed.Entries, item)
-	}
-
-	return feed
-}
-
-type atom10Entry struct {
-	ID         string           `xml:"id"`
-	Title      atom10Text       `xml:"title"`
-	Published  string           `xml:"published"`
-	Updated    string           `xml:"updated"`
-	Links      atomLinks        `xml:"link"`
-	Summary    atom10Text       `xml:"summary"`
-	Content    atom10Text       `xml:"http://www.w3.org/2005/Atom content"`
-	Authors    atomAuthors      `xml:"author"`
-	Categories []atom10Category `xml:"category"`
-	media.MediaItemElement
-}
-
-func (a *atom10Entry) Transform() *model.Entry {
-	entry := model.NewEntry()
-	entry.URL = a.Links.originalLink()
-	entry.Date = a.entryDate()
-	entry.Author = a.Authors.String()
-	entry.Hash = a.entryHash()
-	entry.Content = a.entryContent()
-	entry.Title = a.entryTitle()
-	entry.Enclosures = a.entryEnclosures()
-	entry.CommentsURL = a.entryCommentsURL()
-	entry.Tags = a.entryCategories()
-	return entry
-}
-
-func (a *atom10Entry) entryTitle() string {
-	return html.UnescapeString(a.Title.String())
-}
-
-func (a *atom10Entry) entryContent() string {
-	content := a.Content.String()
-	if content != "" {
-		return content
-	}
-
-	summary := a.Summary.String()
-	if summary != "" {
-		return summary
-	}
-
-	mediaDescription := a.FirstMediaDescription()
-	if mediaDescription != "" {
-		return mediaDescription
-	}
-
-	return ""
-}
-
-// Note: The published date represents the original creation date for YouTube feeds.
-// Example:
-// <published>2019-01-26T08:02:28+00:00</published>
-// <updated>2019-01-29T07:27:27+00:00</updated>
-func (a *atom10Entry) entryDate() time.Time {
-	dateText := a.Published
-	if dateText == "" {
-		dateText = a.Updated
-	}
-
-	if dateText != "" {
-		result, err := date.Parse(dateText)
-		if err != nil {
-			slog.Debug("Unable to parse date from Atom 0.3 feed",
-				slog.String("date", dateText),
-				slog.String("id", a.ID),
-				slog.Any("error", err),
-			)
-			return time.Now()
-		}
-
-		return result
-	}
-
-	return time.Now()
-}
-
-func (a *atom10Entry) entryHash() string {
-	for _, value := range []string{a.ID, a.Links.originalLink()} {
-		if value != "" {
-			return crypto.Hash(value)
-		}
-	}
-
-	return ""
-}
-
-func (a *atom10Entry) entryEnclosures() model.EnclosureList {
-	enclosures := make(model.EnclosureList, 0)
-	duplicates := make(map[string]bool)
-
-	for _, mediaThumbnail := range a.AllMediaThumbnails() {
-		if _, found := duplicates[mediaThumbnail.URL]; !found {
-			duplicates[mediaThumbnail.URL] = true
-			enclosures = append(enclosures, &model.Enclosure{
-				URL:      mediaThumbnail.URL,
-				MimeType: mediaThumbnail.MimeType(),
-				Size:     mediaThumbnail.Size(),
-			})
-		}
-	}
-
-	for _, link := range a.Links {
-		if strings.EqualFold(link.Rel, "enclosure") {
-			if link.URL == "" {
-				continue
-			}
-
-			if _, found := duplicates[link.URL]; !found {
-				duplicates[link.URL] = true
-				length, _ := strconv.ParseInt(link.Length, 10, 0)
-				enclosures = append(enclosures, &model.Enclosure{URL: link.URL, MimeType: link.Type, Size: length})
-			}
-		}
-	}
-
-	for _, mediaContent := range a.AllMediaContents() {
-		if _, found := duplicates[mediaContent.URL]; !found {
-			duplicates[mediaContent.URL] = true
-			enclosures = append(enclosures, &model.Enclosure{
-				URL:      mediaContent.URL,
-				MimeType: mediaContent.MimeType(),
-				Size:     mediaContent.Size(),
-			})
-		}
-	}
-
-	for _, mediaPeerLink := range a.AllMediaPeerLinks() {
-		if _, found := duplicates[mediaPeerLink.URL]; !found {
-			duplicates[mediaPeerLink.URL] = true
-			enclosures = append(enclosures, &model.Enclosure{
-				URL:      mediaPeerLink.URL,
-				MimeType: mediaPeerLink.MimeType(),
-				Size:     mediaPeerLink.Size(),
-			})
-		}
-	}
-
-	return enclosures
-}
-
-func (r *atom10Entry) entryCategories() []string {
-	categoryList := make([]string, 0)
-
-	for _, atomCategory := range r.Categories {
-		if strings.TrimSpace(atomCategory.Label) != "" {
-			categoryList = append(categoryList, strings.TrimSpace(atomCategory.Label))
-		} else {
-			categoryList = append(categoryList, strings.TrimSpace(atomCategory.Term))
-		}
-	}
-
-	return categoryList
-}
-
-// See https://tools.ietf.org/html/rfc4685#section-4
-// If the type attribute of the atom:link is omitted, its value is assumed to be "application/atom+xml".
-// We accept only HTML or XHTML documents for now since the intention is to have the same behavior as RSS.
-func (a *atom10Entry) entryCommentsURL() string {
-	commentsURL := a.Links.firstLinkWithRelationAndType("replies", "text/html", "application/xhtml+xml")
-	if urllib.IsAbsoluteURL(commentsURL) {
-		return commentsURL
-	}
-	return ""
-}
-
-type atom10Text struct {
-	Type             string               `xml:"type,attr"`
-	CharData         string               `xml:",chardata"`
-	InnerXML         string               `xml:",innerxml"`
-	XHTMLRootElement atomXHTMLRootElement `xml:"http://www.w3.org/1999/xhtml div"`
-}
-
-type atom10Category struct {
-	Term  string `xml:"term,attr"`
-	Label string `xml:"label,attr"`
-}
-
-func (a *atom10Text) String() string {
-	var content string
-
-	switch {
-	case a.Type == "", a.Type == "text", a.Type == "text/plain":
-		if strings.HasPrefix(strings.TrimSpace(a.InnerXML), `<![CDATA[`) {
-			content = html.EscapeString(a.CharData)
-		} else {
-			content = a.InnerXML
-		}
-	case a.Type == "xhtml":
-		var root = a.XHTMLRootElement
-		if root.XMLName.Local == "div" {
-			content = root.InnerXML
-		} else {
-			content = a.InnerXML
-		}
-	default:
-		content = a.CharData
-	}
-
-	return strings.TrimSpace(content)
-}
-
-type atomXHTMLRootElement struct {
-	XMLName  xml.Name `xml:"div"`
-	InnerXML string   `xml:",innerxml"`
-}
-
+// The "atom:feed" element is the document (i.e., top-level) element of
+// an Atom Feed Document, acting as a container for metadata and data
+// associated with the feed. Its element children consist of metadata
+// elements followed by zero or more atom:entry child elements.
+//
+// Specs:
+// https://tools.ietf.org/html/rfc4287
+// https://validator.w3.org/feed/docs/atom.html
+type Atom10Feed struct {
+	XMLName xml.Name `xml:"http://www.w3.org/2005/Atom feed"`
+
+	// The "atom:id" element conveys a permanent, universally unique
+	// identifier for an entry or feed.
+	//
+	// Its content MUST be an IRI, as defined by [RFC3987]. Note that the
+	// definition of "IRI" excludes relative references. Though the IRI
+	// might use a dereferencable scheme, Atom Processors MUST NOT assume it
+	// can be dereferenced.
+	//
+	// atom:feed elements MUST contain exactly one atom:id element.
+	ID string `xml:"http://www.w3.org/2005/Atom id"`
+
+	// The "atom:title" element is a Text construct that conveys a human-
+	// readable title for an entry or feed.
+	//
+	// atom:feed elements MUST contain exactly one atom:title element.
+	Title Atom10Text `xml:"http://www.w3.org/2005/Atom title"`
+
+	// The "atom:author" element is a Person construct that indicates the
+	// author of the entry or feed.
+	//
+	// atom:feed elements MUST contain one or more atom:author elements,
+	// unless all of the atom:feed element's child atom:entry elements
+	// contain at least one atom:author element.
+	Authors AtomPersons `xml:"http://www.w3.org/2005/Atom author"`
+
+	// The "atom:icon" element's content is an IRI reference [RFC3987] that
+	// identifies an image that provides iconic visual identification for a
+	// feed.
+	//
+	// atom:feed elements MUST NOT contain more than one atom:icon element.
+	Icon string `xml:"http://www.w3.org/2005/Atom icon"`
+
+	// The "atom:logo" element's content is an IRI reference [RFC3987] that
+	// identifies an image that provides visual identification for a feed.
+	//
+	// atom:feed elements MUST NOT contain more than one atom:logo element.
+	Logo string `xml:"http://www.w3.org/2005/Atom logo"`
+
+	// atom:feed elements SHOULD contain one atom:link element with a rel
+	// attribute value of "self". This is the preferred URI for
+	// retrieving Atom Feed Documents representing this Atom feed.
+	//
+	// atom:feed elements MUST NOT contain more than one atom:link
+	// element with a rel attribute value of "alternate" that has the
+	// same combination of type and hreflang attribute values.
+	Links AtomLinks `xml:"http://www.w3.org/2005/Atom link"`
+
+	// The "atom:category" element conveys information about a category
+	// associated with an entry or feed. This specification assigns no
+	// meaning to the content (if any) of this element.
+	//
+	// atom:feed elements MAY contain any number of atom:category
+	// elements.
+	Categories AtomCategories `xml:"http://www.w3.org/2005/Atom category"`
+
+	Entries []Atom10Entry `xml:"http://www.w3.org/2005/Atom entry"`
+}
+
+type Atom10Entry struct {
+	// The "atom:id" element conveys a permanent, universally unique
+	// identifier for an entry or feed.
+	//
+	// Its content MUST be an IRI, as defined by [RFC3987]. Note that the
+	// definition of "IRI" excludes relative references. Though the IRI
+	// might use a dereferencable scheme, Atom Processors MUST NOT assume it
+	// can be dereferenced.
+	//
+	// atom:entry elements MUST contain exactly one atom:id element.
+	ID string `xml:"http://www.w3.org/2005/Atom id"`
+
+	// The "atom:title" element is a Text construct that conveys a human-
+	// readable title for an entry or feed.
+	//
+	// atom:entry elements MUST contain exactly one atom:title element.
+	Title Atom10Text `xml:"http://www.w3.org/2005/Atom title"`
+
+	// The "atom:published" element is a Date construct indicating an
+	// instant in time associated with an event early in the life cycle of
+	// the entry.
+	Published string `xml:"http://www.w3.org/2005/Atom published"`
+
+	// The "atom:updated" element is a Date construct indicating the most
+	// recent instant in time when an entry or feed was modified in a way
+	// the publisher considers significant. Therefore, not all
+	// modifications necessarily result in a changed atom:updated value.
+	//
+	// atom:entry elements MUST contain exactly one atom:updated element.
+	Updated string `xml:"http://www.w3.org/2005/Atom updated"`
+
+	// atom:entry elements MUST NOT contain more than one atom:link
+	// element with a rel attribute value of "alternate" that has the
+	// same combination of type and hreflang attribute values.
+	Links AtomLinks `xml:"http://www.w3.org/2005/Atom link"`
+
+	// atom:entry elements MUST contain an atom:summary element in either
+	// of the following cases:
+	// * the atom:entry contains an atom:content that has a "src"
+	// attribute (and is thus empty).
+	// * the atom:entry contains content that is encoded in Base64;
+	// i.e., the "type" attribute of atom:content is a MIME media type
+	// [MIMEREG], but is not an XML media type [RFC3023], does not
+	// begin with "text/", and does not end with "/xml" or "+xml".
+	//
+	// atom:entry elements MUST NOT contain more than one atom:summary
+	// element.
+	Summary Atom10Text `xml:"http://www.w3.org/2005/Atom summary"`
+
+	// atom:entry elements MUST NOT contain more than one atom:content
+	// element.
+	Content Atom10Text `xml:"http://www.w3.org/2005/Atom content"`
+
+	// The "atom:author" element is a Person construct that indicates the
+	// author of the entry or feed.
+	//
+	// atom:entry elements MUST contain one or more atom:author elements
+	Authors AtomPersons `xml:"http://www.w3.org/2005/Atom author"`
+
+	// The "atom:category" element conveys information about a category
+	// associated with an entry or feed. This specification assigns no
+	// meaning to the content (if any) of this element.
+	//
+	// atom:entry elements MAY contain any number of atom:category
+	// elements.
+	Categories AtomCategories `xml:"http://www.w3.org/2005/Atom category"`
+
+	media.MediaItemElement
+}
+
+// A Text construct contains human-readable text, usually in small
+// quantities. The content of Text constructs is Language-Sensitive.
+// Specs: https://datatracker.ietf.org/doc/html/rfc4287#section-3.1
+// Text: https://datatracker.ietf.org/doc/html/rfc4287#section-3.1.1.1
+// HTML: https://datatracker.ietf.org/doc/html/rfc4287#section-3.1.1.2
+// XHTML: https://datatracker.ietf.org/doc/html/rfc4287#section-3.1.1.3
+type Atom10Text struct {
+	Type             string               `xml:"type,attr"`
+	CharData         string               `xml:",chardata"`
+	InnerXML         string               `xml:",innerxml"`
+	XHTMLRootElement AtomXHTMLRootElement `xml:"http://www.w3.org/1999/xhtml div"`
+}
+
+func (a *Atom10Text) Body() string {
+	var content string
+
+	if strings.EqualFold(a.Type, "xhtml") {
+		content = a.xhtmlContent()
+	} else {
+		content = a.CharData
+	}
+
+	return strings.TrimSpace(content)
+}
+
+func (a *Atom10Text) Title() string {
+	var content string
+
+	switch {
+	case strings.EqualFold(a.Type, "xhtml"):
+		content = a.xhtmlContent()
+	case strings.Contains(a.InnerXML, "<![CDATA["):
+		content = html.UnescapeString(a.CharData)
+	default:
+		content = a.CharData
+	}
+
+	content = sanitizer.StripTags(content)
+	return strings.TrimSpace(content)
+}
+
+func (a *Atom10Text) xhtmlContent() string {
+	if a.XHTMLRootElement.XMLName.Local == "div" {
+		return a.XHTMLRootElement.InnerXML
+	}
+	return a.InnerXML
+}
+
+type AtomXHTMLRootElement struct {
+	XMLName  xml.Name `xml:"div"`
+	InnerXML string   `xml:",innerxml"`
+}

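// Editor's note: a hedged sketch, not part of the diff, of the split performed
// above: atom10Text.String() becomes two accessors, Body() for entry content
// and Title() for plain-text titles. The struct literal bypasses XML decoding
// for brevity, and TestAtom10TextTitleSketch is a hypothetical test name.
package atom

import "testing"

func TestAtom10TextTitleSketch(t *testing.T) {
	text := Atom10Text{
		Type:     "html",
		CharData: "<b>Hello</b>",
		InnerXML: "<![CDATA[<b>Hello</b>]]>",
	}

	// Title() unescapes the CDATA payload and strips the markup.
	if got := text.Title(); got != "Hello" {
		t.Errorf(`Not expected output: got %q instead of %q`, got, "Hello")
	}
}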
@@ -0,0 +1,254 @@
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package atom // import "miniflux.app/v2/internal/reader/atom"

import (
    "log/slog"
    "slices"
    "sort"
    "strconv"
    "strings"
    "time"

    "miniflux.app/v2/internal/crypto"
    "miniflux.app/v2/internal/model"
    "miniflux.app/v2/internal/reader/date"
    "miniflux.app/v2/internal/reader/sanitizer"
    "miniflux.app/v2/internal/urllib"
)

type Atom10Adapter struct {
    atomFeed *Atom10Feed
}

func NewAtom10Adapter(atomFeed *Atom10Feed) *Atom10Adapter {
    return &Atom10Adapter{atomFeed}
}

func (a *Atom10Adapter) BuildFeed(baseURL string) *model.Feed {
    feed := new(model.Feed)

    // Populate the feed URL.
    feedURL := a.atomFeed.Links.firstLinkWithRelation("self")
    if feedURL != "" {
        if absoluteFeedURL, err := urllib.AbsoluteURL(baseURL, feedURL); err == nil {
            feed.FeedURL = absoluteFeedURL
        }
    } else {
        feed.FeedURL = baseURL
    }

    // Populate the site URL.
    siteURL := a.atomFeed.Links.OriginalLink()
    if siteURL != "" {
        if absoluteSiteURL, err := urllib.AbsoluteURL(baseURL, siteURL); err == nil {
            feed.SiteURL = absoluteSiteURL
        }
    } else {
        feed.SiteURL = baseURL
    }

    // Populate the feed title.
    feed.Title = a.atomFeed.Title.Body()
    if feed.Title == "" {
        feed.Title = feed.SiteURL
    }

    // Populate the feed icon.
    if a.atomFeed.Icon != "" {
        if absoluteIconURL, err := urllib.AbsoluteURL(feed.SiteURL, a.atomFeed.Icon); err == nil {
            feed.IconURL = absoluteIconURL
        }
    } else if a.atomFeed.Logo != "" {
        if absoluteLogoURL, err := urllib.AbsoluteURL(feed.SiteURL, a.atomFeed.Logo); err == nil {
            feed.IconURL = absoluteLogoURL
        }
    }
    feed.Entries = a.populateEntries(feed.SiteURL)
    return feed
}

func (a *Atom10Adapter) populateEntries(siteURL string) model.Entries {
    entries := make(model.Entries, 0, len(a.atomFeed.Entries))

    for _, atomEntry := range a.atomFeed.Entries {
        entry := model.NewEntry()

        // Populate the entry URL.
        entry.URL = atomEntry.Links.OriginalLink()
        if entry.URL != "" {
            if absoluteEntryURL, err := urllib.AbsoluteURL(siteURL, entry.URL); err == nil {
                entry.URL = absoluteEntryURL
            }
        }

        // Populate the entry content.
        entry.Content = atomEntry.Content.Body()
        if entry.Content == "" {
            entry.Content = atomEntry.Summary.Body()
            if entry.Content == "" {
                entry.Content = atomEntry.FirstMediaDescription()
            }
        }

        // Populate the entry title.
        entry.Title = atomEntry.Title.Title()
        if entry.Title == "" {
            entry.Title = sanitizer.TruncateHTML(entry.Content, 100)
            if entry.Title == "" {
                entry.Title = entry.URL
            }
        }

        // Populate the entry author.
        authors := atomEntry.Authors.PersonNames()
        if len(authors) == 0 {
            authors = a.atomFeed.Authors.PersonNames()
        }
        sort.Strings(authors)
        authors = slices.Compact(authors)
        entry.Author = strings.Join(authors, ", ")

        // Populate the entry date.
        for _, value := range []string{atomEntry.Published, atomEntry.Updated} {
            if value != "" {
                if parsedDate, err := date.Parse(value); err != nil {
                    slog.Debug("Unable to parse date from Atom 1.0 feed",
                        slog.String("date", value),
                        slog.String("url", entry.URL),
                        slog.Any("error", err),
                    )
                } else {
                    entry.Date = parsedDate
                    break
                }
            }
        }
        if entry.Date.IsZero() {
            entry.Date = time.Now()
        }

        // Populate categories.
        categories := atomEntry.Categories.CategoryNames()
        if len(categories) == 0 {
            categories = a.atomFeed.Categories.CategoryNames()
        }
        sort.Strings(categories)
        entry.Tags = slices.Compact(categories)

        // Populate the commentsURL if defined.
        // See https://tools.ietf.org/html/rfc4685#section-4
        // If the type attribute of the atom:link is omitted, its value is assumed to be "application/atom+xml".
        // We accept only HTML or XHTML documents for now since the intention is to have the same behavior as RSS.
        commentsURL := atomEntry.Links.firstLinkWithRelationAndType("replies", "text/html", "application/xhtml+xml")
        if urllib.IsAbsoluteURL(commentsURL) {
            entry.CommentsURL = commentsURL
        }

        // Generate the entry hash.
        for _, value := range []string{atomEntry.ID, atomEntry.Links.OriginalLink()} {
            if value != "" {
                entry.Hash = crypto.Hash(value)
                break
            }
        }

        // Populate the entry enclosures.
        uniqueEnclosuresMap := make(map[string]bool)

        for _, mediaThumbnail := range atomEntry.AllMediaThumbnails() {
            mediaURL := strings.TrimSpace(mediaThumbnail.URL)
            if mediaURL == "" {
                continue
            }
            if _, found := uniqueEnclosuresMap[mediaURL]; !found {
                if mediaAbsoluteURL, err := urllib.AbsoluteURL(siteURL, mediaURL); err != nil {
                    slog.Debug("Unable to build absolute URL for media thumbnail",
                        slog.String("url", mediaThumbnail.URL),
                        slog.String("site_url", siteURL),
                        slog.Any("error", err),
                    )
                } else {
                    uniqueEnclosuresMap[mediaAbsoluteURL] = true
                    entry.Enclosures = append(entry.Enclosures, &model.Enclosure{
                        URL:      mediaAbsoluteURL,
                        MimeType: mediaThumbnail.MimeType(),
                        Size:     mediaThumbnail.Size(),
                    })
                }
            }
        }

        for _, link := range atomEntry.Links.findAllLinksWithRelation("enclosure") {
            absoluteEnclosureURL, err := urllib.AbsoluteURL(siteURL, link.Href)
            if err != nil {
                slog.Debug("Unable to resolve absolute URL for enclosure",
                    slog.String("enclosure_url", link.Href),
                    slog.String("entry_url", entry.URL),
                    slog.Any("error", err),
                )
            } else {
                if _, found := uniqueEnclosuresMap[absoluteEnclosureURL]; !found {
                    uniqueEnclosuresMap[absoluteEnclosureURL] = true
                    length, _ := strconv.ParseInt(link.Length, 10, 0)
                    entry.Enclosures = append(entry.Enclosures, &model.Enclosure{
                        URL:      absoluteEnclosureURL,
                        MimeType: link.Type,
                        Size:     length,
                    })
                }
            }
        }

        for _, mediaContent := range atomEntry.AllMediaContents() {
            mediaURL := strings.TrimSpace(mediaContent.URL)
            if mediaURL == "" {
                continue
            }
            if mediaAbsoluteURL, err := urllib.AbsoluteURL(siteURL, mediaURL); err != nil {
                slog.Debug("Unable to build absolute URL for media content",
                    slog.String("url", mediaContent.URL),
                    slog.String("site_url", siteURL),
                    slog.Any("error", err),
                )
            } else {
                if _, found := uniqueEnclosuresMap[mediaAbsoluteURL]; !found {
                    uniqueEnclosuresMap[mediaAbsoluteURL] = true
                    entry.Enclosures = append(entry.Enclosures, &model.Enclosure{
                        URL:      mediaAbsoluteURL,
                        MimeType: mediaContent.MimeType(),
                        Size:     mediaContent.Size(),
                    })
                }
            }
        }

        for _, mediaPeerLink := range atomEntry.AllMediaPeerLinks() {
            mediaURL := strings.TrimSpace(mediaPeerLink.URL)
            if mediaURL == "" {
                continue
            }
            if mediaAbsoluteURL, err := urllib.AbsoluteURL(siteURL, mediaURL); err != nil {
                slog.Debug("Unable to build absolute URL for media peer link",
                    slog.String("url", mediaPeerLink.URL),
                    slog.String("site_url", siteURL),
                    slog.Any("error", err),
                )
            } else {
                if _, found := uniqueEnclosuresMap[mediaAbsoluteURL]; !found {
                    uniqueEnclosuresMap[mediaAbsoluteURL] = true
                    entry.Enclosures = append(entry.Enclosures, &model.Enclosure{
                        URL:      mediaAbsoluteURL,
                        MimeType: mediaPeerLink.MimeType(),
                        Size:     mediaPeerLink.Size(),
                    })
                }
            }
        }

        entries = append(entries, entry)
    }

    return entries
}
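Reviewer note: the three media loops above share one de-duplication pattern, a map keyed on the absolute URL that guards the enclosure list, so the same media item referenced as thumbnail, content, and enclosure link is attached only once. A minimal sketch of the pattern with hypothetical names:

// uniqueEnclosures mirrors the map-guard idiom used by populateEntries:
// the first occurrence of each URL wins, later duplicates are dropped.
func uniqueEnclosures(urls []string) []string {
    seen := make(map[string]bool)
    result := make([]string, 0, len(urls))
    for _, u := range urls {
        if !seen[u] {
            seen[u] = true
            result = append(result, u)
        }
    }
    return result
}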
@@ -12,7 +12,6 @@ import (
func TestParseAtomSample(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">

      <title>Example Feed</title>
      <link href="http://example.org/"/>
      <updated>2003-12-13T18:30:02Z</updated>

@@ -20,7 +19,6 @@ func TestParseAtomSample(t *testing.T) {
        <name>John Doe</name>
      </author>
      <id>urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6</id>

      <entry>
        <title>Atom-Powered Robots Run Amok</title>
        <link href="http://example.org/2003/12/13/atom03"/>

@@ -28,7 +26,6 @@ func TestParseAtomSample(t *testing.T) {
        <updated>2003-12-13T18:30:02Z</updated>
        <summary>Some text.</summary>
      </entry>

    </feed>`

    feed, err := Parse("http://example.org/feed.xml", bytes.NewReader([]byte(data)), "10")

@@ -220,12 +217,31 @@ func TestParseFeedURL(t *testing.T) {
    }
}

func TestParseFeedWithRelativeURL(t *testing.T) {
func TestParseFeedWithRelativeFeedURL(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
      <title>Example Feed</title>
      <link rel="alternate" type="text/html" href="https://example.org/"/>
      <link rel="self" type="application/atom+xml" href="/feed"/>
      <updated>2003-12-13T18:30:02Z</updated>
    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")
    if err != nil {
        t.Fatal(err)
    }

    if feed.FeedURL != "https://example.org/feed" {
        t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL)
    }
}

func TestParseFeedWithRelativeSiteURL(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
      <title>Example Feed</title>
      <link href="/blog/atom.xml" rel="self" type="application/atom+xml"/>
      <link href="/blog"/>
      <link href="/blog "/>

      <entry>
        <title>Test</title>

@@ -244,15 +260,47 @@ func TestParseFeedWithRelativeURL(t *testing.T) {
    }

    if feed.FeedURL != "https://example.org/blog/atom.xml" {
        t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL)
        t.Errorf("Incorrect feed URL, got: %q", feed.FeedURL)
    }

    if feed.SiteURL != "https://example.org/blog" {
        t.Errorf("Incorrect site URL, got: %s", feed.SiteURL)
        t.Errorf("Incorrect site URL, got: %q", feed.SiteURL)
    }

    if feed.Entries[0].URL != "https://example.org/blog/article.html" {
        t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL)
        t.Errorf("Incorrect entry URL, got: %q", feed.Entries[0].URL)
    }
}

func TestParseFeedSiteURLWithTrailingSpace(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
      <link href="http://example.org "/>
    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")
    if err != nil {
        t.Fatal(err)
    }

    if feed.SiteURL != "http://example.org" {
        t.Errorf("Incorrect site URL, got: %q", feed.SiteURL)
    }
}

func TestParseFeedWithFeedURLWithTrailingSpace(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
      <link href="/blog/atom.xml " rel="self" type="application/atom+xml"/>
    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")
    if err != nil {
        t.Fatal(err)
    }

    if feed.FeedURL != "https://example.org/blog/atom.xml" {
        t.Errorf("Incorrect feed URL, got: %q", feed.FeedURL)
    }
}

@@ -420,7 +468,7 @@ func TestParseEntryWithPlainTextTitle(t *testing.T) {
    expected := `AT&T bought by SBC!`
    for i := range 2 {
        if feed.Entries[i].Title != expected {
            t.Errorf("Incorrect title for entry #%d, got: %q", i, feed.Entries[i].Title)
            t.Errorf("Incorrect title for entry #%d, got: %q instead of %q", i, feed.Entries[i].Title, expected)
        }
    }
}

@@ -430,33 +478,20 @@ func TestParseEntryWithHTMLTitle(t *testing.T) {
    <feed xmlns="http://www.w3.org/2005/Atom">
      <title>Example Feed</title>
      <link href="http://example.org/"/>

      <entry>
        <title type="html"><code>Test</code> Test</title>
        <link href="http://example.org/2003/12/13/atom03"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <summary>Some text.</summary>
        <title type="html"><code>Code</code> Test</title>
        <link href="http://example.org/z"/>
      </entry>

      <entry>
        <title type="html"><![CDATA[Test “Test”]]></title>
        <link href="http://example.org/2003/12/13/atom03"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <summary>Some text.</summary>
        <title type="html"><![CDATA[Test with “unicode quote”]]></title>
        <link href="http://example.org/b"/>
      </entry>

      <entry>
        <title>
          <![CDATA[Entry title with space around CDATA]]>
        </title>
        <link href="http://example.org/2003/12/13/atom03"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <summary>Some text.</summary>
        <link href="http://example.org/c"/>
      </entry>

    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")

@@ -464,11 +499,11 @@ func TestParseEntryWithHTMLTitle(t *testing.T) {
        t.Fatal(err)
    }

    if feed.Entries[0].Title != "<code>Test</code> Test" {
    if feed.Entries[0].Title != "Code Test" {
        t.Errorf("Incorrect entry title, got: %q", feed.Entries[0].Title)
    }

    if feed.Entries[1].Title != "Test “Test”" {
    if feed.Entries[1].Title != "Test with “unicode quote”" {
        t.Errorf("Incorrect entry title, got: %q", feed.Entries[1].Title)
    }

@@ -502,8 +537,8 @@ func TestParseEntryWithXHTMLTitle(t *testing.T) {
        t.Fatal(err)
    }

    if feed.Entries[0].Title != `This is <b>XHTML</b> content.` {
        t.Errorf("Incorrect entry title, got: %q", feed.Entries[1].Title)
    if feed.Entries[0].Title != `This is XHTML content.` {
        t.Errorf("Incorrect entry title, got: %q", feed.Entries[0].Title)
    }
}

@@ -608,7 +643,7 @@ func TestParseEntryWithDoubleEncodedEntitiesTitle(t *testing.T) {
        t.Fatal(err)
    }

    if feed.Entries[0].Title != `'AT&T'` {
    if feed.Entries[0].Title != `'AT&T'` {
        t.Errorf("Incorrect entry title, got: %q", feed.Entries[0].Title)
    }
}

@@ -644,31 +679,21 @@ func TestParseEntryWithHTMLSummary(t *testing.T) {
    <feed xmlns="http://www.w3.org/2005/Atom">
      <title>Example Feed</title>
      <link href="http://example.org/"/>

      <entry>
        <title type="html">Example</title>
        <title type="html">Example 1</title>
        <link href="http://example.org/1"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <summary type="html"><code>std::unique_ptr&lt;S&gt;</code></summary>
        <summary type="html"><code>std::unique_ptr&lt;S&gt; myvar;</code></summary>
      </entry>

      <entry>
        <title type="html">Example</title>
        <title type="html">Example 2</title>
        <link href="http://example.org/2"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <summary type="text/html"><code>std::unique_ptr&lt;S&gt;</code></summary>
        <summary type="text/html"><code>std::unique_ptr&lt;S&gt; myvar;</code></summary>
      </entry>

      <entry>
        <title type="html">Example</title>
        <title type="html">Example 3</title>
        <link href="http://example.org/3"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <summary type="html"><![CDATA[<code>std::unique_ptr<S></code>]]></summary>
        <summary type="html"><![CDATA[<code>std::unique_ptr<S> myvar;</code>]]></summary>
      </entry>

    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")

@@ -676,7 +701,11 @@ func TestParseEntryWithHTMLSummary(t *testing.T) {
        t.Fatal(err)
    }

    expected := `<code>std::unique_ptr<S></code>`
    if len(feed.Entries) != 3 {
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    expected := `<code>std::unique_ptr<S> myvar;</code>`
    for i := range 3 {
        if feed.Entries[i].Content != expected {
            t.Errorf("Incorrect content for entry #%d, got: %q", i, feed.Entries[i].Content)

@@ -728,7 +757,7 @@ func TestParseEntryWithTextSummary(t *testing.T) {
        t.Fatal(err)
    }

    expected := `AT&T <S>`
    expected := `AT&T <S>`
    for i := range 4 {
        if feed.Entries[i].Content != expected {
            t.Errorf("Incorrect content for entry #%d, got: %q", i, feed.Entries[i].Content)

@@ -747,7 +776,7 @@ func TestParseEntryWithTextContent(t *testing.T) {
        <link href="http://example.org/a"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <content>AT&T <S></content>
        <content>AT&T <strong>Strong Element</strong></content>
      </entry>

      <entry>

@@ -755,7 +784,7 @@ func TestParseEntryWithTextContent(t *testing.T) {
        <link href="http://example.org/b"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <content type="text">AT&T <S></content>
        <content type="text">AT&T <strong>Strong Element</strong></content>
      </entry>

      <entry>

@@ -763,7 +792,7 @@ func TestParseEntryWithTextContent(t *testing.T) {
        <link href="http://example.org/c"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <content type="text/plain">AT&T <S></content>
        <content type="text/plain">AT&T <strong>Strong Element</strong></content>
      </entry>

      <entry>

@@ -771,7 +800,7 @@ func TestParseEntryWithTextContent(t *testing.T) {
        <link href="http://example.org/d"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <content><![CDATA[AT&T <S>]]></content>
        <content><![CDATA[AT&T <strong>Strong Element</strong>]]></content>
      </entry>

    </feed>`

@@ -781,10 +810,10 @@ func TestParseEntryWithTextContent(t *testing.T) {
        t.Fatal(err)
    }

    expected := `AT&T <S>`
    expected := `AT&T <strong>Strong Element</strong>`
    for i := range 4 {
        if feed.Entries[i].Content != expected {
            t.Errorf("Incorrect content for entry #%d, got: %q", i, feed.Entries[i].Content)
            t.Errorf("Incorrect content for entry #%d, got: %q instead of %q", i, feed.Entries[i].Content, expected)
        }
    }
}

@@ -925,7 +954,6 @@ func TestParseEntryWithMultipleAuthors(t *testing.T) {
    <feed xmlns="http://www.w3.org/2005/Atom">
      <title>Example Feed</title>
      <link href="http://example.org/"/>

      <entry>
        <link href="http://example.org/2003/12/13/atom03"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>

@@ -938,7 +966,6 @@ func TestParseEntryWithMultipleAuthors(t *testing.T) {
          <name>Bob</name>
        </author>
      </entry>

    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")

@@ -951,7 +978,7 @@ func TestParseEntryWithMultipleAuthors(t *testing.T) {
    }
}

func TestParseEntryWithoutAuthor(t *testing.T) {
func TestParseFeedWithEntryWithoutAuthor(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
      <title>Example Feed</title>

@@ -959,14 +986,12 @@ func TestParseEntryWithoutAuthor(t *testing.T) {
      <author>
        <name>John Doe</name>
      </author>

      <entry>
        <link href="http://example.org/2003/12/13/atom03"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <summary>Some text.</summary>
      </entry>

    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")

@@ -990,14 +1015,15 @@ func TestParseFeedWithMultipleAuthors(t *testing.T) {
      <author>
        <name>Bob</name>
      </author>

      <author>
        <name>Bob</name>
      </author>
      <entry>
        <link href="http://example.org/2003/12/13/atom03"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <summary>Some text.</summary>
      </entry>

    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")

@@ -1015,14 +1041,12 @@ func TestParseFeedWithoutAuthor(t *testing.T) {
    <feed xmlns="http://www.w3.org/2005/Atom">
      <title>Example Feed</title>
      <link href="http://example.org/"/>

      <entry>
        <link href="http://example.org/2003/12/13/atom03"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <updated>2003-12-13T18:30:02Z</updated>
        <summary>Some text.</summary>
      </entry>

    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")

@@ -1081,7 +1105,7 @@ func TestParseEntryWithEnclosures(t *testing.T) {
    }

    if len(feed.Entries) != 1 {
        t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if feed.Entries[0].URL != "http://www.example.org/entries/1" {

@@ -1116,6 +1140,89 @@ func TestParseEntryWithEnclosures(t *testing.T) {
    }
}

func TestParseEntryWithRelativeEnclosureURL(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
      <id>https://www.example.org/myfeed</id>
      <title>My Podcast Feed</title>
      <link href="https://example.org" />
      <link rel="self" href="https://example.org/myfeed" />
      <entry>
        <id>https://www.example.org/entries/1</id>
        <title>Atom 1.0</title>
        <updated>2005-07-15T12:00:00Z</updated>
        <link href="https://www.example.org/entries/1" />
        <link rel="enclosure"
              type="audio/mpeg"
              title="MP3"
              href=" /myaudiofile.mp3 "
              length="1234" />
        </content>
      </entry>
    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")
    if err != nil {
        t.Fatal(err)
    }

    if len(feed.Entries) != 1 {
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if len(feed.Entries[0].Enclosures) != 1 {
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

    if feed.Entries[0].Enclosures[0].URL != "https://example.org/myaudiofile.mp3" {
        t.Errorf("Incorrect enclosure URL, got: %q", feed.Entries[0].Enclosures[0].URL)
    }
}

func TestParseEntryWithDuplicateEnclosureURL(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
      <id>http://www.example.org/myfeed</id>
      <title>My Podcast Feed</title>
      <link href="http://example.org" />
      <link rel="self" href="http://example.org/myfeed" />
      <entry>
        <id>http://www.example.org/entries/1</id>
        <title>Atom 1.0</title>
        <updated>2005-07-15T12:00:00Z</updated>
        <link href="http://www.example.org/entries/1" />
        <link rel="enclosure"
              type="audio/mpeg"
              title="MP3"
              href="http://www.example.org/myaudiofile.mp3"
              length="1234" />
        <link rel="enclosure"
              type="audio/mpeg"
              title="MP3"
              href=" http://www.example.org/myaudiofile.mp3 "
              length="1234" />
        </content>
      </entry>
    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")
    if err != nil {
        t.Fatal(err)
    }

    if len(feed.Entries) != 1 {
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if len(feed.Entries[0].Enclosures) != 1 {
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

    if feed.Entries[0].Enclosures[0].URL != "http://www.example.org/myaudiofile.mp3" {
        t.Errorf("Incorrect enclosure URL, got: %q", feed.Entries[0].Enclosures[0].URL)
    }
}

func TestParseEntryWithoutEnclosureURL(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">

@@ -1310,20 +1417,25 @@ func TestParseWithInvalidCharacterEntity(t *testing.T) {
func TestParseMediaGroup(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom" xmlns:media="http://search.yahoo.com/mrss/">
      <id>http://www.example.org/myfeed</id>
      <id>https://www.example.org/myfeed</id>
      <title>My Video Feed</title>
      <updated>2005-07-15T12:00:00Z</updated>
      <link href="http://example.org" />
      <link rel="self" href="http://example.org/myfeed" />
      <link href="https://example.org" />
      <link rel="self" href="https://example.org/myfeed" />
      <entry>
        <id>http://www.example.org/entries/1</id>
        <id>https://www.example.org/entries/1</id>
        <title>Some Video</title>
        <updated>2005-07-15T12:00:00Z</updated>
        <link href="http://www.example.org/entries/1" />
        <link href="https://www.example.org/entries/1" />
        <media:group>
          <media:title>Another title</media:title>
          <media:content url="https://www.youtube.com/v/abcd" type="application/x-shockwave-flash" width="640" height="390"/>
          <media:thumbnail url="https://example.org/thumbnail.jpg" width="480" height="360"/>
          <media:content url=" /v/efg " type="application/x-shockwave-flash" width="640" height="390"/>
          <media:content url=" " type="application/x-shockwave-flash" width="640" height="390"/>
          <media:thumbnail url="https://www.example.org/duplicate-thumbnail.jpg" width="480" height="360"/>
          <media:thumbnail url="https://www.example.org/duplicate-thumbnail.jpg" width="480" height="360"/>
          <media:thumbnail url=" /thumbnail2.jpg " width="480" height="360"/>
          <media:thumbnail url=" " width="480" height="360"/>
          <media:description>Some description
A website: http://example.org/</media:description>
        </media:group>

@@ -1336,18 +1448,10 @@ A website: http://example.org/</media:description>
    }

    if len(feed.Entries) != 1 {
        t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if feed.Entries[0].URL != "http://www.example.org/entries/1" {
        t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL)
    }

    if feed.Entries[0].Content != `Some description<br>A website: <a href="http://example.org/">http://example.org/</a>` {
        t.Errorf("Incorrect entry content, got: %q", feed.Entries[0].Content)
    }

    if len(feed.Entries[0].Enclosures) != 2 {
    if len(feed.Entries[0].Enclosures) != 4 {
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

@@ -1356,8 +1460,10 @@ A website: http://example.org/</media:description>
        mimeType string
        size     int64
    }{
        {"https://example.org/thumbnail.jpg", "image/*", 0},
        {"https://www.example.org/duplicate-thumbnail.jpg", "image/*", 0},
        {"https://example.org/thumbnail2.jpg", "image/*", 0},
        {"https://www.youtube.com/v/abcd", "application/x-shockwave-flash", 0},
        {"https://example.org/v/efg", "application/x-shockwave-flash", 0},
    }

    for index, enclosure := range feed.Entries[0].Enclosures {

@@ -1378,19 +1484,26 @@ A website: http://example.org/</media:description>
func TestParseMediaElements(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom" xmlns:media="http://search.yahoo.com/mrss/">
      <id>http://www.example.org/myfeed</id>
      <id>https://www.example.org/myfeed</id>
      <title>My Video Feed</title>
      <updated>2005-07-15T12:00:00Z</updated>
      <link href="http://example.org" />
      <link rel="self" href="http://example.org/myfeed" />
      <link href="https://example.org" />
      <link rel="self" href="https://example.org/myfeed" />
      <entry>
        <id>http://www.example.org/entries/1</id>
        <id>https://www.example.org/entries/1</id>
        <title>Some Video</title>
        <updated>2005-07-15T12:00:00Z</updated>
        <link href="http://www.example.org/entries/1" />
        <link href="https://www.example.org/entries/1" />
        <media:title>Another title</media:title>
        <media:content url="https://www.youtube.com/v/abcd" type="application/x-shockwave-flash" width="640" height="390"/>
        <media:thumbnail url="https://example.org/thumbnail.jpg" width="480" height="360"/>
        <media:content url=" /relative/media.mp4 " type="application/x-shockwave-flash" width="640" height="390"/>
        <media:content url=" " type="application/x-shockwave-flash" width="640" height="390"/>
        <media:thumbnail url="https://example.org/duplicated-thumbnail.jpg" width="480" height="360"/>
        <media:thumbnail url=" https://example.org/duplicated-thumbnail.jpg " width="480" height="360"/>
        <media:thumbnail url=" " width="480" height="360"/>
        <media:peerLink type="application/x-bittorrent" href=" http://www.example.org/sampleFile.torrent " />
        <media:peerLink type="application/x-bittorrent" href=" /sampleFile2.torrent" />
        <media:peerLink type="application/x-bittorrent" href=" " />
        <media:description>Some description
A website: http://example.org/</media:description>
      </entry>

@@ -1402,18 +1515,10 @@ A website: http://example.org/</media:description>
    }

    if len(feed.Entries) != 1 {
        t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if feed.Entries[0].URL != "http://www.example.org/entries/1" {
        t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL)
    }

    if feed.Entries[0].Content != `Some description<br>A website: <a href="http://example.org/">http://example.org/</a>` {
        t.Errorf("Incorrect entry content, got: %q", feed.Entries[0].Content)
    }

    if len(feed.Entries[0].Enclosures) != 2 {
    if len(feed.Entries[0].Enclosures) != 5 {
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

@@ -1422,8 +1527,11 @@ A website: http://example.org/</media:description>
        mimeType string
        size     int64
    }{
        {"https://example.org/thumbnail.jpg", "image/*", 0},
        {"https://example.org/duplicated-thumbnail.jpg", "image/*", 0},
        {"https://www.youtube.com/v/abcd", "application/x-shockwave-flash", 0},
        {"https://example.org/relative/media.mp4", "application/x-shockwave-flash", 0},
        {"http://www.example.org/sampleFile.torrent", "application/x-bittorrent", 0},
        {"https://example.org/sampleFile2.torrent", "application/x-bittorrent", 0},
    }

    for index, enclosure := range feed.Entries[0].Enclosures {

@@ -1608,27 +1716,18 @@ func TestAbsoluteCommentsURL(t *testing.T) {
    }
}

func TestParseFeedWithCategories(t *testing.T) {
func TestParseItemWithCategories(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
      <title>Example Feed</title>
      <link href="http://example.org/"/>
      <author>
        <name>Alice</name>
      </author>
      <author>
        <name>Bob</name>
      </author>

      <entry>
        <link href="http://example.org/2003/12/13/atom03"/>
        <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
        <link href="http://www.example.org/entries/1" />
        <updated>2003-12-13T18:30:02Z</updated>
        <summary>Some text.</summary>
        <category term='Tech' />
        <category term='ZZZZ' />
        <category term='Technology' label='Science' />
      </entry>

    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")

@@ -1637,22 +1736,53 @@ func TestParseFeedWithCategories(t *testing.T) {
    }

    if len(feed.Entries[0].Tags) != 2 {
        t.Errorf("Incorrect number of tags, got: %d", len(feed.Entries[0].Tags))
        t.Fatalf("Incorrect number of tags, got: %d", len(feed.Entries[0].Tags))
    }

    expected := "Tech"
    expected := "Science"
    result := feed.Entries[0].Tags[0]
    if result != expected {
        t.Errorf("Incorrect entry category, got %q instead of %q", result, expected)
    }

    expected = "Science"
    expected = "ZZZZ"
    result = feed.Entries[0].Tags[1]
    if result != expected {
        t.Errorf("Incorrect entry category, got %q instead of %q", result, expected)
    }
}

func TestParseFeedWithCategories(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
      <title>Example Feed</title>
      <link href="http://example.org/"/>
      <category term='Test' label='Some Label' />
      <category term='Test' label='Some Label' />
      <category term='Test' label='Some Label' />
      <entry>
        <link href="http://www.example.org/entries/1" />
        <updated>2003-12-13T18:30:02Z</updated>
        <summary>Some text.</summary>
      </entry>
    </feed>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)), "10")
    if err != nil {
        t.Fatal(err)
    }

    if len(feed.Entries[0].Tags) != 1 {
        t.Fatalf("Incorrect number of tags, got: %d", len(feed.Entries[0].Tags))
    }

    expected := "Some Label"
    result := feed.Entries[0].Tags[0]
    if result != expected {
        t.Errorf("Incorrect entry category, got %q instead of %q", result, expected)
    }
}

func TestParseFeedWithIconURL(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">

@@ -3,77 +3,91 @@

package atom // import "miniflux.app/v2/internal/reader/atom"

import "strings"
import (
    "strings"
)

type atomPerson struct {
    Name string `xml:"name"`
// Specs: https://datatracker.ietf.org/doc/html/rfc4287#section-3.2
type AtomPerson struct {
    // The "atom:name" element's content conveys a human-readable name for the author.
    // It MAY be the name of a corporation or other entity for which no individual authors can be named.
    // Person constructs MUST contain exactly one "atom:name" element, whose content MUST be a string.
    Name string `xml:"name"`

    // The "atom:email" element's content conveys an e-mail address associated with the Person construct.
    // Person constructs MAY contain an atom:email element, but MUST NOT contain more than one.
    // Its content MUST be an e-mail address [RFC2822].
    // Ordering of the element children of Person constructs MUST NOT be considered significant.
    Email string `xml:"email"`
}

func (a *atomPerson) String() string {
    name := ""

    switch {
    case a.Name != "":
        name = a.Name
    case a.Email != "":
        name = a.Email
func (a *AtomPerson) PersonName() string {
    name := strings.TrimSpace(a.Name)
    if name != "" {
        return name
    }

    return strings.TrimSpace(name)
    return strings.TrimSpace(a.Email)
}

type atomAuthors []*atomPerson
type AtomPersons []*AtomPerson

func (a atomAuthors) String() string {
    var authors []string
func (a AtomPersons) PersonNames() []string {
    var names []string
    authorNamesMap := make(map[string]bool)

    for _, person := range a {
        authors = append(authors, person.String())
        personName := person.PersonName()
        if _, ok := authorNamesMap[personName]; !ok {
            names = append(names, personName)
            authorNamesMap[personName] = true
        }
    }

    return strings.Join(authors, ", ")
    return names
}

type atomLink struct {
    URL string `xml:"href,attr"`
// Specs: https://datatracker.ietf.org/doc/html/rfc4287#section-4.2.7
type AtomLink struct {
    Href   string `xml:"href,attr"`
    Type   string `xml:"type,attr"`
    Rel    string `xml:"rel,attr"`
    Length string `xml:"length,attr"`
    Title  string `xml:"title,attr"`
}

type atomLinks []*atomLink
type AtomLinks []*AtomLink

func (a atomLinks) originalLink() string {
func (a AtomLinks) OriginalLink() string {
    for _, link := range a {
        if strings.EqualFold(link.Rel, "alternate") {
            return strings.TrimSpace(link.URL)
            return strings.TrimSpace(link.Href)
        }

        if link.Rel == "" && (link.Type == "" || link.Type == "text/html") {
            return strings.TrimSpace(link.URL)
            return strings.TrimSpace(link.Href)
        }
    }

    return ""
}

func (a atomLinks) firstLinkWithRelation(relation string) string {
func (a AtomLinks) firstLinkWithRelation(relation string) string {
    for _, link := range a {
        if strings.EqualFold(link.Rel, relation) {
            return strings.TrimSpace(link.URL)
            return strings.TrimSpace(link.Href)
        }
    }

    return ""
}

func (a atomLinks) firstLinkWithRelationAndType(relation string, contentTypes ...string) string {
func (a AtomLinks) firstLinkWithRelationAndType(relation string, contentTypes ...string) string {
    for _, link := range a {
        if strings.EqualFold(link.Rel, relation) {
            for _, contentType := range contentTypes {
                if strings.EqualFold(link.Type, contentType) {
                    return strings.TrimSpace(link.URL)
                    return strings.TrimSpace(link.Href)
                }
            }
        }

@@ -81,3 +95,61 @@ func (a atomLinks) firstLinkWithRelationAndType(relation string, contentTypes ...string) string {

    return ""
}

func (a AtomLinks) findAllLinksWithRelation(relation string) []*AtomLink {
    var links []*AtomLink

    for _, link := range a {
        if strings.EqualFold(link.Rel, relation) {
            link.Href = strings.TrimSpace(link.Href)
            if link.Href != "" {
                links = append(links, link)
            }
        }
    }

    return links
}

// The "atom:category" element conveys information about a category
// associated with an entry or feed. This specification assigns no
// meaning to the content (if any) of this element.
//
// Specs: https://datatracker.ietf.org/doc/html/rfc4287#section-4.2.2
type AtomCategory struct {
    // The "term" attribute is a string that identifies the category to
    // which the entry or feed belongs. Category elements MUST have a
    // "term" attribute.
    Term string `xml:"term,attr"`

    // The "scheme" attribute is an IRI that identifies a categorization
    // scheme. Category elements MAY have a "scheme" attribute.
    Scheme string `xml:"scheme,attr"`

    // The "label" attribute provides a human-readable label for display in
    // end-user applications. The content of the "label" attribute is
    // Language-Sensitive. Entities such as "&amp;" and "&lt;" represent
    // their corresponding characters ("&" and "<", respectively), not
    // markup. Category elements MAY have a "label" attribute.
    Label string `xml:"label,attr"`
}

type AtomCategories []AtomCategory

func (ac AtomCategories) CategoryNames() []string {
    var categories []string

    for _, category := range ac {
        label := strings.TrimSpace(category.Label)
        if label != "" {
            categories = append(categories, label)
        } else {
            term := strings.TrimSpace(category.Term)
            if term != "" {
                categories = append(categories, term)
            }
        }
    }

    return categories
}
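Reviewer note: as CategoryNames above shows, a category's label takes precedence over its term, and blank values are skipped. A short usage sketch, assuming the AtomCategory/AtomCategories types above are in scope:

categories := AtomCategories{
    {Term: "Technology", Label: "Science"},
    {Term: "ZZZZ"},
    {Term: "   "}, // a blank term with no label is skipped entirely
}
fmt.Println(categories.CategoryNames()) // [Science ZZZZ]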
@@ -11,22 +11,20 @@ import (
    xml_decoder "miniflux.app/v2/internal/reader/xml"
)

type atomFeed interface {
    Transform(baseURL string) *model.Feed
}

// Parse returns a normalized feed struct from an Atom feed.
func Parse(baseURL string, r io.ReadSeeker, version string) (*model.Feed, error) {
    var rawFeed atomFeed
    if version == "0.3" {
        rawFeed = new(atom03Feed)
    } else {
        rawFeed = new(atom10Feed)
    switch version {
    case "0.3":
        atomFeed := new(Atom03Feed)
        if err := xml_decoder.NewXMLDecoder(r).Decode(atomFeed); err != nil {
            return nil, fmt.Errorf("atom: unable to parse Atom 0.3 feed: %w", err)
        }
        return NewAtom03Adapter(atomFeed).BuildFeed(baseURL), nil
    default:
        atomFeed := new(Atom10Feed)
        if err := xml_decoder.NewXMLDecoder(r).Decode(atomFeed); err != nil {
            return nil, fmt.Errorf("atom: unable to parse Atom 1.0 feed: %w", err)
        }
        return NewAtom10Adapter(atomFeed).BuildFeed(baseURL), nil
    }

    if err := xml_decoder.NewXMLDecoder(r).Decode(rawFeed); err != nil {
        return nil, fmt.Errorf("atom: unable to parse feed: %w", err)
    }

    return rawFeed.Transform(baseURL), nil
}
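Reviewer note: a hedged usage sketch of the rewritten Parse entry point. Here `document` is a placeholder for the raw feed XML, and the version string "10" mirrors what the tests pass for Atom 1.0; anything other than "0.3" takes the Atom 1.0 path:

feed, err := atom.Parse("https://example.org/feed.xml", strings.NewReader(document), "10")
if err != nil {
    return err
}
fmt.Println(feed.Title)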
@@ -35,8 +35,3 @@ func CharsetReader(charsetLabel string, input io.Reader) (io.Reader, error) {
    // Transform document to UTF-8 from the specified encoding in XML prolog.
    return charset.NewReaderLabel(charsetLabel, r)
}

// CharsetReaderFromContentType is used when the encoding is not specified for the input document.
func CharsetReaderFromContentType(contentType string, input io.Reader) (io.Reader, error) {
    return charset.NewReader(input, contentType)
}
@@ -10,6 +10,8 @@ import (
    "io"
    "net"
    "net/http"
    "net/url"
    "os"

    "miniflux.app/v2/internal/locale"
)

@@ -94,23 +96,18 @@ func (r *ResponseHandler) ReadBody(maxBodySize int64) ([]byte, *locale.LocalizedErrorWrapper) {

func (r *ResponseHandler) LocalizedError() *locale.LocalizedErrorWrapper {
    if r.clientErr != nil {
        switch r.clientErr.(type) {
        case x509.CertificateInvalidError, x509.HostnameError:
        switch {
        case isSSLError(r.clientErr):
            return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.tls_error", r.clientErr)
        case *net.OpError:
        case isNetworkError(r.clientErr):
            return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.network_operation", r.clientErr)
        case net.Error:
            networkErr := r.clientErr.(net.Error)
            if networkErr.Timeout() {
                return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.network_timeout", r.clientErr)
            }
        }

        if errors.Is(r.clientErr, io.EOF) {
        case os.IsTimeout(r.clientErr):
            return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.network_timeout", r.clientErr)
        case errors.Is(r.clientErr, io.EOF):
            return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.http_empty_response")
        default:
            return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.http_client_error", r.clientErr)
        }

        return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.http_client_error", r.clientErr)
    }

    switch r.httpResponse.StatusCode {

@@ -145,3 +142,32 @@ func (r *ResponseHandler) LocalizedError() *locale.LocalizedErrorWrapper {

    return nil
}

func isNetworkError(err error) bool {
    if _, ok := err.(*url.Error); ok {
        return true
    }
    if err == io.EOF {
        return true
    }
    var opErr *net.OpError
    if ok := errors.As(err, &opErr); ok {
        return true
    }
    return false
}

func isSSLError(err error) bool {
    var certErr x509.UnknownAuthorityError
    if errors.As(err, &certErr) {
        return true
    }

    var hostErr x509.HostnameError
    if errors.As(err, &hostErr) {
        return true
    }

    var algErr x509.InsecureAlgorithmError
    return errors.As(err, &algErr)
}
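Reviewer note: the move from a type switch to the isNetworkError/isSSLError helpers matters because errors.As unwraps wrapped errors. A *net.OpError buried inside the *url.Error returned by net/http is still detected, whereas a type switch on the top-level error would miss it. A minimal illustration:

var opErr *net.OpError
if errors.As(err, &opErr) {
    // Matches even when err is a *url.Error wrapping the *net.OpError.
    fmt.Println("network operation failed:", opErr.Op)
}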
@@ -236,14 +236,18 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
    requestBuilder.WithUsernameAndPassword(originalFeed.Username, originalFeed.Password)
    requestBuilder.WithUserAgent(originalFeed.UserAgent, config.Opts.HTTPClientUserAgent())
    requestBuilder.WithCookie(originalFeed.Cookie)
    requestBuilder.WithETag(originalFeed.EtagHeader)
    requestBuilder.WithLastModified(originalFeed.LastModifiedHeader)
    requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
    requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
    requestBuilder.UseProxy(originalFeed.FetchViaProxy)
    requestBuilder.IgnoreTLSErrors(originalFeed.AllowSelfSignedCertificates)
    requestBuilder.DisableHTTP2(originalFeed.DisableHTTP2)

    ignoreHTTPCache := originalFeed.IgnoreHTTPCache || forceRefresh
    if !ignoreHTTPCache {
        requestBuilder.WithETag(originalFeed.EtagHeader)
        requestBuilder.WithLastModified(originalFeed.LastModifiedHeader)
    }

    responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(originalFeed.FeedURL))
    defer responseHandler.Close()

@@ -261,7 +265,7 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
        return localizedError
    }

    if originalFeed.IgnoreHTTPCache || responseHandler.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) {
    if ignoreHTTPCache || responseHandler.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) {
        slog.Debug("Feed modified",
            slog.Int64("user_id", userID),
            slog.Int64("feed_id", feedID),
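Reviewer note: skipping WithETag/WithLastModified on a forced refresh is what guarantees a full response, since without the validators the server cannot answer 304 Not Modified. A plain net/http sketch of the same mechanics, where feedURL, etag, lastModified, and forceRefresh are placeholders:

req, err := http.NewRequest(http.MethodGet, feedURL, nil)
if err != nil {
    return err
}
if !forceRefresh {
    // With these validators the server may legitimately answer 304 Not Modified.
    req.Header.Set("If-None-Match", etag)
    req.Header.Set("If-Modified-Since", lastModified)
}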
@@ -15,11 +15,11 @@ import (
    "miniflux.app/v2/internal/config"
    "miniflux.app/v2/internal/crypto"
    "miniflux.app/v2/internal/model"
    "miniflux.app/v2/internal/reader/encoding"
    "miniflux.app/v2/internal/reader/fetcher"
    "miniflux.app/v2/internal/urllib"

    "github.com/PuerkitoBio/goquery"
    "golang.org/x/net/html/charset"
)

type IconFinder struct {

@@ -191,7 +191,7 @@ func findIconURLsFromHTMLDocument(body io.Reader, contentType string) ([]string,
        "link[rel='apple-touch-icon-precomposed.png']",
    }

    htmlDocumentReader, err := encoding.CharsetReaderFromContentType(contentType, body)
    htmlDocumentReader, err := charset.NewReader(body, contentType)
    if err != nil {
        return nil, fmt.Errorf("icon: unable to create charset reader: %w", err)
    }
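Reviewer note: charset.NewReader from golang.org/x/net/html/charset determines the encoding from the Content-Type header and, failing that, by sniffing the document itself, then returns a reader that yields UTF-8. A hedged sketch of how the replacement call is typically wired up, where resp is a placeholder *http.Response:

utf8Reader, err := charset.NewReader(resp.Body, resp.Header.Get("Content-Type"))
if err != nil {
    return nil, err
}
doc, err := goquery.NewDocumentFromReader(utf8Reader)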
@@ -5,7 +5,7 @@ package json // import "miniflux.app/v2/internal/reader/json"

import (
    "log/slog"
    "sort"
    "slices"
    "strings"
    "time"

@@ -24,15 +24,15 @@ func NewJSONAdapter(jsonFeed *JSONFeed) *JSONAdapter {
    return &JSONAdapter{jsonFeed}
}

func (j *JSONAdapter) BuildFeed(feedURL string) *model.Feed {
func (j *JSONAdapter) BuildFeed(baseURL string) *model.Feed {
    feed := &model.Feed{
        Title:   strings.TrimSpace(j.jsonFeed.Title),
        FeedURL: j.jsonFeed.FeedURL,
        SiteURL: j.jsonFeed.HomePageURL,
        FeedURL: strings.TrimSpace(j.jsonFeed.FeedURL),
        SiteURL: strings.TrimSpace(j.jsonFeed.HomePageURL),
    }

    if feed.FeedURL == "" {
        feed.FeedURL = feedURL
        feed.FeedURL = strings.TrimSpace(baseURL)
    }

    // Fallback to the feed URL if the site URL is empty.

@@ -40,11 +40,11 @@ func (j *JSONAdapter) BuildFeed(feedURL string) *model.Feed {
        feed.SiteURL = feed.FeedURL
    }

    if feedURL, err := urllib.AbsoluteURL(feedURL, j.jsonFeed.FeedURL); err == nil {
    if feedURL, err := urllib.AbsoluteURL(baseURL, feed.FeedURL); err == nil {
        feed.FeedURL = feedURL
    }

    if siteURL, err := urllib.AbsoluteURL(feedURL, j.jsonFeed.HomePageURL); err == nil {
    if siteURL, err := urllib.AbsoluteURL(baseURL, feed.SiteURL); err == nil {
        feed.SiteURL = siteURL
    }

@@ -98,7 +98,6 @@ func (j *JSONAdapter) BuildFeed(feedURL string) *model.Feed {
    }

    // Populate the entry date.
    entry.Date = time.Now()
    for _, value := range []string{item.DatePublished, item.DateModified} {
        value = strings.TrimSpace(value)
        if value != "" {

@@ -114,26 +113,26 @@ func (j *JSONAdapter) BuildFeed(feedURL string) *model.Feed {
            }
        }
    }
    if entry.Date.IsZero() {
        entry.Date = time.Now()
    }

    // Populate the entry author.
    itemAuthors := append(item.Authors, j.jsonFeed.Authors...)
    itemAuthors := j.jsonFeed.Authors
    itemAuthors = append(itemAuthors, item.Authors...)
    itemAuthors = append(itemAuthors, item.Author, j.jsonFeed.Author)

    authorNamesMap := make(map[string]bool)
    var authorNames []string
    for _, author := range itemAuthors {
        authorName := strings.TrimSpace(author.Name)
        if authorName != "" {
            authorNamesMap[authorName] = true
            authorNames = append(authorNames, authorName)
        }
    }

    var authors []string
    for authorName := range authorNamesMap {
        authors = append(authors, authorName)
    }

    sort.Strings(authors)
    entry.Author = strings.Join(authors, ", ")
    slices.Sort(authorNames)
    authorNames = slices.Compact(authorNames)
    entry.Author = strings.Join(authorNames, ", ")

    // Populate the entry enclosures.
    for _, attachment := range item.Attachments {
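Reviewer note: the author handling above replaces the map-plus-sort dance with slices.Sort followed by slices.Compact. Note that slices.Compact only removes consecutive duplicates, which is why the sort must come first. A minimal illustration:

names := []string{"Bob", "Alice", "Bob"}
slices.Sort(names)
names = slices.Compact(names)
fmt.Println(names) // [Alice Bob]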
@@ -177,6 +177,82 @@ func TestParsePodcast(t *testing.T) {
    }
}

func TestParseFeedWithFeedURLWithTrailingSpace(t *testing.T) {
    data := `{
        "version": "https://jsonfeed.org/version/1",
        "title": "My Example Feed",
        "home_page_url": "https://example.org/",
        "feed_url": "https://example.org/feed.json ",
        "items": []
    }`

    feed, err := Parse("https://example.org/feed.json", bytes.NewBufferString(data))
    if err != nil {
        t.Fatal(err)
    }

    if feed.FeedURL != "https://example.org/feed.json" {
        t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL)
    }
}

func TestParseFeedWithRelativeFeedURL(t *testing.T) {
    data := `{
        "version": "https://jsonfeed.org/version/1",
        "title": "My Example Feed",
        "home_page_url": "https://example.org/",
        "feed_url": "/feed.json",
        "items": []
    }`

    feed, err := Parse("https://example.org/feed.json", bytes.NewBufferString(data))
    if err != nil {
        t.Fatal(err)
    }

    if feed.FeedURL != "https://example.org/feed.json" {
        t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL)
    }
}

func TestParseFeedSiteURLWithTrailingSpace(t *testing.T) {
    data := `{
        "version": "https://jsonfeed.org/version/1",
        "title": "My Example Feed",
        "home_page_url": "https://example.org/ ",
        "feed_url": "https://example.org/feed.json",
        "items": []
    }`

    feed, err := Parse("https://example.org/feed.json", bytes.NewBufferString(data))
    if err != nil {
        t.Fatal(err)
    }

    if feed.SiteURL != "https://example.org/" {
        t.Errorf("Incorrect site URL, got: %s", feed.SiteURL)
    }
}

func TestParseFeedWithRelativeSiteURL(t *testing.T) {
    data := `{
        "version": "https://jsonfeed.org/version/1",
        "title": "My Example Feed",
        "home_page_url": "/home ",
        "feed_url": "https://example.org/feed.json",
        "items": []
    }`

    feed, err := Parse("https://example.org/feed.json", bytes.NewBufferString(data))
    if err != nil {
        t.Fatal(err)
    }

    if feed.SiteURL != "https://example.org/home" {
        t.Errorf("Incorrect site URL, got: %s", feed.SiteURL)
    }
}

func TestParseFeedWithoutTitle(t *testing.T) {
    data := `{
        "version": "https://jsonfeed.org/version/1",

@@ -772,6 +848,42 @@ func TestParseFeedIcon(t *testing.T) {
    }
}

func TestParseFeedWithRelativeAttachmentURL(t *testing.T) {
    data := `{
        "version": "https://jsonfeed.org/version/1",
        "title": "My Example Feed",
        "home_page_url": "https://example.org/",
        "feed_url": "https://example.org/feed.json",
        "items": [
            {
                "id": "2",
                "content_text": "This is a second item.",
                "url": "https://example.org/second-item",
                "attachments": [
                    {
                        "url": " /attachment.mp3 ",
                        "mime_type": "audio/mpeg",
                        "size_in_bytes": 123456
                    }
                ]
            }
        ]
    }`

    feed, err := Parse("https://example.org/feed.json", bytes.NewBufferString(data))
    if err != nil {
        t.Fatal(err)
    }

    if len(feed.Entries[0].Enclosures) != 1 {
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

    if feed.Entries[0].Enclosures[0].URL != "https://example.org/attachment.mp3" {
        t.Errorf("Incorrect enclosure URL, got: %q", feed.Entries[0].Enclosures[0].URL)
    }
}

func TestParseInvalidJSON(t *testing.T) {
    data := `garbage`
    _, err := Parse("https://example.org/feed.json", bytes.NewBufferString(data))
@@ -86,17 +86,17 @@ type Content struct {

// MimeType returns the attachment mime type.
func (mc *Content) MimeType() string {
    switch {
    case mc.Type == "" && mc.Medium == "image":
        return "image/*"
    case mc.Type == "" && mc.Medium == "video":
        return "video/*"
    case mc.Type == "" && mc.Medium == "audio":
        return "audio/*"
    case mc.Type == "" && mc.Medium == "video":
        return "video/*"
    case mc.Type != "":
    if mc.Type != "" {
        return mc.Type
    }

    switch mc.Medium {
    case "image":
        return "image/*"
    case "video":
        return "video/*"
    case "audio":
        return "audio/*"
    default:
        return "application/octet-stream"
    }

@@ -104,9 +104,6 @@ func (mc *Content) MimeType() string {

// Size returns the attachment size.
func (mc *Content) Size() int64 {
    if mc.FileSize == "" {
        return 0
    }
    size, _ := strconv.ParseInt(mc.FileSize, 10, 0)
    return size
}
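Reviewer note: the rewritten MimeType collapses the duplicated switch cases flagged by go-critic into a fallback chain: explicit type first, then a wildcard derived from the medium, then a generic default. A standalone sketch of that chain with hypothetical names:

func mimeTypeFor(explicitType, medium string) string {
    if explicitType != "" {
        return explicitType
    }
    switch medium {
    case "image":
        return "image/*"
    case "video":
        return "video/*"
    case "audio":
        return "audio/*"
    default:
        return "application/octet-stream"
    }
}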
@@ -85,7 +85,35 @@ func FuzzParse(f *testing.F) {
    })
}

func TestParseAtom(t *testing.T) {
func TestParseAtom03Feed(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed version="0.3" xmlns="http://purl.org/atom/ns#">
      <title>dive into mark</title>
      <link rel="alternate" type="text/html" href="http://diveintomark.org/"/>
      <modified>2003-12-13T18:30:02Z</modified>
      <author><name>Mark Pilgrim</name></author>
      <entry>
        <title>Atom 0.3 snapshot</title>
        <link rel="alternate" type="text/html" href="http://diveintomark.org/2003/12/13/atom03"/>
        <id>tag:diveintomark.org,2003:3.2397</id>
        <issued>2003-12-13T08:29:29-04:00</issued>
        <modified>2003-12-13T18:30:02Z</modified>
        <summary type="text/plain">It's a test</summary>
        <content type="text/html" mode="escaped"><![CDATA[<p>HTML content</p>]]></content>
      </entry>
    </feed>`

    feed, err := ParseFeed("https://example.org/", strings.NewReader(data))
    if err != nil {
        t.Error(err)
    }

    if feed.Title != "dive into mark" {
        t.Errorf("Incorrect title, got: %s", feed.Title)
    }
}

func TestParseAtom10Feed(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">

@@ -47,8 +47,7 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
            slog.Int64("feed_id", feed.ID),
            slog.String("feed_url", feed.FeedURL),
        )

        if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) {
        if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) || !isRecentEntry(entry) {
            continue
        }

@@ -116,58 +115,65 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
}

func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
    if feed.BlocklistRules != "" {
        containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
            return matchField(feed.BlocklistRules, tag)
        })
    if feed.BlocklistRules == "" {
        return false
    }

        if matchField(feed.BlocklistRules, entry.URL) || matchField(feed.BlocklistRules, entry.Title) || matchField(feed.BlocklistRules, entry.Author) || containsBlockedTag {
            slog.Debug("Blocking entry based on rule",
                slog.Int64("entry_id", entry.ID),
                slog.String("entry_url", entry.URL),
                slog.Int64("feed_id", feed.ID),
                slog.String("feed_url", feed.FeedURL),
                slog.String("rule", feed.BlocklistRules),
            )
            return true
        }
    compiledBlocklist, err := regexp.Compile(feed.BlocklistRules)
    if err != nil {
        slog.Debug("Failed on regexp compilation",
            slog.String("pattern", feed.BlocklistRules),
            slog.Any("error", err),
        )
        return false
    }

    containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
        return compiledBlocklist.MatchString(tag)
    })

    if compiledBlocklist.MatchString(entry.URL) || compiledBlocklist.MatchString(entry.Title) || compiledBlocklist.MatchString(entry.Author) || containsBlockedTag {
        slog.Debug("Blocking entry based on rule",
            slog.Int64("entry_id", entry.ID),
            slog.String("entry_url", entry.URL),
            slog.Int64("feed_id", feed.ID),
            slog.String("feed_url", feed.FeedURL),
            slog.String("rule", feed.BlocklistRules),
        )
        return true
    }

    return false
}

func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
    if feed.KeeplistRules != "" {
        containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
            return matchField(feed.KeeplistRules, tag)
        })

        if matchField(feed.KeeplistRules, entry.URL) || matchField(feed.KeeplistRules, entry.Title) || matchField(feed.KeeplistRules, entry.Author) || containsAllowedTag {
            slog.Debug("Allow entry based on rule",
                slog.Int64("entry_id", entry.ID),
                slog.String("entry_url", entry.URL),
                slog.Int64("feed_id", feed.ID),
                slog.String("feed_url", feed.FeedURL),
                slog.String("rule", feed.KeeplistRules),
            )
            return true
        }
        return false
    if feed.KeeplistRules == "" {
        return true
    }
    return true
}

func matchField(pattern, value string) bool {
    match, err := regexp.MatchString(pattern, value)
    compiledKeeplist, err := regexp.Compile(feed.KeeplistRules)
    if err != nil {
        slog.Debug("Failed on regexp match",
            slog.String("pattern", pattern),
            slog.String("value", value),
            slog.Bool("match", match),
        slog.Debug("Failed on regexp compilation",
            slog.String("pattern", feed.KeeplistRules),
            slog.Any("error", err),
        )
        return false
    }
    return match
    containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
        return compiledKeeplist.MatchString(tag)
    })

    if compiledKeeplist.MatchString(entry.URL) || compiledKeeplist.MatchString(entry.Title) || compiledKeeplist.MatchString(entry.Author) || containsAllowedTag {
        slog.Debug("Allow entry based on rule",
            slog.Int64("entry_id", entry.ID),
            slog.String("entry_url", entry.URL),
            slog.Int64("feed_id", feed.ID),
            slog.String("feed_url", feed.FeedURL),
            slog.String("rule", feed.KeeplistRules),
        )
        return true
    }
    return false
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.

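The substantive change above: the old matchField helper called regexp.MatchString, which recompiles the pattern for every field checked, and an invalid rule was only reported per call. The new code compiles each rule once with regexp.Compile and reuses the compiled matcher for the URL, title, author, and every tag. A self-contained sketch of that pattern; the rule string and entry fields here are made up for illustration:

package main

import (
    "fmt"
    "regexp"
    "slices"
)

func main() {
    // Hypothetical blocklist rule and entry fields.
    pattern := `(?i)sponsored|advertorial`
    fields := []string{"https://example.org/post-1", "Sponsored: a great offer", "alice"}
    tags := []string{"news", "deals"}

    // Compile once; an invalid rule is reported once instead of once per field.
    re, err := regexp.Compile(pattern)
    if err != nil {
        fmt.Println("invalid rule, entry kept:", err)
        return
    }

    blocked := slices.ContainsFunc(fields, re.MatchString) ||
        slices.ContainsFunc(tags, re.MatchString)
    fmt.Println("blocked:", blocked) // blocked: true
}
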
@@ -394,11 +400,11 @@ func parseISO8601(from string) (time.Duration, error) {

        switch name {
        case "hour":
            d = d + (time.Duration(val) * time.Hour)
            d += (time.Duration(val) * time.Hour)
        case "minute":
            d = d + (time.Duration(val) * time.Minute)
            d += (time.Duration(val) * time.Minute)
        case "second":
            d = d + (time.Duration(val) * time.Second)
            d += (time.Duration(val) * time.Second)
        default:
            return 0, fmt.Errorf("unknown field %s", name)
        }

@@ -406,3 +412,10 @@ func parseISO8601(from string) (time.Duration, error) {

    return d, nil
}

func isRecentEntry(entry *model.Entry) bool {
    if config.Opts.FilterEntryMaxAgeDays() == 0 || entry.Date.After(time.Now().AddDate(0, 0, -config.Opts.FilterEntryMaxAgeDays())) {
        return true
    }
    return false
}

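isRecentEntry is the filter behind the new FILTER_ENTRY_MAX_AGE_DAYS option: a value of 0 (the default) disables it, otherwise entries published longer ago than the configured number of days are skipped during processing. A minimal sketch of the same predicate with the config lookup replaced by a plain parameter:

package main

import (
    "fmt"
    "time"
)

// isRecent mirrors isRecentEntry; maxAgeDays == 0 disables the filter.
func isRecent(published time.Time, maxAgeDays int) bool {
    return maxAgeDays == 0 || published.After(time.Now().AddDate(0, 0, -maxAgeDays))
}

func main() {
    old := time.Now().AddDate(0, 0, -90)
    fmt.Println(isRecent(old, 0))  // true: filter disabled
    fmt.Println(isRecent(old, 30)) // false: 90 days old, 30-day cutoff
}
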
@@ -7,6 +7,7 @@ import (
    "testing"
    "time"

    "miniflux.app/v2/internal/config"
    "miniflux.app/v2/internal/model"
)

@@ -92,3 +93,27 @@ func TestParseISO8601(t *testing.T) {
        }
    }
}

func TestIsRecentEntry(t *testing.T) {
    parser := config.NewParser()
    var err error
    config.Opts, err = parser.ParseEnvironmentVariables()
    if err != nil {
        t.Fatalf(`Parsing failure: %v`, err)
    }
    var scenarios = []struct {
        entry    *model.Entry
        expected bool
    }{
        {&model.Entry{Title: "Example1", Date: time.Date(2005, 5, 1, 05, 05, 05, 05, time.UTC)}, true},
        {&model.Entry{Title: "Example2", Date: time.Date(2010, 5, 1, 05, 05, 05, 05, time.UTC)}, true},
        {&model.Entry{Title: "Example3", Date: time.Date(2020, 5, 1, 05, 05, 05, 05, time.UTC)}, true},
        {&model.Entry{Title: "Example4", Date: time.Date(2024, 3, 15, 05, 05, 05, 05, time.UTC)}, true},
    }
    for _, tc := range scenarios {
        result := isRecentEntry(tc.entry)
        if tc.expected != result {
            t.Errorf(`Unexpected result, got %v for entry %q`, result, tc.entry.Title)
        }
    }
}

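Note that every scenario above expects true: with no environment override, FILTER_ENTRY_MAX_AGE_DAYS defaults to 0 and the filter is disabled. A hedged sketch of how the false branch could also be exercised, assuming this runs inside the same test (so the unexported isRecentEntry is reachable), that an "os" import is added, and that re-parsing the environment is the only setup needed:

    // Hypothetical extra scenario, not part of the commit.
    os.Setenv("FILTER_ENTRY_MAX_AGE_DAYS", "30")
    config.Opts, err = config.NewParser().ParseEnvironmentVariables()
    if err != nil {
        t.Fatalf(`Parsing failure: %v`, err)
    }

    oldEntry := &model.Entry{Title: "Old", Date: time.Now().AddDate(0, 0, -90)}
    if isRecentEntry(oldEntry) {
        t.Errorf(`Expected an entry older than the cutoff to be filtered out`)
    }
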
@@ -24,18 +24,18 @@ func NewRDFAdapter(rdf *RDF) *RDFAdapter {
    return &RDFAdapter{rdf}
}

func (r *RDFAdapter) BuildFeed(feedURL string) *model.Feed {
func (r *RDFAdapter) BuildFeed(baseURL string) *model.Feed {
    feed := &model.Feed{
        Title:   stripTags(r.rdf.Channel.Title),
        FeedURL: feedURL,
        SiteURL: r.rdf.Channel.Link,
        FeedURL: strings.TrimSpace(baseURL),
        SiteURL: strings.TrimSpace(r.rdf.Channel.Link),
    }

    if feed.Title == "" {
        feed.Title = feedURL
        feed.Title = baseURL
    }

    if siteURL, err := urllib.AbsoluteURL(feedURL, r.rdf.Channel.Link); err == nil {
    if siteURL, err := urllib.AbsoluteURL(feed.FeedURL, feed.SiteURL); err == nil {
        feed.SiteURL = siteURL
    }

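Both fixes in this adapter funnel through urllib.AbsoluteURL: the channel link is trimmed and then resolved against the (also trimmed) feed URL, so a relative <link> such as /test/index.html becomes absolute. A rough standalone equivalent using only the standard library; the helper name is mine, not miniflux's:

package main

import (
    "fmt"
    "net/url"
    "strings"
)

// absoluteURL resolves a possibly-relative link against a base URL,
// approximating what urllib.AbsoluteURL does in the diff.
func absoluteURL(base, input string) (string, error) {
    u, err := url.Parse(strings.TrimSpace(input))
    if err != nil {
        return "", err
    }
    if u.IsAbs() {
        return u.String(), nil
    }
    b, err := url.Parse(base)
    if err != nil {
        return "", err
    }
    return b.ResolveReference(u).String(), nil
}

func main() {
    s, _ := absoluteURL("http://example.org/feed", " /test/index.html ")
    fmt.Println(s) // http://example.org/test/index.html
}
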
@@ -289,7 +289,37 @@ func TestParseRDFFeedWithRelativeLink(t *testing.T) {
    xmlns="http://purl.org/rss/1.0/">
    <channel>
        <title>Example Feed</title>
        <link>/test/index.html</link>
        <link>/test/index.html </link>
    </channel>
    <item>
        <title>Example</title>
        <link>http://example.org/item</link>
        <description>Test</description>
    </item>
    </rdf:RDF>`

    feed, err := Parse("http://example.org/feed", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if feed.SiteURL != "http://example.org/test/index.html" {
        t.Errorf(`Incorrect SiteURL, got: %q`, feed.SiteURL)
    }

    if feed.FeedURL != "http://example.org/feed" {
        t.Errorf(`Incorrect FeedURL, got: %q`, feed.FeedURL)
    }
}

func TestParseRDFFeedSiteURLWithTrailingSpace(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rdf:RDF
    xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
    xmlns="http://purl.org/rss/1.0/">
    <channel>
        <title>Example Feed</title>
        <link>http://example.org/test/index.html </link>
    </channel>
    <item>
        <title>Example</title>

@@ -45,11 +45,12 @@ func (c *candidate) String() string {
    id, _ := c.selection.Attr("id")
    class, _ := c.selection.Attr("class")

    if id != "" && class != "" {
    switch {
    case id != "" && class != "":
        return fmt.Sprintf("%s#%s.%s => %f", c.Node().DataAtom, id, class, c.score)
    } else if id != "" {
    case id != "":
        return fmt.Sprintf("%s#%s => %f", c.Node().DataAtom, id, c.score)
    } else if class != "" {
    case class != "":
        return fmt.Sprintf("%s.%s => %f", c.Node().DataAtom, class, c.score)
    }

@@ -222,7 +223,7 @@ func getCandidates(document *goquery.Document) candidateList {
    // should have a relatively small link density (5% or less) and be mostly
    // unaffected by this operation
    for _, candidate := range candidates {
        candidate.score = candidate.score * (1 - getLinkDensity(candidate.selection))
        candidate.score *= (1 - getLinkDensity(candidate.selection))
    }

    return candidates

@@ -14,6 +14,8 @@ import (

    "miniflux.app/v2/internal/config"

    nethtml "golang.org/x/net/html"

    "github.com/PuerkitoBio/goquery"
    "github.com/yuin/goldmark"
    goldmarkhtml "github.com/yuin/goldmark/renderer/html"

@@ -301,10 +303,6 @@ func replaceTextLinks(input string) string {
    return textLinkRegex.ReplaceAllString(input, `<a href="${1}">${1}</a>`)
}

func replaceLineFeeds(input string) string {
    return strings.ReplaceAll(input, "\n", "<br>")
}

func replaceCustom(entryContent string, searchTerm string, replaceTerm string) string {
    re, err := regexp.Compile(searchTerm)
    if err == nil {

@@ -334,7 +332,7 @@ func addCastopodEpisode(entryURL, entryContent string) string {
func applyFuncOnTextContent(entryContent string, selector string, repl func(string) string) string {
    var treatChildren func(i int, s *goquery.Selection)
    treatChildren = func(i int, s *goquery.Selection) {
        if s.Nodes[0].Type == 1 {
        if s.Nodes[0].Type == nethtml.TextNode {
            s.ReplaceWithHtml(repl(s.Nodes[0].Data))
        } else {
            s.Contents().Each(treatChildren)

@@ -378,7 +376,8 @@ func addHackerNewsLinksUsing(entryContent, app string) string {
            return
        }

        if app == "opener" {
        switch app {
        case "opener":
            params := url.Values{}
            params.Add("url", hn_uri.String())

@@ -391,12 +390,12 @@ func addHackerNewsLinksUsing(entryContent, app string) string {

            open_with_opener := `<a href="` + url.String() + `">Open with Opener</a>`
            a.Parent().AppendHtml(" " + open_with_opener)
        } else if app == "hack" {
        case "hack":
            url := strings.Replace(hn_uri.String(), hn_prefix, "hack://", 1)

            open_with_hack := `<a href="` + url + `">Open with HACK</a>`
            a.Parent().AppendHtml(" " + open_with_hack)
        } else {
        default:
            slog.Warn("Unknown app provided for openHackerNewsLinksWith rewrite rule",
                slog.String("app", app),
            )

@@ -457,17 +456,3 @@ func removeTables(entryContent string) string {
    output, _ := doc.Find("body").First().Html()
    return output
}

func removeClickbait(entryTitle string) string {
    titleWords := []string{}
    for _, word := range strings.Fields(entryTitle) {
        runes := []rune(word)
        if len(runes) > 1 {
            // keep first rune as is to keep the first capital letter
            titleWords = append(titleWords, string([]rune{runes[0]})+strings.ToLower(string(runes[1:])))
        } else {
            titleWords = append(titleWords, word)
        }
    }
    return strings.Join(titleWords, " ")
}

@@ -11,6 +11,9 @@ import (

    "miniflux.app/v2/internal/model"
    "miniflux.app/v2/internal/urllib"

    "golang.org/x/text/cases"
    "golang.org/x/text/language"
)

type rule struct {

@@ -18,50 +21,7 @@ type rule struct {
    args []string
}

// Rewriter modifies item contents with a set of rewriting rules.
func Rewriter(entryURL string, entry *model.Entry, customRewriteRules string) {
    rulesList := getPredefinedRewriteRules(entryURL)
    if customRewriteRules != "" {
        rulesList = customRewriteRules
    }

    rules := parseRules(rulesList)
    rules = append(rules, rule{name: "add_pdf_download_link"})

    slog.Debug("Rewrite rules applied",
        slog.Any("rules", rules),
        slog.String("entry_url", entryURL),
    )

    for _, rule := range rules {
        applyRule(entryURL, entry, rule)
    }
}

func parseRules(rulesText string) (rules []rule) {
    scan := scanner.Scanner{Mode: scanner.ScanIdents | scanner.ScanStrings}
    scan.Init(strings.NewReader(rulesText))

    for {
        switch scan.Scan() {
        case scanner.Ident:
            rules = append(rules, rule{name: scan.TokenText()})

        case scanner.String:
            if l := len(rules) - 1; l >= 0 {
                text := scan.TokenText()
                text, _ = strconv.Unquote(text)

                rules[l].args = append(rules[l].args, text)
            }

        case scanner.EOF:
            return
        }
    }
}

func applyRule(entryURL string, entry *model.Entry, rule rule) {
func (rule rule) applyRule(entryURL string, entry *model.Entry) {
    switch rule.name {
    case "add_image_title":
        entry.Content = addImageTitle(entryURL, entry.Content)

@@ -82,7 +42,7 @@ func applyRule(entryURL string, entry *model.Entry, rule rule) {
    case "add_pdf_download_link":
        entry.Content = addPDFLink(entryURL, entry.Content)
    case "nl2br":
        entry.Content = replaceLineFeeds(entry.Content)
        entry.Content = strings.ReplaceAll(entry.Content, "\n", "<br>")
    case "convert_text_link", "convert_text_links":
        entry.Content = replaceTextLinks(entry.Content)
    case "fix_medium_images":

@@ -122,11 +82,11 @@ func applyRule(entryURL string, entry *model.Entry, rule rule) {
    case "add_castopod_episode":
        entry.Content = addCastopodEpisode(entryURL, entry.Content)
    case "base64_decode":
        selector := "body"
        if len(rule.args) >= 1 {
            entry.Content = applyFuncOnTextContent(entry.Content, rule.args[0], decodeBase64Content)
        } else {
            entry.Content = applyFuncOnTextContent(entry.Content, "body", decodeBase64Content)
            selector = rule.args[0]
        }
        entry.Content = applyFuncOnTextContent(entry.Content, selector, decodeBase64Content)
    case "add_hn_links_using_hack":
        entry.Content = addHackerNewsLinksUsing(entry.Content, "hack")
    case "add_hn_links_using_opener":

@@ -136,7 +96,46 @@ func applyRule(entryURL string, entry *model.Entry, rule rule) {
    case "remove_tables":
        entry.Content = removeTables(entry.Content)
    case "remove_clickbait":
        entry.Title = removeClickbait(entry.Title)
        entry.Title = cases.Title(language.English).String(strings.ToLower(entry.Title))
    }
}

// Rewriter modifies item contents with a set of rewriting rules.
func Rewriter(entryURL string, entry *model.Entry, customRewriteRules string) {
    rulesList := getPredefinedRewriteRules(entryURL)
    if customRewriteRules != "" {
        rulesList = customRewriteRules
    }

    rules := parseRules(rulesList)
    rules = append(rules, rule{name: "add_pdf_download_link"})

    slog.Debug("Rewrite rules applied",
        slog.Any("rules", rules),
        slog.String("entry_url", entryURL),
    )

    for _, rule := range rules {
        rule.applyRule(entryURL, entry)
    }
}

func parseRules(rulesText string) (rules []rule) {
    scan := scanner.Scanner{Mode: scanner.ScanIdents | scanner.ScanStrings}
    scan.Init(strings.NewReader(rulesText))

    for {
        switch scan.Scan() {
        case scanner.Ident:
            rules = append(rules, rule{name: scan.TokenText()})
        case scanner.String:
            if l := len(rules) - 1; l >= 0 {
                text, _ := strconv.Unquote(scan.TokenText())
                rules[l].args = append(rules[l].args, text)
            }
        case scanner.EOF:
            return
        }
    }
}

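Moving applyRule onto the rule type is mostly cosmetic; parseRules is behaviorally unchanged and worth a closer look, since it drives every rewrite rule string: identifiers open a new rule and quoted strings become arguments of the most recent rule, with text/scanner doing the tokenizing. A runnable sketch mirroring the function from the diff (the rule names in the demo are invented):

package main

import (
    "fmt"
    "strconv"
    "strings"
    "text/scanner"
)

type rule struct {
    name string
    args []string
}

// parseRules mirrors the parser from the diff: idents open a rule,
// quoted strings attach to the last rule, everything else is skipped.
func parseRules(rulesText string) (rules []rule) {
    scan := scanner.Scanner{Mode: scanner.ScanIdents | scanner.ScanStrings}
    scan.Init(strings.NewReader(rulesText))
    for {
        switch scan.Scan() {
        case scanner.Ident:
            rules = append(rules, rule{name: scan.TokenText()})
        case scanner.String:
            if l := len(rules) - 1; l >= 0 {
                text, _ := strconv.Unquote(scan.TokenText())
                rules[l].args = append(rules[l].args, text)
            }
        case scanner.EOF:
            return
        }
    }
}

func main() {
    fmt.Printf("%+v\n", parseRules(`nl2br,replace("foo"|"bar")`))
    // [{name:nl2br args:[]} {name:replace args:[foo bar]}]
}
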
@@ -26,22 +26,23 @@ func NewRSSAdapter(rss *RSS) *RSSAdapter {
    return &RSSAdapter{rss}
}

func (r *RSSAdapter) BuildFeed(feedURL string) *model.Feed {
func (r *RSSAdapter) BuildFeed(baseURL string) *model.Feed {
    feed := &model.Feed{
        Title:   html.UnescapeString(strings.TrimSpace(r.rss.Channel.Title)),
        FeedURL: feedURL,
        SiteURL: r.rss.Channel.Link,
        FeedURL: strings.TrimSpace(baseURL),
        SiteURL: strings.TrimSpace(r.rss.Channel.Link),
    }

    if siteURL, err := urllib.AbsoluteURL(feedURL, r.rss.Channel.Link); err == nil {
    // Ensure the Site URL is absolute.
    if siteURL, err := urllib.AbsoluteURL(baseURL, feed.SiteURL); err == nil {
        feed.SiteURL = siteURL
    }

    // Try to find the feed URL from the Atom links.
    for _, atomLink := range r.rss.Channel.AtomLinks.Links {
        atomLinkHref := strings.TrimSpace(atomLink.URL)
        atomLinkHref := strings.TrimSpace(atomLink.Href)
        if atomLinkHref != "" && atomLink.Rel == "self" {
            if absoluteFeedURL, err := urllib.AbsoluteURL(feedURL, atomLinkHref); err == nil {
            if absoluteFeedURL, err := urllib.AbsoluteURL(feed.FeedURL, atomLinkHref); err == nil {
                feed.FeedURL = absoluteFeedURL
                break
            }

@@ -69,10 +70,9 @@ func (r *RSSAdapter) BuildFeed(feedURL string) *model.Feed {

    for _, item := range r.rss.Channel.Items {
        entry := model.NewEntry()
        entry.Author = findEntryAuthor(&item)
        entry.Date = findEntryDate(&item)
        entry.Content = findEntryContent(&item)
        entry.Enclosures = findEntryEnclosures(&item)
        entry.Enclosures = findEntryEnclosures(&item, feed.SiteURL)

        // Populate the entry URL.
        entryURL := findEntryURL(&item)

@@ -90,22 +90,21 @@ func (r *RSSAdapter) BuildFeed(feedURL string) *model.Feed {
        entry.Title = findEntryTitle(&item)
        if entry.Title == "" {
            entry.Title = sanitizer.TruncateHTML(entry.Content, 100)
            if entry.Title == "" {
                entry.Title = entry.URL
            }
        }

        if entry.Title == "" {
            entry.Title = entry.URL
        }

        entry.Author = findEntryAuthor(&item)
        if entry.Author == "" {
            entry.Author = findFeedAuthor(&r.rss.Channel)
        }

        // Generate the entry hash.
        for _, value := range []string{item.GUID.Data, entryURL} {
            if value != "" {
                entry.Hash = crypto.Hash(value)
                break
            }
        if item.GUID.Data != "" {
            entry.Hash = crypto.Hash(item.GUID.Data)
        } else if entryURL != "" {
            entry.Hash = crypto.Hash(entryURL)
        }

        // Find CommentsURL if defined.
|
|||
}
|
||||
|
||||
// Populate entry categories.
|
||||
entry.Tags = append(entry.Tags, item.Categories...)
|
||||
entry.Tags = append(entry.Tags, item.MediaCategories.Labels()...)
|
||||
entry.Tags = append(entry.Tags, r.rss.Channel.Categories...)
|
||||
entry.Tags = append(entry.Tags, r.rss.Channel.GetItunesCategories()...)
|
||||
|
||||
if r.rss.Channel.GooglePlayCategory.Text != "" {
|
||||
entry.Tags = append(entry.Tags, r.rss.Channel.GooglePlayCategory.Text)
|
||||
for _, tag := range item.Categories {
|
||||
if tag != "" {
|
||||
entry.Tags = append(entry.Tags, tag)
|
||||
}
|
||||
}
|
||||
for _, tag := range item.MediaCategories.Labels() {
|
||||
if tag != "" {
|
||||
entry.Tags = append(entry.Tags, tag)
|
||||
}
|
||||
}
|
||||
if len(entry.Tags) == 0 {
|
||||
for _, tag := range r.rss.Channel.Categories {
|
||||
if tag != "" {
|
||||
entry.Tags = append(entry.Tags, tag)
|
||||
}
|
||||
}
|
||||
for _, tag := range r.rss.Channel.GetItunesCategories() {
|
||||
if tag != "" {
|
||||
entry.Tags = append(entry.Tags, tag)
|
||||
}
|
||||
}
|
||||
if r.rss.Channel.GooglePlayCategory.Text != "" {
|
||||
entry.Tags = append(entry.Tags, r.rss.Channel.GooglePlayCategory.Text)
|
||||
}
|
||||
}
|
||||
|
||||
feed.Entries = append(feed.Entries, entry)
|
||||
|
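The tag population is now two-tiered: item-level categories (RSS and Media RSS) are always collected, while channel-level categories (RSS, iTunes, Google Play) serve only as a fallback when the item itself carries none, and empty strings are skipped everywhere. The repeated loop could be captured by a small helper; a hypothetical sketch, not code from the commit:

// appendNonEmpty copies tags across while dropping empty strings,
// mirroring the filtering the diff repeats for each tag source.
func appendNonEmpty(dst []string, src ...string) []string {
    for _, tag := range src {
        if tag != "" {
            dst = append(dst, tag)
        }
    }
    return dst
}

// Usage following the diff's two-tier logic:
//   entry.Tags = appendNonEmpty(entry.Tags, item.Categories...)
//   entry.Tags = appendNonEmpty(entry.Tags, item.MediaCategories.Labels()...)
//   if len(entry.Tags) == 0 {
//       entry.Tags = appendNonEmpty(entry.Tags, r.rss.Channel.Categories...)
//   }
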
@@ -171,8 +187,8 @@ func findEntryURL(rssItem *RSSItem) string {
    }

    for _, atomLink := range rssItem.AtomLinks.Links {
        if atomLink.URL != "" && (strings.EqualFold(atomLink.Rel, "alternate") || atomLink.Rel == "") {
            return strings.TrimSpace(atomLink.URL)
        if atomLink.Href != "" && (strings.EqualFold(atomLink.Rel, "alternate") || atomLink.Rel == "") {
            return strings.TrimSpace(atomLink.Href)
        }
    }

@@ -234,8 +250,8 @@ func findEntryAuthor(rssItem *RSSItem) string {
        author = rssItem.ItunesAuthor
    case rssItem.DublinCoreCreator != "":
        author = rssItem.DublinCoreCreator
    case rssItem.AtomAuthor.String() != "":
        author = rssItem.AtomAuthor.String()
    case rssItem.AtomAuthor.PersonName() != "":
        author = rssItem.AtomAuthor.PersonName()
    case strings.Contains(rssItem.Author.Inner, "<![CDATA["):
        author = rssItem.Author.Data
    default:

@@ -245,18 +261,30 @@ func findEntryAuthor(rssItem *RSSItem) string {
    return strings.TrimSpace(sanitizer.StripTags(author))
}

func findEntryEnclosures(rssItem *RSSItem) model.EnclosureList {
func findEntryEnclosures(rssItem *RSSItem, siteURL string) model.EnclosureList {
    enclosures := make(model.EnclosureList, 0)
    duplicates := make(map[string]bool)

    for _, mediaThumbnail := range rssItem.AllMediaThumbnails() {
        if _, found := duplicates[mediaThumbnail.URL]; !found {
            duplicates[mediaThumbnail.URL] = true
            enclosures = append(enclosures, &model.Enclosure{
                URL:      mediaThumbnail.URL,
                MimeType: mediaThumbnail.MimeType(),
                Size:     mediaThumbnail.Size(),
            })
        mediaURL := strings.TrimSpace(mediaThumbnail.URL)
        if mediaURL == "" {
            continue
        }
        if _, found := duplicates[mediaURL]; !found {
            if mediaAbsoluteURL, err := urllib.AbsoluteURL(siteURL, mediaURL); err != nil {
                slog.Debug("Unable to build absolute URL for media thumbnail",
                    slog.String("url", mediaThumbnail.URL),
                    slog.String("site_url", siteURL),
                    slog.Any("error", err),
                )
            } else {
                duplicates[mediaAbsoluteURL] = true
                enclosures = append(enclosures, &model.Enclosure{
                    URL:      mediaAbsoluteURL,
                    MimeType: mediaThumbnail.MimeType(),
                    Size:     mediaThumbnail.Size(),
                })
            }
        }
    }
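Each media loop now follows the same three steps: trim the raw URL, drop it if empty, and deduplicate on the absolute URL rather than the raw string, so the same file written with and without surrounding whitespace collapses into a single enclosure. The core of that dedup, as a fragment:

    // Keys are trimmed absolute URLs, so " https://e.org/a.mp3 " and
    // "https://e.org/a.mp3" count as one enclosure.
    seen := make(map[string]bool)
    var urls []string
    for _, raw := range []string{"https://e.org/a.mp3", " https://e.org/a.mp3 ", " "} {
        u := strings.TrimSpace(raw)
        if u == "" || seen[u] {
            continue
        }
        seen[u] = true
        urls = append(urls, u)
    }
    fmt.Println(urls) // [https://e.org/a.mp3]
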
@@ -265,15 +293,20 @@ func findEntryEnclosures(rssItem *RSSItem) model.EnclosureList {

    if rssItem.FeedBurnerEnclosureLink != "" {
        filename := path.Base(rssItem.FeedBurnerEnclosureLink)
        if strings.Contains(enclosureURL, filename) {
        if strings.HasSuffix(enclosureURL, filename) {
            enclosureURL = rssItem.FeedBurnerEnclosureLink
        }
    }

    enclosureURL = strings.TrimSpace(enclosureURL)
    if enclosureURL == "" {
        continue
    }

    if absoluteEnclosureURL, err := urllib.AbsoluteURL(siteURL, enclosureURL); err == nil {
        enclosureURL = absoluteEnclosureURL
    }

    if _, found := duplicates[enclosureURL]; !found {
        duplicates[enclosureURL] = true
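The switch from strings.Contains to strings.HasSuffix tightens the FeedBurner rewrite: the proxied enclosure URL is only swapped for the original link when it actually ends with the original file name, not when the name merely appears somewhere in the URL. A quick illustration with made-up URLs:

    filename := path.Base("http://example.org/shows/episode.mp3") // "episode.mp3"

    // HasSuffix matches only a real file-name match at the end:
    strings.HasSuffix("http://feedproxy.example.com/episode.mp3", filename) // true

    // Contains would also have matched this unrelated URL, which is why it was too loose:
    strings.Contains("http://cdn.example.com/episode.mp3.torrent?ref=x", filename) // true
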
@@ -286,24 +319,50 @@ func findEntryEnclosures(rssItem *RSSItem) model.EnclosureList {
    }

    for _, mediaContent := range rssItem.AllMediaContents() {
        if _, found := duplicates[mediaContent.URL]; !found {
            duplicates[mediaContent.URL] = true
            enclosures = append(enclosures, &model.Enclosure{
                URL:      mediaContent.URL,
                MimeType: mediaContent.MimeType(),
                Size:     mediaContent.Size(),
            })
        mediaURL := strings.TrimSpace(mediaContent.URL)
        if mediaURL == "" {
            continue
        }
        if _, found := duplicates[mediaURL]; !found {
            mediaURL := strings.TrimSpace(mediaContent.URL)
            if mediaAbsoluteURL, err := urllib.AbsoluteURL(siteURL, mediaURL); err != nil {
                slog.Debug("Unable to build absolute URL for media content",
                    slog.String("url", mediaContent.URL),
                    slog.String("site_url", siteURL),
                    slog.Any("error", err),
                )
            } else {
                duplicates[mediaAbsoluteURL] = true
                enclosures = append(enclosures, &model.Enclosure{
                    URL:      mediaAbsoluteURL,
                    MimeType: mediaContent.MimeType(),
                    Size:     mediaContent.Size(),
                })
            }
        }
    }

    for _, mediaPeerLink := range rssItem.AllMediaPeerLinks() {
        if _, found := duplicates[mediaPeerLink.URL]; !found {
            duplicates[mediaPeerLink.URL] = true
            enclosures = append(enclosures, &model.Enclosure{
                URL:      mediaPeerLink.URL,
                MimeType: mediaPeerLink.MimeType(),
                Size:     mediaPeerLink.Size(),
            })
        mediaURL := strings.TrimSpace(mediaPeerLink.URL)
        if mediaURL == "" {
            continue
        }
        if _, found := duplicates[mediaURL]; !found {
            mediaURL := strings.TrimSpace(mediaPeerLink.URL)
            if mediaAbsoluteURL, err := urllib.AbsoluteURL(siteURL, mediaURL); err != nil {
                slog.Debug("Unable to build absolute URL for media peer link",
                    slog.String("url", mediaPeerLink.URL),
                    slog.String("site_url", siteURL),
                    slog.Any("error", err),
                )
            } else {
                duplicates[mediaAbsoluteURL] = true
                enclosures = append(enclosures, &model.Enclosure{
                    URL:      mediaAbsoluteURL,
                    MimeType: mediaPeerLink.MimeType(),
                    Size:     mediaPeerLink.Size(),
                })
            }
        }
    }

@@ -3,41 +3,18 @@

package rss // import "miniflux.app/v2/internal/reader/rss"

import "strings"
import (
    "miniflux.app/v2/internal/reader/atom"
)

type AtomAuthor struct {
    Author AtomPerson `xml:"http://www.w3.org/2005/Atom author"`
    Author atom.AtomPerson `xml:"http://www.w3.org/2005/Atom author"`
}

func (a *AtomAuthor) String() string {
    return a.Author.String()
}

type AtomPerson struct {
    Name  string `xml:"name"`
    Email string `xml:"email"`
}

func (a *AtomPerson) String() string {
    var name string

    switch {
    case a.Name != "":
        name = a.Name
    case a.Email != "":
        name = a.Email
    }

    return strings.TrimSpace(name)
}

type AtomLink struct {
    URL    string `xml:"href,attr"`
    Type   string `xml:"type,attr"`
    Rel    string `xml:"rel,attr"`
    Length string `xml:"length,attr"`
func (a *AtomAuthor) PersonName() string {
    return a.Author.PersonName()
}

type AtomLinks struct {
    Links []*AtomLink `xml:"http://www.w3.org/2005/Atom link"`
    Links []*atom.AtomLink `xml:"http://www.w3.org/2005/Atom link"`
}

@@ -109,6 +109,100 @@ func TestParseRss2Sample(t *testing.T) {
    }
}

func TestParseFeedWithFeedURLWithTrailingSpace(t *testing.T) {
    data := `<?xml version="1.0"?>
    <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
        <channel>
            <title>Example</title>
            <link>https://example.org/</link>
            <atom:link href="https://example.org/rss " type="application/rss+xml" rel="self"></atom:link>
            <item>
                <title>Test</title>
                <link>https://example.org/item</link>
            </item>
        </channel>
    </rss>`

    feed, err := Parse("https://example.org/ ", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if feed.FeedURL != "https://example.org/rss" {
        t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL)
    }
}

func TestParseFeedWithRelativeFeedURL(t *testing.T) {
    data := `<?xml version="1.0"?>
    <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
        <channel>
            <title>Example</title>
            <link>https://example.org/</link>
            <atom:link href="/rss" type="application/rss+xml" rel="self"></atom:link>
            <item>
                <title>Test</title>
                <link>https://example.org/item</link>
            </item>
        </channel>
    </rss>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if feed.FeedURL != "https://example.org/rss" {
        t.Errorf("Incorrect feed URL, got: %s", feed.FeedURL)
    }
}

func TestParseFeedSiteURLWithTrailingSpace(t *testing.T) {
    data := `<?xml version="1.0"?>
    <rss version="2.0">
        <channel>
            <title>Example</title>
            <link>https://example.org/ </link>
            <item>
                <title>Test</title>
                <link>https://example.org/item</link>
            </item>
        </channel>
    </rss>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if feed.SiteURL != "https://example.org/" {
        t.Errorf("Incorrect site URL, got: %s", feed.SiteURL)
    }
}

func TestParseFeedWithRelativeSiteURL(t *testing.T) {
    data := `<?xml version="1.0"?>
    <rss version="2.0">
        <channel>
            <title>Example</title>
            <link>/example </link>
            <item>
                <title>Test</title>
                <link>https://example.org/item</link>
            </item>
        </channel>
    </rss>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if feed.SiteURL != "https://example.org/example" {
        t.Errorf("Incorrect site URL, got: %s", feed.SiteURL)
    }
}

func TestParseFeedWithoutTitle(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rss version="2.0">

@@ -746,6 +840,106 @@ func TestParseEntryWithContentEncoded(t *testing.T) {
    }
}

// https://www.rssboard.org/rss-encoding-examples
func TestParseEntryDescriptionWithEncodedHTMLTags(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/">
        <channel>
            <title>Example</title>
            <link>http://example.org/</link>
            <item>
                <title>Item 1</title>
                <link>http://example.org/item1</link>
                <description>this is &lt;b&gt;bold&lt;/b&gt;</description>
            </item>
        </channel>
    </rss>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if feed.Entries[0].Content != `this is <b>bold</b>` {
        t.Errorf("Incorrect entry content, got: %q", feed.Entries[0].Content)
    }
}

// https://www.rssboard.org/rss-encoding-examples
func TestParseEntryWithDescriptionWithHTMLCDATA(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/">
        <channel>
            <title>Example</title>
            <link>http://example.org/</link>
            <item>
                <title>Item 1</title>
                <link>http://example.org/item1</link>
                <description><![CDATA[this is <b>bold</b>]]></description>
            </item>
        </channel>
    </rss>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if feed.Entries[0].Content != `this is <b>bold</b>` {
        t.Errorf("Incorrect entry content, got: %q", feed.Entries[0].Content)
    }
}

// https://www.rssboard.org/rss-encoding-examples
func TestParseEntryDescriptionWithEncodingAngleBracketsInText(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/">
        <channel>
            <title>Example</title>
            <link>http://example.org/</link>
            <item>
                <title>Item 1</title>
                <link>http://example.org/item1</link>
                <description>5 &lt; 8, ticker symbol &lt;BIGCO&gt;</description>
            </item>
        </channel>
    </rss>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if feed.Entries[0].Content != `5 < 8, ticker symbol <BIGCO>` {
        t.Errorf("Incorrect entry content, got: %q", feed.Entries[0].Content)
    }
}

// https://www.rssboard.org/rss-encoding-examples
func TestParseEntryDescriptionWithEncodingAngleBracketsWithinCDATASection(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/">
        <channel>
            <title>Example</title>
            <link>http://example.org/</link>
            <item>
                <title>Item 1</title>
                <link>http://example.org/item1</link>
                <description><![CDATA[5 < 8, ticker symbol <BIGCO>]]></description>
            </item>
        </channel>
    </rss>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if feed.Entries[0].Content != `5 < 8, ticker symbol <BIGCO>` {
        t.Errorf("Incorrect entry content, got: %q", feed.Entries[0].Content)
    }
}

func TestParseEntryWithFeedBurnerLink(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rss version="2.0" xmlns:feedburner="http://rssnamespace.org/feedburner/ext/1.0">

@@ -822,15 +1016,11 @@ func TestParseEntryWithEnclosures(t *testing.T) {
    }

    if len(feed.Entries) != 1 {
        t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if feed.Entries[0].URL != "http://www.example.org/entries/1" {
        t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL)
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if len(feed.Entries[0].Enclosures) != 1 {
        t.Errorf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

    if feed.Entries[0].Enclosures[0].URL != "http://www.example.org/myaudiofile.mp3" {

@@ -871,15 +1061,11 @@ func TestParseEntryWithIncorrectEnclosureLength(t *testing.T) {
    }

    if len(feed.Entries) != 1 {
        t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if feed.Entries[0].URL != "http://www.example.org/entries/1" {
        t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL)
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if len(feed.Entries[0].Enclosures) != 2 {
        t.Errorf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

    if feed.Entries[0].Enclosures[0].URL != "http://www.example.org/myaudiofile.mp3" {

@@ -899,6 +1085,39 @@ func TestParseEntryWithIncorrectEnclosureLength(t *testing.T) {
    }
}

func TestParseEntryWithDuplicatedEnclosureURL(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rss version="2.0">
        <channel>
            <title>My Podcast Feed</title>
            <link>http://example.org</link>
            <item>
                <title>Podcasting with RSS</title>
                <link>http://www.example.org/entries/1</link>
                <enclosure url="http://www.example.org/myaudiofile.mp3" type="audio/mpeg" />
                <enclosure url=" http://www.example.org/myaudiofile.mp3 " type="audio/mpeg" />
            </item>
        </channel>
    </rss>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if len(feed.Entries) != 1 {
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if len(feed.Entries[0].Enclosures) != 1 {
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

    if feed.Entries[0].Enclosures[0].URL != "http://www.example.org/myaudiofile.mp3" {
        t.Errorf("Incorrect enclosure URL, got: %s", feed.Entries[0].Enclosures[0].URL)
    }
}

func TestParseEntryWithEmptyEnclosureURL(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rss version="2.0">

@@ -912,7 +1131,7 @@ func TestParseEntryWithEmptyEnclosureURL(t *testing.T) {
    <description>An overview of RSS podcasting</description>
    <pubDate>Fri, 15 Jul 2005 00:00:00 -0500</pubDate>
    <guid isPermaLink="true">http://www.example.org/entries/1</guid>
    <enclosure url="" length="0"/>
    <enclosure url=" " length="0"/>
    </item>
    </channel>
    </rss>`

@@ -923,15 +1142,47 @@ func TestParseEntryWithEmptyEnclosureURL(t *testing.T) {
    }

    if len(feed.Entries) != 1 {
        t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if feed.Entries[0].URL != "http://www.example.org/entries/1" {
        t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL)
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if len(feed.Entries[0].Enclosures) != 0 {
        t.Errorf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }
}

func TestParseEntryWithRelativeEnclosureURL(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rss version="2.0">
        <channel>
            <title>My Podcast Feed</title>
            <link>http://example.org</link>
            <author>some.email@example.org</author>
            <item>
                <title>Podcasting with RSS</title>
                <link>http://www.example.org/entries/1</link>
                <description>An overview of RSS podcasting</description>
                <pubDate>Fri, 15 Jul 2005 00:00:00 -0500</pubDate>
                <guid isPermaLink="true">http://www.example.org/entries/1</guid>
                <enclosure url=" /files/file.mp3 "/>
            </item>
        </channel>
    </rss>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if len(feed.Entries) != 1 {
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if len(feed.Entries[0].Enclosures) != 1 {
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

    if feed.Entries[0].Enclosures[0].URL != "http://example.org/files/file.mp3" {
        t.Errorf("Incorrect enclosure URL, got: %q", feed.Entries[0].Enclosures[0].URL)
    }
}

@@ -960,15 +1211,11 @@ func TestParseEntryWithFeedBurnerEnclosures(t *testing.T) {
    }

    if len(feed.Entries) != 1 {
        t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if feed.Entries[0].URL != "http://www.example.org/entries/1" {
        t.Errorf("Incorrect entry URL, got: %s", feed.Entries[0].URL)
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if len(feed.Entries[0].Enclosures) != 1 {
        t.Errorf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

    if feed.Entries[0].Enclosures[0].URL != "http://example.org/67ca416c-f22a-4228-a681-68fc9998ec10/File.mp3" {

@@ -984,6 +1231,42 @@ func TestParseEntryWithFeedBurnerEnclosures(t *testing.T) {
    }
}

func TestParseEntryWithFeedBurnerEnclosuresAndRelativeURL(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rss version="2.0" xmlns:feedburner="http://rssnamespace.org/feedburner/ext/1.0">
        <channel>
            <title>My Example Feed</title>
            <link>http://example.org</link>
            <item>
                <title>Example Item</title>
                <link>http://www.example.org/entries/1</link>
                <enclosure
                    url="http://feedproxy.google.com/~r/example/~5/lpMyFSCvubs/File.mp3"
                    length="76192460"
                    type="audio/mpeg" />
                <feedburner:origEnclosureLink>/67ca416c-f22a-4228-a681-68fc9998ec10/File.mp3</feedburner:origEnclosureLink>
            </item>
        </channel>
    </rss>`

    feed, err := Parse("https://example.org/", bytes.NewReader([]byte(data)))
    if err != nil {
        t.Fatal(err)
    }

    if len(feed.Entries) != 1 {
        t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if len(feed.Entries[0].Enclosures) != 1 {
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

    if feed.Entries[0].Enclosures[0].URL != "http://example.org/67ca416c-f22a-4228-a681-68fc9998ec10/File.mp3" {
        t.Errorf("Incorrect enclosure URL, got: %s", feed.Entries[0].Enclosures[0].URL)
    }
}

func TestParseEntryWithRelativeURL(t *testing.T) {
    data := `<?xml version="1.0" encoding="utf-8"?>
    <rss version="2.0">

@@ -1195,7 +1478,7 @@ func TestParseEntryWithMediaGroup(t *testing.T) {
    <rss version="2.0" xmlns:media="http://search.yahoo.com/mrss/">
    <channel>
    <title>My Example Feed</title>
    <link>http://example.org</link>
    <link>https://example.org</link>
    <item>
    <title>Example Item</title>
    <link>http://www.example.org/entries/1</link>

@@ -1206,7 +1489,9 @@ func TestParseEntryWithMediaGroup(t *testing.T) {
    <media:content type="application/x-bittorrent" url="https://example.org/file2.torrent" isDefault="true"></media:content>
    <media:content type="application/x-bittorrent" url="https://example.org/file3.torrent"></media:content>
    <media:content type="application/x-bittorrent" url="https://example.org/file4.torrent"></media:content>
    <media:content type="application/x-bittorrent" url="https://example.org/file5.torrent" fileSize="42"></media:content>
    <media:content type="application/x-bittorrent" url="https://example.org/file4.torrent"></media:content>
    <media:content type="application/x-bittorrent" url=" file5.torrent " fileSize="42"></media:content>
    <media:content type="application/x-bittorrent" url=" " fileSize="42"></media:content>
    <media:rating>nonadult</media:rating>
    </media:group>
    <media:thumbnail url="https://example.org/image.jpg" height="122" width="223"></media:thumbnail>

@@ -1259,15 +1544,19 @@ func TestParseEntryWithMediaContent(t *testing.T) {
    <rss version="2.0" xmlns:media="http://search.yahoo.com/mrss/">
    <channel>
    <title>My Example Feed</title>
    <link>http://example.org</link>
    <link>https://example.org</link>
    <item>
    <title>Example Item</title>
    <link>http://www.example.org/entries/1</link>
    <media:thumbnail url="https://example.org/thumbnail.jpg" />
    <media:thumbnail url="https://example.org/thumbnail.jpg" />
    <media:thumbnail url=" thumbnail.jpg " />
    <media:thumbnail url=" " />
    <media:content url="https://example.org/media1.jpg" medium="image">
    <media:title type="html">Some Title for Media 1</media:title>
    </media:content>
    <media:content url="https://example.org/media2.jpg" medium="image" />
    <media:content url=" /media2.jpg " medium="image" />
    <media:content url=" " medium="image" />
    </item>
    </channel>
    </rss>`

@@ -1278,9 +1567,9 @@ func TestParseEntryWithMediaContent(t *testing.T) {
    }

    if len(feed.Entries) != 1 {
        t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }
    if len(feed.Entries[0].Enclosures) != 3 {
    if len(feed.Entries[0].Enclosures) != 4 {
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

@@ -1289,6 +1578,7 @@ func TestParseEntryWithMediaContent(t *testing.T) {
        mimeType string
        size     int64
    }{
        {"https://example.org/thumbnail.jpg", "image/*", 0},
        {"https://example.org/thumbnail.jpg", "image/*", 0},
        {"https://example.org/media1.jpg", "image/*", 0},
        {"https://example.org/media2.jpg", "image/*", 0},

@@ -1314,11 +1604,14 @@ func TestParseEntryWithMediaPeerLink(t *testing.T) {
    <rss version="2.0" xmlns:media="http://search.yahoo.com/mrss/">
    <channel>
    <title>My Example Feed</title>
    <link>http://example.org</link>
    <link>https://website.example.org</link>
    <item>
    <title>Example Item</title>
    <link>http://www.example.org/entries/1</link>
    <media:peerLink type="application/x-bittorrent" href="http://www.example.org/file.torrent" />
    <media:peerLink type="application/x-bittorrent" href="https://www.example.org/file.torrent" />
    <media:peerLink type="application/x-bittorrent" href="https://www.example.org/file.torrent" />
    <media:peerLink type="application/x-bittorrent" href=" file2.torrent " />
    <media:peerLink type="application/x-bittorrent" href=" " />
    </item>
    </channel>
    </rss>`

@@ -1329,10 +1622,10 @@ func TestParseEntryWithMediaPeerLink(t *testing.T) {
    }

    if len(feed.Entries) != 1 {
        t.Errorf("Incorrect number of entries, got: %d", len(feed.Entries))
        t.Fatalf("Incorrect number of entries, got: %d", len(feed.Entries))
    }

    if len(feed.Entries[0].Enclosures) != 1 {
    if len(feed.Entries[0].Enclosures) != 2 {
        t.Fatalf("Incorrect number of enclosures, got: %d", len(feed.Entries[0].Enclosures))
    }

@@ -1341,7 +1634,8 @@ func TestParseEntryWithMediaPeerLink(t *testing.T) {
        mimeType string
        size     int64
    }{
        {"http://www.example.org/file.torrent", "application/x-bittorrent", 0},
        {"https://www.example.org/file.torrent", "application/x-bittorrent", 0},
        {"https://website.example.org/file2.torrent", "application/x-bittorrent", 0},
    }

    for index, enclosure := range feed.Entries[0].Enclosures {

@@ -1596,11 +1890,11 @@ func TestParseEntryWithCategories(t *testing.T) {
        t.Fatal(err)
    }

    if len(feed.Entries[0].Tags) != 3 {
        t.Errorf("Incorrect number of tags, got: %d", len(feed.Entries[0].Tags))
    if len(feed.Entries[0].Tags) != 2 {
        t.Fatalf("Incorrect number of tags, got: %d", len(feed.Entries[0].Tags))
    }

    expected := []string{"Category 1", "Category 2", "Category 3"}
    expected := []string{"Category 1", "Category 2"}
    result := feed.Entries[0].Tags

    for i, tag := range result {

@ -16,29 +16,75 @@ import (
|
|||
|
||||
// Specs: https://www.rssboard.org/rss-specification
|
||||
type RSS struct {
|
||||
Version string `xml:"rss version,attr"`
|
||||
// Version is the version of the RSS specification.
|
||||
Version string `xml:"rss version,attr"`
|
||||
|
||||
// Channel is the main container for the RSS feed.
|
||||
Channel RSSChannel `xml:"rss channel"`
|
||||
}
|
||||
|
||||
type RSSChannel struct {
|
||||
Title string `xml:"rss title"`
|
||||
Link string `xml:"rss link"`
|
||||
Description string `xml:"rss description"`
|
||||
Language string `xml:"rss language"`
|
||||
Copyright string `xml:"rss copyRight"`
|
||||
ManagingEditor string `xml:"rss managingEditor"`
|
||||
Webmaster string `xml:"rss webMaster"`
|
||||
PubDate string `xml:"rss pubDate"`
|
||||
LastBuildDate string `xml:"rss lastBuildDate"`
|
||||
Categories []string `xml:"rss category"`
|
||||
Generator string `xml:"rss generator"`
|
||||
Docs string `xml:"rss docs"`
|
||||
Cloud *RSSCloud `xml:"rss cloud"`
|
||||
Image *RSSImage `xml:"rss image"`
|
||||
TTL string `xml:"rss ttl"`
|
||||
SkipHours []string `xml:"rss skipHours>hour"`
|
||||
SkipDays []string `xml:"rss skipDays>day"`
|
||||
Items []RSSItem `xml:"rss item"`
|
||||
// Title is the name of the channel.
|
||||
Title string `xml:"rss title"`
|
||||
|
||||
// Link is the URL to the HTML website corresponding to the channel.
|
||||
Link string `xml:"rss link"`
|
||||
|
||||
// Description is a phrase or sentence describing the channel.
|
||||
Description string `xml:"rss description"`
|
||||
|
||||
// Language is the language the channel is written in.
|
||||
// A list of allowable values for this element, as provided by Netscape, is here: https://www.rssboard.org/rss-language-codes.
|
||||
// You may also use values defined by the W3C: https://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes.
|
||||
Language string `xml:"rss language"`
|
||||
|
||||
// Copyright is a string indicating the copyright.
|
||||
Copyright string `xml:"rss copyRight"`
|
||||
|
||||
// ManagingEditor is the email address for the person responsible for editorial content.
|
||||
ManagingEditor string `xml:"rss managingEditor"`
|
||||
|
||||
// Webmaster is the email address for the person responsible for technical issues relating to the channel.
|
||||
Webmaster string `xml:"rss webMaster"`
|
||||
|
||||
// PubDate is the publication date for the content in the channel.
|
||||
// All date-times in RSS conform to the Date and Time Specification of RFC 822, with the exception that the year may be expressed with two characters or four characters (four preferred).
|
||||
PubDate string `xml:"rss pubDate"`
|
||||
|
||||
// LastBuildDate is the last time the content of the channel changed.
|
||||
LastBuildDate string `xml:"rss lastBuildDate"`
|
||||
|
||||
// Categories is a collection of categories to which the channel belongs.
|
||||
Categories []string `xml:"rss category"`
|
||||
|
||||
// Generator is a string indicating the program used to generate the channel.
|
||||
Generator string `xml:"rss generator"`
|
||||
|
||||
// Docs is a URL that points to the documentation for the format used in the RSS file.
|
||||
DocumentationURL string `xml:"rss docs"`
|
||||
|
||||
// Cloud is a web service that supports the rssCloud interface which can be implemented in HTTP-POST, XML-RPC or SOAP 1.1.
|
||||
Cloud *RSSCloud `xml:"rss cloud"`
|
||||
|
||||
// Image specifies a GIF, JPEG or PNG image that can be displayed with the channel.
|
||||
Image *RSSImage `xml:"rss image"`
|
||||
|
||||
// TTL is a number of minutes that indicates how long a channel can be cached before refreshing from the source.
|
||||
TTL string `xml:"rss ttl"`
|
||||
|
||||
// SkipHours is a hint for aggregators telling them which hours they can skip.
|
||||
// An XML element that contains up to 24 <hour> sub-elements whose value is a number between 0 and 23,
|
||||
// representing a time in GMT, when aggregators,
|
||||
// if they support the feature, may not read the channel on hours listed in the skipHours element.
|
||||
SkipHours []string `xml:"rss skipHours>hour"`
|
||||
|
||||
// SkipDays is a hint for aggregators telling them which days they can skip.
|
||||
// An XML element that contains up to seven <day> sub-elements whose value is Monday, Tuesday, Wednesday, Thursday, Friday, Saturday or Sunday.
|
||||
SkipDays []string `xml:"rss skipDays>day"`
|
||||
|
||||
// Items is a collection of items.
|
||||
Items []RSSItem `xml:"rss item"`
|
||||
|
||||
AtomLinks
|
||||
itunes.ItunesChannelElement
|
||||
googleplay.GooglePlayChannelElement
|
||||
|
@ -64,16 +110,56 @@ type RSSImage struct {
}

type RSSItem struct {
	Title       string         `xml:"rss title"`
	Link        string         `xml:"rss link"`
	Description string         `xml:"rss description"`
	Author      RSSAuthor      `xml:"rss author"`
	Categories  []string       `xml:"rss category"`
	CommentsURL string         `xml:"rss comments"`
	Enclosures  []RSSEnclosure `xml:"rss enclosure"`
	GUID        RSSGUID        `xml:"rss guid"`
	PubDate     string         `xml:"rss pubDate"`
	Source      RSSSource      `xml:"rss source"`
	// Title is the title of the item.
	Title string `xml:"rss title"`

	// Link is the URL of the item.
	Link string `xml:"rss link"`

	// Description is the item synopsis.
	Description string `xml:"rss description"`

	// Author is the email address of the author of the item.
	Author RSSAuthor `xml:"rss author"`

	// <category> is an optional sub-element of <item>.
	// It has one optional attribute, domain, a string that identifies a categorization taxonomy.
	Categories []string `xml:"rss category"`

	// <comments> is an optional sub-element of <item>.
	// If present, it contains the URL of the comments page for the item.
	CommentsURL string `xml:"rss comments"`

	// <enclosure> is an optional sub-element of <item>.
	// It has three required attributes. url says where the enclosure is located,
	// length says how big it is in bytes, and type says what its type is, a standard MIME type.
	Enclosures []RSSEnclosure `xml:"rss enclosure"`

	// <guid> is an optional sub-element of <item>.
	// It's a string that uniquely identifies the item.
	// When present, an aggregator may choose to use this string to determine if an item is new.
	//
	// There are no rules for the syntax of a guid.
	// Aggregators must view them as a string.
	// It's up to the source of the feed to establish the uniqueness of the string.
	//
	// If the guid element has an attribute named isPermaLink with a value of true,
	// the reader may assume that it is a permalink to the item, that is, a url that can be opened in a Web browser,
	// that points to the full item described by the <item> element.
	//
	// isPermaLink is optional, its default value is true.
	// If its value is false, the guid may not be assumed to be a url, or a url to anything in particular.
	GUID RSSGUID `xml:"rss guid"`

	// <pubDate> is the publication date of the item.
	// Its value is a string in RFC 822 format.
	PubDate string `xml:"rss pubDate"`

	// <source> is an optional sub-element of <item>.
	// Its value is the name of the RSS channel that the item came from, derived from its <title>.
	// It has one required attribute, url, which contains the URL of the RSS channel.
	Source RSSSource `xml:"rss source"`

	dublincore.DublinCoreItemElement
	FeedBurnerItemElement
	media.MediaItemElement
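For context (an editor's sketch, not part of this commit): the tags above use encoding/xml's parent>child chaining to flatten nested elements such as skipHours/hour into a plain slice. The same pattern, with the namespace qualifier omitted for brevity, in a self-contained program:

package main

import (
	"encoding/xml"
	"fmt"
)

// channel is a stand-in for the RSSChannel type above; the real struct
// qualifies each tag with the namespace ("rss skipHours>hour").
type channel struct {
	TTL       string   `xml:"ttl"`
	SkipHours []string `xml:"skipHours>hour"`
	SkipDays  []string `xml:"skipDays>day"`
}

func main() {
	doc := `<channel>
		<ttl>60</ttl>
		<skipHours><hour>0</hour><hour>23</hour></skipHours>
		<skipDays><day>Sunday</day></skipDays>
	</channel>`

	var c channel
	if err := xml.Unmarshal([]byte(doc), &c); err != nil {
		panic(err)
	}
	fmt.Println(c.TTL, c.SkipHours, c.SkipDays) // 60 [0 23] [Sunday]
}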
@ -190,17 +190,18 @@ func sanitizeAttributes(baseURL, tagName string, attributes []html.Attribute) ([
		}

		if isExternalResourceAttribute(attribute.Key) {
			if tagName == "iframe" {
			switch {
			case tagName == "iframe":
				if !isValidIframeSource(baseURL, attribute.Val) {
					continue
				}
				value = rewriteIframeURL(attribute.Val)
			} else if tagName == "img" && attribute.Key == "src" && isValidDataAttribute(attribute.Val) {
			case tagName == "img" && attribute.Key == "src" && isValidDataAttribute(attribute.Val):
				value = attribute.Val
			} else if isAnchor("a", attribute) {
			case isAnchor("a", attribute):
				value = attribute.Val
				isAnchorLink = true
			} else {
			default:
				value, err = urllib.AbsoluteURL(baseURL, value)
				if err != nil {
					continue
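The hunk above folds an if/else-if chain into a tagless switch. A minimal stand-alone illustration of the equivalence (a hypothetical helper, not Miniflux code):

// Each case of a tagless switch is an arbitrary boolean expression,
// evaluated top to bottom, so every "else if" branch maps onto one case.
func classifyAttribute(tagName, key string) string {
	switch {
	case tagName == "iframe":
		return "iframe source"
	case tagName == "img" && key == "src":
		return "image source"
	default:
		return "other attribute"
	}
}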
@ -27,8 +27,7 @@ func StripTags(input string) string {
		}

		token := tokenizer.Token()
		switch token.Type {
		case html.TextToken:
		if token.Type == html.TextToken {
			buffer.WriteString(token.Data)
		}
	}
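For context, a self-contained sketch of the token-stream pattern StripTags relies on after this simplification: walk the HTML tokenizer and keep only text tokens.

package main

import (
	"fmt"
	"strings"

	"golang.org/x/net/html"
)

// stripTags mirrors the simplified form above: a single-case switch on the
// token type becomes a plain if statement.
func stripTags(input string) string {
	var buffer strings.Builder
	tokenizer := html.NewTokenizer(strings.NewReader(input))
	for {
		if tokenizer.Next() == html.ErrorToken {
			// io.EOF or malformed input: return what was collected so far.
			return buffer.String()
		}
		token := tokenizer.Token()
		if token.Type == html.TextToken {
			buffer.WriteString(token.Data)
		}
	}
}

func main() {
	fmt.Println(stripTags("<p>Hello <b>world</b>!</p>")) // Hello world!
}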
@ -10,12 +10,12 @@ import (
	"strings"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/reader/encoding"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readability"
	"miniflux.app/v2/internal/urllib"

	"github.com/PuerkitoBio/goquery"
	"golang.org/x/net/html/charset"
)

func ScrapeWebsite(requestBuilder *fetcher.RequestBuilder, websiteURL, rules string) (string, error) {

@ -42,9 +42,9 @@ func ScrapeWebsite(requestBuilder *fetcher.RequestBuilder, websiteURL, rules str
	var content string
	var err error

	htmlDocumentReader, err := encoding.CharsetReaderFromContentType(
		responseHandler.ContentType(),
	htmlDocumentReader, err := charset.NewReader(
		responseHandler.Body(config.Opts.HTTPClientMaxBodySize()),
		responseHandler.ContentType(),
	)
	if err != nil {
		return "", fmt.Errorf("scraper: unable to read HTML document: %v", err)
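For context, charset.NewReader from golang.org/x/net/html/charset takes the raw body plus the Content-Type header and returns a UTF-8 reader, which is why the custom encoding helper could be dropped here and in the subscription finder below. A minimal usage sketch:

package main

import (
	"fmt"
	"io"
	"strings"

	"golang.org/x/net/html/charset"
)

func main() {
	body := strings.NewReader("<html><body>café</body></html>")
	// The second argument is the HTTP Content-Type; when it is ambiguous the
	// reader also sniffs byte-order marks and <meta charset> declarations.
	r, err := charset.NewReader(body, "text/html; charset=utf-8")
	if err != nil {
		panic(err)
	}
	utf8Bytes, _ := io.ReadAll(r)
	fmt.Println(string(utf8Bytes))
}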
@ -14,12 +14,12 @@ import (
	"miniflux.app/v2/internal/integration/rssbridge"
	"miniflux.app/v2/internal/locale"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/encoding"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/parser"
	"miniflux.app/v2/internal/urllib"

	"github.com/PuerkitoBio/goquery"
	"golang.org/x/net/html/charset"
)

var (

@ -150,7 +150,7 @@ func (f *SubscriptionFinder) FindSubscriptionsFromWebPage(websiteURL, contentTyp
		"link[type='application/feed+json']": parser.FormatJSON,
	}

	htmlDocumentReader, err := encoding.CharsetReaderFromContentType(contentType, body)
	htmlDocumentReader, err := charset.NewReader(body, contentType)
	if err != nil {
		return nil, locale.NewLocalizedErrorWrapper(err, "error.unable_to_parse_html_document", err)
	}
@ -66,7 +66,7 @@ func filterValidXMLChar(r rune) rune {
func procInst(param, s string) string {
	// TODO: this parsing is somewhat lame and not exact.
	// It works for all actual cases, though.
	param = param + "="
	param += "="
	idx := strings.Index(s, param)
	if idx == -1 {
		return ""
@ -60,18 +60,16 @@ func (b *BatchBuilder) WithoutDisabledFeeds() *BatchBuilder {
}

func (b *BatchBuilder) FetchJobs() (jobs model.JobList, err error) {
	var parts []string
	parts = append(parts, `SELECT id, user_id FROM feeds`)
	query := `SELECT id, user_id FROM feeds`

	if len(b.conditions) > 0 {
		parts = append(parts, fmt.Sprintf("WHERE %s", strings.Join(b.conditions, " AND ")))
		query += fmt.Sprintf(" WHERE %s", strings.Join(b.conditions, " AND "))
	}

	if b.limit > 0 {
		parts = append(parts, fmt.Sprintf("ORDER BY next_check_at ASC LIMIT %d", b.limit))
		query += fmt.Sprintf(" ORDER BY next_check_at ASC LIMIT %d", b.limit)
	}

	query := strings.Join(parts, " ")
	rows, err := b.db.Query(query, b.args...)
	if err != nil {
		return nil, fmt.Errorf(`store: unable to fetch batch of jobs: %v`, err)
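A stand-alone sketch of the string-building pattern adopted above (a hypothetical helper reusing the same clause names): start from the base statement and append optional clauses directly, instead of collecting parts and joining them.

package main

import (
	"fmt"
	"strings"
)

func buildJobQuery(conditions []string, limit int) string {
	query := "SELECT id, user_id FROM feeds"
	if len(conditions) > 0 {
		// Each appended clause carries its own leading space.
		query += " WHERE " + strings.Join(conditions, " AND ")
	}
	if limit > 0 {
		query += fmt.Sprintf(" ORDER BY next_check_at ASC LIMIT %d", limit)
	}
	return query
}

func main() {
	fmt.Println(buildJobQuery([]string{"disabled is false"}, 10))
}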
@ -133,12 +133,12 @@ func (s *Storage) CategoriesWithFeedCount(userID int64) (model.Categories, error
	`

	if user.CategoriesSortingOrder == "alphabetical" {
		query = query + `
		query += `
			ORDER BY
				c.title ASC
		`
	} else {
		query = query + `
		query += `
			ORDER BY
				count_unread DESC,
				c.title ASC
@ -255,14 +255,14 @@ func (s *Storage) RemoveAndReplaceCategoriesByName(userid int64, titles []string
	}

	query = `
	WITH d_cats AS (SELECT id FROM categories WHERE user_id = $1 AND title = ANY($2))
	UPDATE feeds
	SET category_id =
		(SELECT id
		FROM categories
		WHERE user_id = $1 AND id NOT IN (SELECT id FROM d_cats)
		ORDER BY title ASC
		LIMIT 1)
		WITH d_cats AS (SELECT id FROM categories WHERE user_id = $1 AND title = ANY($2))
		UPDATE feeds
		SET category_id =
			(SELECT id
			FROM categories
			WHERE user_id = $1 AND id NOT IN (SELECT id FROM d_cats)
			ORDER BY title ASC
			LIMIT 1)
		WHERE user_id = $1 AND category_id IN (SELECT id FROM d_cats)
	`
	_, err = tx.Exec(query, userid, titleParam)
@ -8,6 +8,8 @@ import (
	"errors"
	"fmt"
	"log/slog"
	"slices"
	"strings"
	"time"

	"miniflux.app/v2/internal/crypto"
@ -138,7 +140,7 @@ func (s *Storage) createEntry(tx *sql.Tx, entry *model.Entry) error {
		entry.UserID,
		entry.FeedID,
		entry.ReadingTime,
		pq.Array(removeDuplicates(entry.Tags)),
		pq.Array(removeEmpty(removeDuplicates(entry.Tags))),
	).Scan(
		&entry.ID,
		&entry.Status,

@ -194,7 +196,7 @@ func (s *Storage) updateEntry(tx *sql.Tx, entry *model.Entry) error {
		entry.UserID,
		entry.FeedID,
		entry.Hash,
		pq.Array(removeDuplicates(entry.Tags)),
		pq.Array(removeEmpty(removeDuplicates(entry.Tags))),
	).Scan(&entry.ID)

	if err != nil {
@ -615,15 +617,17 @@ func (s *Storage) UnshareEntry(userID int64, entryID int64) (err error) {
	return
}

// removeDuplicate removes duplicate entries from a slice
func removeDuplicates[T string | int](sliceList []T) []T {
	allKeys := make(map[T]bool)
	list := []T{}
	for _, item := range sliceList {
		if _, value := allKeys[item]; !value {
			allKeys[item] = true
			list = append(list, item)
func removeDuplicates(l []string) []string {
	slices.Sort(l)
	return slices.Compact(l)
}

func removeEmpty(l []string) []string {
	var finalSlice []string
	for _, item := range l {
		if strings.TrimSpace(item) != "" {
			finalSlice = append(finalSlice, item)
		}
	}
	return list
	return finalSlice
}
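For context, the new implementation leans on the standard library's slices package (Go 1.21+). One behavioral detail worth noting: sorting before compacting returns the tags in sorted order, whereas the old map-based version preserved first-seen order. A runnable sketch:

package main

import (
	"fmt"
	"slices"
	"strings"
)

func removeDuplicates(l []string) []string {
	slices.Sort(l)
	// slices.Compact drops consecutive duplicates, which is why the slice
	// must be sorted first.
	return slices.Compact(l)
}

func removeEmpty(l []string) []string {
	var out []string
	for _, item := range l {
		if strings.TrimSpace(item) != "" {
			out = append(out, item)
		}
	}
	return out
}

func main() {
	tags := []string{"go", "", "rss", "go", " "}
	fmt.Println(removeEmpty(removeDuplicates(tags))) // [go rss]
}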
@ -439,21 +439,21 @@ func (e *EntryQueryBuilder) buildCondition() string {
}

func (e *EntryQueryBuilder) buildSorting() string {
	var parts []string
	var parts string

	if len(e.sortExpressions) > 0 {
		parts = append(parts, fmt.Sprintf(`ORDER BY %s`, strings.Join(e.sortExpressions, ", ")))
		parts += fmt.Sprintf(" ORDER BY %s", strings.Join(e.sortExpressions, ", "))
	}

	if e.limit > 0 {
		parts = append(parts, fmt.Sprintf(`LIMIT %d`, e.limit))
		parts += fmt.Sprintf(" LIMIT %d", e.limit)
	}

	if e.offset > 0 {
		parts = append(parts, fmt.Sprintf(`OFFSET %d`, e.offset))
		parts += fmt.Sprintf(" OFFSET %d", e.offset)
	}

	return strings.Join(parts, " ")
	return parts
}

// NewEntryQueryBuilder returns a new EntryQueryBuilder.
@ -91,25 +91,25 @@ func (f *FeedQueryBuilder) buildCounterCondition() string {
}

func (f *FeedQueryBuilder) buildSorting() string {
	var parts []string
	var parts string

	if len(f.sortExpressions) > 0 {
		parts = append(parts, fmt.Sprintf(`ORDER BY %s`, strings.Join(f.sortExpressions, ", ")))
		parts += fmt.Sprintf(" ORDER BY %s", strings.Join(f.sortExpressions, ", "))
	}

	if len(parts) > 0 {
		parts = append(parts, ", lower(f.title) ASC")
		parts += ", lower(f.title) ASC"
	}

	if f.limit > 0 {
		parts = append(parts, fmt.Sprintf(`LIMIT %d`, f.limit))
		parts += fmt.Sprintf(" LIMIT %d", f.limit)
	}

	if f.offset > 0 {
		parts = append(parts, fmt.Sprintf(`OFFSET %d`, f.offset))
		parts += fmt.Sprintf(" OFFSET %d", f.offset)
	}

	return strings.Join(parts, " ")
	return parts
}

// GetFeed returns a single feed that match the condition.
@ -91,7 +91,8 @@ func (s *Storage) CreateUser(userCreationRequest *model.UserCreationRequest) (*m
			cjk_reading_speed,
			default_home_page,
			categories_sorting_order,
			mark_read_on_view
			mark_read_on_view,
			media_playback_rate
	`

	tx, err := s.db.Begin()

@ -130,6 +131,7 @@ func (s *Storage) CreateUser(userCreationRequest *model.UserCreationRequest) (*m
		&user.DefaultHomePage,
		&user.CategoriesSortingOrder,
		&user.MarkReadOnView,
		&user.MediaPlaybackRate,
	)
	if err != nil {
		tx.Rollback()

@ -186,9 +188,10 @@ func (s *Storage) UpdateUser(user *model.User) error {
				cjk_reading_speed=$19,
				default_home_page=$20,
				categories_sorting_order=$21,
				mark_read_on_view=$22
				mark_read_on_view=$22,
				media_playback_rate=$23
			WHERE
				id=$23
				id=$24
		`

		_, err = s.db.Exec(

@ -215,6 +218,7 @@ func (s *Storage) UpdateUser(user *model.User) error {
			user.DefaultHomePage,
			user.CategoriesSortingOrder,
			user.MarkReadOnView,
			user.MediaPlaybackRate,
			user.ID,
		)
		if err != nil {

@ -243,9 +247,10 @@ func (s *Storage) UpdateUser(user *model.User) error {
				cjk_reading_speed=$18,
				default_home_page=$19,
				categories_sorting_order=$20,
				mark_read_on_view=$21
				mark_read_on_view=$21,
				media_playback_rate=$22
			WHERE
				id=$22
				id=$23
		`

		_, err := s.db.Exec(

@ -271,6 +276,7 @@ func (s *Storage) UpdateUser(user *model.User) error {
			user.DefaultHomePage,
			user.CategoriesSortingOrder,
			user.MarkReadOnView,
			user.MediaPlaybackRate,
			user.ID,
		)

@ -318,7 +324,8 @@ func (s *Storage) UserByID(userID int64) (*model.User, error) {
			cjk_reading_speed,
			default_home_page,
			categories_sorting_order,
			mark_read_on_view
			mark_read_on_view,
			media_playback_rate
		FROM
			users
		WHERE

@ -353,7 +360,8 @@ func (s *Storage) UserByUsername(username string) (*model.User, error) {
			cjk_reading_speed,
			default_home_page,
			categories_sorting_order,
			mark_read_on_view
			mark_read_on_view,
			media_playback_rate
		FROM
			users
		WHERE

@ -388,7 +396,8 @@ func (s *Storage) UserByField(field, value string) (*model.User, error) {
			cjk_reading_speed,
			default_home_page,
			categories_sorting_order,
			mark_read_on_view
			mark_read_on_view,
			media_playback_rate
		FROM
			users
		WHERE

@ -430,7 +439,8 @@ func (s *Storage) UserByAPIKey(token string) (*model.User, error) {
			u.cjk_reading_speed,
			u.default_home_page,
			u.categories_sorting_order,
			u.mark_read_on_view
			u.mark_read_on_view,
			media_playback_rate
		FROM
			users u
		LEFT JOIN

@ -467,6 +477,7 @@ func (s *Storage) fetchUser(query string, args ...interface{}) (*model.User, err
		&user.DefaultHomePage,
		&user.CategoriesSortingOrder,
		&user.MarkReadOnView,
		&user.MediaPlaybackRate,
	)

	if err == sql.ErrNoRows {

@ -574,7 +585,8 @@ func (s *Storage) Users() (model.Users, error) {
			cjk_reading_speed,
			default_home_page,
			categories_sorting_order,
			mark_read_on_view
			mark_read_on_view,
			media_playback_rate
		FROM
			users
		ORDER BY username ASC

@ -612,6 +624,7 @@ func (s *Storage) Users() (model.Users, error) {
		&user.DefaultHomePage,
		&user.CategoriesSortingOrder,
		&user.MarkReadOnView,
		&user.MediaPlaybackRate,
	)

	if err != nil {
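For context, the placeholder renumbering above (id=$23 becoming id=$24, and so on) follows from PostgreSQL's positional parameters: inserting media_playback_rate shifts every later placeholder by one, and the Scan destinations must stay in the same order as the selected columns. A trimmed-down, hypothetical sketch of the pattern:

package storage

import "database/sql"

// updatePlaybackRate is a hypothetical reduction of the update above, not
// the actual Miniflux query: $1 and $2 bind by position, so adding another
// SET column before the WHERE clause would renumber the id placeholder.
func updatePlaybackRate(db *sql.DB, userID int64, rate float64) error {
	_, err := db.Exec(
		`UPDATE users SET media_playback_rate=$1 WHERE id=$2`,
		rate,
		userID,
	)
	return err
}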
@ -16,8 +16,8 @@ import (
	"miniflux.app/v2/internal/crypto"
	"miniflux.app/v2/internal/http/route"
	"miniflux.app/v2/internal/locale"
	"miniflux.app/v2/internal/mediaproxy"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/proxy"
	"miniflux.app/v2/internal/timezone"
	"miniflux.app/v2/internal/urllib"

@ -57,19 +57,19 @@ func (f *funcMap) Map() template.FuncMap {
			return template.HTML(str)
		},
		"proxyFilter": func(data string) string {
			return proxy.ProxyRewriter(f.router, data)
			return mediaproxy.RewriteDocumentWithRelativeProxyURL(f.router, data)
		},
		"proxyURL": func(link string) string {
			proxyOption := config.Opts.ProxyOption()
			mediaProxyMode := config.Opts.MediaProxyMode()

			if proxyOption == "all" || (proxyOption != "none" && !urllib.IsHTTPS(link)) {
				return proxy.ProxifyURL(f.router, link)
			if mediaProxyMode == "all" || (mediaProxyMode != "none" && !urllib.IsHTTPS(link)) {
				return mediaproxy.ProxifyRelativeURL(f.router, link)
			}

			return link
		},
		"mustBeProxyfied": func(mediaType string) bool {
			return slices.Contains(config.Opts.ProxyMediaTypes(), mediaType)
			return slices.Contains(config.Opts.MediaProxyResourceTypes(), mediaType)
		},
		"domain":    urllib.Domain,
		"hasPrefix": strings.HasPrefix,
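For context, a small sketch of the decision implemented by the renamed proxyURL helper, with stand-ins for the Miniflux config and mediaproxy calls (the names and mode strings below are illustrative, not the real API):

package main

import (
	"fmt"
	"strings"
)

// maybeProxify mirrors the template helper above: proxy everything when the
// mode is "all", proxy only non-HTTPS resources for any other mode except
// "none", and return the link untouched otherwise. proxify stands in for
// mediaproxy.ProxifyRelativeURL, and the HTTPS check for urllib.IsHTTPS.
func maybeProxify(mode, link string, proxify func(string) string) string {
	if mode == "all" || (mode != "none" && !strings.HasPrefix(link, "https://")) {
		return proxify(link)
	}
	return link
}

func main() {
	proxify := func(u string) string { return "/proxy/" + u }
	fmt.Println(maybeProxify("http-only", "http://example.org/a.jpg", proxify))
	fmt.Println(maybeProxify("http-only", "https://example.org/a.jpg", proxify))
}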
@ -36,10 +36,10 @@

	{{ if and .user .user.Stylesheet }}
		{{ $stylesheetNonce := nonce }}
		<meta http-equiv="Content-Security-Policy" content="default-src 'self'; img-src * data:; media-src *; frame-src *; style-src 'self' 'nonce-{{ $stylesheetNonce }}'">
		<meta http-equiv="Content-Security-Policy" content="default-src 'self'; img-src * data:; media-src *; frame-src *; style-src 'self' 'nonce-{{ $stylesheetNonce }}'; require-trusted-types-for 'script'; trusted-types ttpolicy;">
		<style nonce="{{ $stylesheetNonce }}">{{ .user.Stylesheet | safeCSS }}</style>
	{{ else }}
		<meta http-equiv="Content-Security-Policy" content="default-src 'self'; img-src * data:; media-src *; frame-src *">
		<meta http-equiv="Content-Security-Policy" content="default-src 'self'; img-src * data:; media-src *; frame-src *; require-trusted-types-for 'script'; trusted-types ttpolicy;">
	{{ end }}

	<script src="{{ route "javascript" "name" "app" "checksum" .app_js_checksum }}" defer></script>

@ -154,6 +154,8 @@
	<li>{{ t "page.keyboard_shortcuts.go_to_previous_item" }} = <strong>p</strong>, <strong>k</strong>, <strong>⏴</strong></li>
	<li>{{ t "page.keyboard_shortcuts.go_to_next_item" }} = <strong>n</strong>, <strong>j</strong>, <strong>⏵</strong></li>
	<li>{{ t "page.keyboard_shortcuts.go_to_feed" }} = <strong>F</strong></li>
	<li>{{ t "page.keyboard_shortcuts.go_to_top_item" }} = <strong>g + g</strong></li>
	<li>{{ t "page.keyboard_shortcuts.go_to_bottom_item" }} = <strong>G</strong></li>
</ul>

<p>{{ t "page.keyboard_shortcuts.subtitle.pages" }}</p>
@ -172,6 +172,7 @@
	<div class="enclosure-audio" >
		<audio controls preload="metadata"
			data-last-position="{{ .MediaProgression }}"
			{{ if $.user.MediaPlaybackRate }}data-playback-rate="{{ $.user.MediaPlaybackRate }}"{{ end }}
			data-save-url="{{ route "saveEnclosureProgression" "enclosureID" .ID }}"
		>
		{{ if (and $.user (mustBeProxyfied "audio")) }}

@ -185,6 +186,7 @@
	<div class="enclosure-video">
		<video controls preload="metadata"
			data-last-position="{{ .MediaProgression }}"
			{{ if $.user.MediaPlaybackRate }}data-playback-rate="{{ $.user.MediaPlaybackRate }}"{{ end }}
			data-save-url="{{ route "saveEnclosureProgression" "enclosureID" .ID }}"
		>
		{{ if (and $.user (mustBeProxyfied "video")) }}

@ -214,6 +216,7 @@
	<div class="enclosure-audio">
		<audio controls preload="metadata"
			data-last-position="{{ .MediaProgression }}"
			{{ if $.user.MediaPlaybackRate }}data-playback-rate="{{ $.user.MediaPlaybackRate }}"{{ end }}
			data-save-url="{{ route "saveEnclosureProgression" "enclosureID" .ID }}"
		>
		{{ if (and $.user (mustBeProxyfied "audio")) }}

@ -227,6 +230,7 @@
	<div class="enclosure-video">
		<video controls preload="metadata"
			data-last-position="{{ .MediaProgression }}"
			{{ if $.user.MediaPlaybackRate }}data-playback-rate="{{ $.user.MediaPlaybackRate }}"{{ end }}
			data-save-url="{{ route "saveEnclosureProgression" "enclosureID" .ID }}"
		>
		{{ if (and $.user (mustBeProxyfied "video")) }}

@ -108,6 +108,9 @@
	<label for="form-default-reading-speed">{{ t "form.prefs.label.default_reading_speed" }}</label>
	<input type="number" name="default_reading_speed" id="form-default-reading-speed" value="{{ .form.DefaultReadingSpeed }}" min="1">

	<label for="form-media-playback-rate">{{ t "form.prefs.label.media_playback_rate" }}</label>
	<input type="number" name="media_playback_rate" id="form-media-playback-rate" value="{{ .form.MediaPlaybackRate }}" min="0.25" max="4" step="any" />

	<label><input type="checkbox" name="show_reading_time" value="1" {{ if .form.ShowReadingTime }}checked{{ end }}> {{ t "form.prefs.label.show_reading_time" }}</label>

	<label><input type="checkbox" name="mark_read_on_view" value="1" {{ if .form.MarkReadOnView }}checked{{ end }}> {{ t "form.prefs.label.mark_read_on_view" }}</label>
@ -1,196 +0,0 @@
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

//go:build integration
// +build integration

package tests

import (
	"testing"

	miniflux "miniflux.app/v2/client"
)

func TestCreateCategory(t *testing.T) {
	categoryName := "My category"
	client := createClient(t)
	category, err := client.CreateCategory(categoryName)
	if err != nil {
		t.Fatal(err)
	}

	if category.ID == 0 {
		t.Fatalf(`Invalid categoryID, got "%v"`, category.ID)
	}

	if category.UserID <= 0 {
		t.Fatalf(`Invalid userID, got "%v"`, category.UserID)
	}

	if category.Title != categoryName {
		t.Fatalf(`Invalid title, got "%v" instead of "%v"`, category.Title, categoryName)
	}
}

func TestCreateCategoryWithEmptyTitle(t *testing.T) {
	client := createClient(t)
	_, err := client.CreateCategory("")
	if err == nil {
		t.Fatal(`The category title should be mandatory`)
	}
}

func TestCannotCreateDuplicatedCategory(t *testing.T) {
	client := createClient(t)

	categoryName := "My category"
	_, err := client.CreateCategory(categoryName)
	if err != nil {
		t.Fatal(err)
	}

	_, err = client.CreateCategory(categoryName)
	if err == nil {
		t.Fatal(`Duplicated categories should not be allowed`)
	}
}

func TestUpdateCategory(t *testing.T) {
	categoryName := "My category"
	client := createClient(t)
	category, err := client.CreateCategory(categoryName)
	if err != nil {
		t.Fatal(err)
	}

	categoryName = "Updated category"
	category, err = client.UpdateCategory(category.ID, categoryName)
	if err != nil {
		t.Fatal(err)
	}

	if category.ID == 0 {
		t.Fatalf(`Invalid categoryID, got "%v"`, category.ID)
	}

	if category.UserID <= 0 {
		t.Fatalf(`Invalid userID, got "%v"`, category.UserID)
	}

	if category.Title != categoryName {
		t.Fatalf(`Invalid title, got %q instead of %q`, category.Title, categoryName)
	}
}

func TestUpdateInexistingCategory(t *testing.T) {
	client := createClient(t)

	_, err := client.UpdateCategory(4200000, "Test")
	if err != miniflux.ErrNotFound {
		t.Errorf(`Updating an inexisting category should returns a 404 instead of %v`, err)
	}
}

func TestMarkCategoryAsRead(t *testing.T) {
	client := createClient(t)

	feed, category := createFeed(t, client)

	results, err := client.FeedEntries(feed.ID, nil)
	if err != nil {
		t.Fatalf(`Failed to get entries: %v`, err)
	}
	if results.Total == 0 {
		t.Fatalf(`Invalid number of entries: %d`, results.Total)
	}
	if results.Entries[0].Status != miniflux.EntryStatusUnread {
		t.Fatalf(`Invalid entry status, got %q instead of %q`, results.Entries[0].Status, miniflux.EntryStatusUnread)
	}

	if err := client.MarkCategoryAsRead(category.ID); err != nil {
		t.Fatalf(`Failed to mark category as read: %v`, err)
	}

	results, err = client.FeedEntries(feed.ID, nil)
	if err != nil {
		t.Fatalf(`Failed to get updated entries: %v`, err)
	}

	for _, entry := range results.Entries {
		if entry.Status != miniflux.EntryStatusRead {
			t.Errorf(`Status for entry %d was %q instead of %q`, entry.ID, entry.Status, miniflux.EntryStatusRead)
		}
	}
}

func TestListCategories(t *testing.T) {
	categoryName := "My category"
	client := createClient(t)

	_, err := client.CreateCategory(categoryName)
	if err != nil {
		t.Fatal(err)
	}

	categories, err := client.Categories()
	if err != nil {
		t.Fatal(err)
	}

	if len(categories) != 2 {
		t.Fatalf(`Invalid number of categories, got "%v" instead of "%v"`, len(categories), 2)
	}

	if categories[0].ID == 0 {
		t.Fatalf(`Invalid categoryID, got "%v"`, categories[0].ID)
	}

	if categories[0].UserID <= 0 {
		t.Fatalf(`Invalid userID, got "%v"`, categories[0].UserID)
	}

	if categories[0].Title != "All" {
		t.Fatalf(`Invalid title, got "%v" instead of "%v"`, categories[0].Title, "All")
	}

	if categories[1].ID == 0 {
		t.Fatalf(`Invalid categoryID, got "%v"`, categories[0].ID)
	}

	if categories[1].UserID <= 0 {
		t.Fatalf(`Invalid userID, got "%v"`, categories[1].UserID)
	}

	if categories[1].Title != categoryName {
		t.Fatalf(`Invalid title, got "%v" instead of "%v"`, categories[1].Title, categoryName)
	}
}

func TestDeleteCategory(t *testing.T) {
	client := createClient(t)

	category, err := client.CreateCategory("My category")
	if err != nil {
		t.Fatal(err)
	}

	err = client.DeleteCategory(category.ID)
	if err != nil {
		t.Fatal(`Removing a category should not raise any error`)
	}
}

func TestCannotDeleteCategoryOfAnotherUser(t *testing.T) {
	client := createClient(t)
	categories, err := client.Categories()
	if err != nil {
		t.Fatal(err)
	}

	client = createClient(t)
	err = client.DeleteCategory(categories[0].ID)
	if err == nil {
		t.Fatal(`Removing a category that belongs to another user should be forbidden`)
	}
}
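For context, the //go:build integration guard at the top of each deleted file (paired with the legacy // +build form for older toolchains) meant these tests were only compiled when the tag was supplied explicitly, as in the sketch below; removing the guard lets the rewritten API tests run as ordinary tests.

//go:build integration
// +build integration

// A file guarded like this is skipped entirely unless the run opts in with:
//
//	go test -tags integration ./tests
package tests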
@ -1,21 +0,0 @@
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

//go:build integration
// +build integration

package tests

import (
	"testing"

	miniflux "miniflux.app/v2/client"
)

func TestWithBadEndpoint(t *testing.T) {
	client := miniflux.New("bad url", testAdminUsername, testAdminPassword)
	_, err := client.Users()
	if err == nil {
		t.Fatal(`Using a bad URL should raise an error`)
	}
}
@ -1,517 +0,0 @@
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

//go:build integration
// +build integration

package tests

import (
	"testing"

	miniflux "miniflux.app/v2/client"
)

func TestGetAllFeedEntries(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)

	allResults, err := client.FeedEntries(feed.ID, nil)
	if err != nil {
		t.Fatal(err)
	}

	if allResults.Total == 0 {
		t.Fatal(`Invalid number of entries`)
	}

	if allResults.Entries[0].Title == "" {
		t.Fatal(`Invalid entry title`)
	}

	filteredResults, err := client.FeedEntries(feed.ID, &miniflux.Filter{Limit: 1, Offset: 5})
	if err != nil {
		t.Fatal(err)
	}

	if allResults.Total != filteredResults.Total {
		t.Fatal(`Total should always contains the total number of items regardless of filters`)
	}

	if allResults.Entries[0].ID == filteredResults.Entries[0].ID {
		t.Fatal(`Filtered entries should be different than previous results`)
	}

	filteredResultsByEntryID, err := client.FeedEntries(feed.ID, &miniflux.Filter{AfterEntryID: allResults.Entries[0].ID})
	if err != nil {
		t.Fatal(err)
	}

	if filteredResultsByEntryID.Entries[0].ID == allResults.Entries[0].ID {
		t.Fatal(`The first entry should be filtered out`)
	}
}

func TestGetAllCategoryEntries(t *testing.T) {
	client := createClient(t)
	_, category := createFeed(t, client)

	allResults, err := client.CategoryEntries(category.ID, nil)
	if err != nil {
		t.Fatal(err)
	}

	if allResults.Total == 0 {
		t.Fatal(`Invalid number of entries`)
	}

	if allResults.Entries[0].Title == "" {
		t.Fatal(`Invalid entry title`)
	}

	filteredResults, err := client.CategoryEntries(category.ID, &miniflux.Filter{Limit: 1, Offset: 5})
	if err != nil {
		t.Fatal(err)
	}

	if allResults.Total != filteredResults.Total {
		t.Fatal(`Total should always contains the total number of items regardless of filters`)
	}

	if allResults.Entries[0].ID == filteredResults.Entries[0].ID {
		t.Fatal(`Filtered entries should be different than previous results`)
	}

	filteredResultsByEntryID, err := client.CategoryEntries(category.ID, &miniflux.Filter{AfterEntryID: allResults.Entries[0].ID})
	if err != nil {
		t.Fatal(err)
	}

	if filteredResultsByEntryID.Entries[0].ID == allResults.Entries[0].ID {
		t.Fatal(`The first entry should be filtered out`)
	}
}

func TestGetAllEntries(t *testing.T) {
	client := createClient(t)
	createFeed(t, client)

	resultWithoutSorting, err := client.Entries(nil)
	if err != nil {
		t.Fatal(err)
	}

	if resultWithoutSorting.Total == 0 {
		t.Fatal(`Invalid number of entries`)
	}

	resultWithStatusFilter, err := client.Entries(&miniflux.Filter{Status: miniflux.EntryStatusRead})
	if err != nil {
		t.Fatal(err)
	}

	if resultWithStatusFilter.Total != 0 {
		t.Fatal(`We should have 0 read entries`)
	}

	resultWithDifferentSorting, err := client.Entries(&miniflux.Filter{Order: "published_at", Direction: "desc"})
	if err != nil {
		t.Fatal(err)
	}

	if resultWithDifferentSorting.Entries[0].Title == resultWithoutSorting.Entries[0].Title {
		t.Fatalf(`The items should be sorted differently "%v" vs "%v"`, resultWithDifferentSorting.Entries[0].Title, resultWithoutSorting.Entries[0].Title)
	}

	resultWithStarredEntries, err := client.Entries(&miniflux.Filter{Starred: miniflux.FilterOnlyStarred})
	if err != nil {
		t.Fatal(err)
	}

	if resultWithStarredEntries.Total != 0 {
		t.Fatalf(`We are not supposed to have starred entries yet`)
	}
}

func TestFilterEntriesByCategory(t *testing.T) {
	client := createClient(t)
	category, err := client.CreateCategory("Test Filter by Category")
	if err != nil {
		t.Fatal(err)
	}

	feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
		FeedURL:    testFeedURL,
		CategoryID: category.ID,
	})
	if err != nil {
		t.Fatal(err)
	}

	if feedID == 0 {
		t.Fatalf(`Invalid feed ID, got %q`, feedID)
	}

	results, err := client.Entries(&miniflux.Filter{CategoryID: category.ID})
	if err != nil {
		t.Fatal(err)
	}

	if results.Total == 0 {
		t.Fatalf(`We should have more than one entry`)
	}

	if results.Entries[0].Feed.Category == nil {
		t.Fatalf(`The entry feed category should not be nil`)
	}

	if results.Entries[0].Feed.Category.ID != category.ID {
		t.Errorf(`Entries should be filtered by category_id=%d`, category.ID)
	}
}

func TestFilterEntriesByFeed(t *testing.T) {
	client := createClient(t)
	category, err := client.CreateCategory("Test Filter by Feed")
	if err != nil {
		t.Fatal(err)
	}

	feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
		FeedURL:    testFeedURL,
		CategoryID: category.ID,
	})
	if err != nil {
		t.Fatal(err)
	}

	if feedID == 0 {
		t.Fatalf(`Invalid feed ID, got %q`, feedID)
	}

	results, err := client.Entries(&miniflux.Filter{FeedID: feedID})
	if err != nil {
		t.Fatal(err)
	}

	if results.Total == 0 {
		t.Fatalf(`We should have more than one entry`)
	}

	if results.Entries[0].Feed.Category == nil {
		t.Fatalf(`The entry feed category should not be nil`)
	}

	if results.Entries[0].Feed.Category.ID != category.ID {
		t.Errorf(`Entries should be filtered by category_id=%d`, category.ID)
	}
}

func TestFilterEntriesByStatuses(t *testing.T) {
	client := createClient(t)
	category, err := client.CreateCategory("Test Filter by statuses")
	if err != nil {
		t.Fatal(err)
	}

	feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
		FeedURL:    testFeedURL,
		CategoryID: category.ID,
	})
	if err != nil {
		t.Fatal(err)
	}

	if feedID == 0 {
		t.Fatalf(`Invalid feed ID, got %q`, feedID)
	}

	results, err := client.Entries(&miniflux.Filter{FeedID: feedID})
	if err != nil {
		t.Fatal(err)
	}

	if err := client.UpdateEntries([]int64{results.Entries[0].ID}, miniflux.EntryStatusRead); err != nil {
		t.Fatal(err)
	}

	if err := client.UpdateEntries([]int64{results.Entries[1].ID}, miniflux.EntryStatusRemoved); err != nil {
		t.Fatal(err)
	}

	results, err = client.Entries(&miniflux.Filter{Statuses: []string{miniflux.EntryStatusRead, miniflux.EntryStatusRemoved}})
	if err != nil {
		t.Fatal(err)
	}

	if results.Total != 2 {
		t.Fatalf(`We should have 2 entries`)
	}

	if results.Entries[0].Status != "read" {
		t.Errorf(`The first entry has the wrong status: %s`, results.Entries[0].Status)
	}

	if results.Entries[1].Status != "removed" {
		t.Errorf(`The 2nd entry has the wrong status: %s`, results.Entries[1].Status)
	}
}

func TestSearchEntries(t *testing.T) {
	client := createClient(t)
	categories, err := client.Categories()
	if err != nil {
		t.Fatal(err)
	}

	feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
		FeedURL:    testFeedURL,
		CategoryID: categories[0].ID,
	})
	if err != nil {
		t.Fatal(err)
	}

	if feedID == 0 {
		t.Fatalf(`Invalid feed ID, got %q`, feedID)
	}

	results, err := client.Entries(&miniflux.Filter{Search: "2.0.8"})
	if err != nil {
		t.Fatal(err)
	}

	if results.Total != 1 {
		t.Fatalf(`We should have only one entry instead of %d`, results.Total)
	}
}

func TestInvalidFilters(t *testing.T) {
	client := createClient(t)
	createFeed(t, client)

	_, err := client.Entries(&miniflux.Filter{Status: "invalid"})
	if err == nil {
		t.Fatal(`Using invalid status should raise an error`)
	}

	_, err = client.Entries(&miniflux.Filter{Direction: "invalid"})
	if err == nil {
		t.Fatal(`Using invalid direction should raise an error`)
	}

	_, err = client.Entries(&miniflux.Filter{Order: "invalid"})
	if err == nil {
		t.Fatal(`Using invalid order should raise an error`)
	}
}

func TestGetFeedEntry(t *testing.T) {
	client := createClient(t)
	createFeed(t, client)

	result, err := client.Entries(&miniflux.Filter{Limit: 1})
	if err != nil {
		t.Fatal(err)
	}

	// Test get entry by entry id and feed id
	entry, err := client.FeedEntry(result.Entries[0].FeedID, result.Entries[0].ID)
	if err != nil {
		t.Fatal(err)
	}
	if entry.ID != result.Entries[0].ID {
		t.Fatal("Wrong entry returned")
	}
}

func TestGetCategoryEntry(t *testing.T) {
	client := createClient(t)
	_, category := createFeed(t, client)

	result, err := client.Entries(&miniflux.Filter{Limit: 1})
	if err != nil {
		t.Fatal(err)
	}

	// Test get entry by entry id and category id
	entry, err := client.CategoryEntry(category.ID, result.Entries[0].ID)
	if err != nil {
		t.Fatal(err)
	}
	if entry.ID != result.Entries[0].ID {
		t.Fatal("Wrong entry returned")
	}
}

func TestGetEntry(t *testing.T) {
	client := createClient(t)
	createFeed(t, client)

	result, err := client.Entries(&miniflux.Filter{Limit: 1})
	if err != nil {
		t.Fatal(err)
	}

	// Test get entry by entry id only
	entry, err := client.Entry(result.Entries[0].ID)
	if err != nil {
		t.Fatal(err)
	}
	if entry.ID != result.Entries[0].ID {
		t.Fatal("Wrong entry returned")
	}
}

func TestUpdateStatus(t *testing.T) {
	client := createClient(t)
	createFeed(t, client)

	result, err := client.Entries(&miniflux.Filter{Limit: 1})
	if err != nil {
		t.Fatal(err)
	}

	err = client.UpdateEntries([]int64{result.Entries[0].ID}, miniflux.EntryStatusRead)
	if err != nil {
		t.Fatal(err)
	}

	entry, err := client.Entry(result.Entries[0].ID)
	if err != nil {
		t.Fatal(err)
	}

	if entry.Status != miniflux.EntryStatusRead {
		t.Fatal("The entry status should be updated")
	}

	err = client.UpdateEntries([]int64{result.Entries[0].ID}, "invalid")
	if err == nil {
		t.Fatal(`Invalid entry status should not be accepted`)
	}

	err = client.UpdateEntries([]int64{}, miniflux.EntryStatusRead)
	if err == nil {
		t.Fatal(`An empty list of entry should not be accepted`)
	}
}

func TestUpdateEntry(t *testing.T) {
	client := createClient(t)
	createFeed(t, client)

	result, err := client.Entries(&miniflux.Filter{Limit: 1})
	if err != nil {
		t.Fatal(err)
	}

	title := "New title"
	content := "New content"

	_, err = client.UpdateEntry(result.Entries[0].ID, &miniflux.EntryModificationRequest{
		Title:   &title,
		Content: &content,
	})
	if err != nil {
		t.Fatal(err)
	}

	entry, err := client.Entry(result.Entries[0].ID)
	if err != nil {
		t.Fatal(err)
	}

	if entry.Title != title {
		t.Fatal("The entry title should be updated")
	}

	if entry.Content != content {
		t.Fatal("The entry content should be updated")
	}
}

func TestToggleBookmark(t *testing.T) {
	client := createClient(t)
	createFeed(t, client)

	result, err := client.Entries(&miniflux.Filter{Limit: 1})
	if err != nil {
		t.Fatal(err)
	}

	if result.Entries[0].Starred {
		t.Fatal("The entry should not be starred")
	}

	err = client.ToggleBookmark(result.Entries[0].ID)
	if err != nil {
		t.Fatal(err)
	}

	entry, err := client.Entry(result.Entries[0].ID)
	if err != nil {
		t.Fatal(err)
	}

	if !entry.Starred {
		t.Fatal("The entry should be starred")
	}
}

func TestHistoryOrder(t *testing.T) {
	client := createClient(t)
	createFeed(t, client)

	result, err := client.Entries(&miniflux.Filter{Limit: 3})
	if err != nil {
		t.Fatal(err)
	}

	selectedEntryID := result.Entries[2].ID

	err = client.UpdateEntries([]int64{selectedEntryID}, miniflux.EntryStatusRead)
	if err != nil {
		t.Fatal(err)
	}

	history, err := client.Entries(&miniflux.Filter{Order: "changed_at", Direction: "desc", Limit: 1})
	if err != nil {
		t.Fatal(err)
	}

	if history.Entries[0].ID != selectedEntryID {
		t.Fatal("The entry that we just read should be at the top of the history")
	}
}

func TestFlushHistory(t *testing.T) {
	client := createClient(t)
	createFeed(t, client)

	result, err := client.Entries(&miniflux.Filter{Limit: 1})
	if err != nil {
		t.Fatal(err)
	}

	selectedEntryID := result.Entries[0].ID

	err = client.UpdateEntries([]int64{selectedEntryID}, miniflux.EntryStatusRead)
	if err != nil {
		t.Fatal(err)
	}

	err = client.FlushHistory()
	if err != nil {
		t.Fatal(err)
	}

	history, err := client.Entries(&miniflux.Filter{Status: miniflux.EntryStatusRemoved})
	if err != nil {
		t.Fatal(err)
	}

	if history.Entries[0].ID != selectedEntryID {
		t.Fatal("The entry that we just read should have the removed status")
	}
}
@ -1,880 +0,0 @@
|
|||
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
//go:build integration
|
||||
// +build integration
|
||||
|
||||
package tests
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
miniflux "miniflux.app/v2/client"
|
||||
)
|
||||
|
||||
func TestCreateFeed(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
if feed.ID == 0 {
|
||||
t.Fatalf(`Invalid feed ID, got %q`, feed.ID)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCannotCreateDuplicatedFeed(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, category := createFeed(t, client)
|
||||
|
||||
_, err := client.CreateFeed(&miniflux.FeedCreationRequest{
|
||||
FeedURL: feed.FeedURL,
|
||||
CategoryID: category.ID,
|
||||
})
|
||||
if err == nil {
|
||||
t.Fatal(`Duplicated feeds should not be allowed`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreateFeedWithInexistingCategory(t *testing.T) {
|
||||
client := createClient(t)
|
||||
_, err := client.CreateFeed(&miniflux.FeedCreationRequest{
|
||||
FeedURL: testFeedURL,
|
||||
CategoryID: -1,
|
||||
})
|
||||
if err == nil {
|
||||
t.Fatal(`Feeds should not be created with inexisting category`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreateFeedWithEmptyFeedURL(t *testing.T) {
|
||||
client := createClient(t)
|
||||
categories, err := client.Categories()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
_, err = client.CreateFeed(&miniflux.FeedCreationRequest{
|
||||
FeedURL: "",
|
||||
CategoryID: categories[0].ID,
|
||||
})
|
||||
if err == nil {
|
||||
t.Fatal(`Feeds should not be created with an empty feed URL`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreateFeedWithInvalidFeedURL(t *testing.T) {
|
||||
client := createClient(t)
|
||||
categories, err := client.Categories()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
_, err = client.CreateFeed(&miniflux.FeedCreationRequest{
|
||||
FeedURL: "invalid",
|
||||
CategoryID: categories[0].ID,
|
||||
})
|
||||
if err == nil {
|
||||
t.Fatal(`Feeds should not be created with an invalid feed URL`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreateDisabledFeed(t *testing.T) {
|
||||
client := createClient(t)
|
||||
|
||||
categories, err := client.Categories()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
|
||||
FeedURL: testFeedURL,
|
||||
CategoryID: categories[0].ID,
|
||||
Disabled: true,
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if feedID == 0 {
|
||||
t.Fatalf(`Invalid feed ID, got %q`, feedID)
|
||||
}
|
||||
|
||||
feed, err := client.Feed(feedID)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !feed.Disabled {
|
||||
t.Error(`The feed should be disabled`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreateFeedWithDisabledCache(t *testing.T) {
|
||||
client := createClient(t)
|
||||
|
||||
categories, err := client.Categories()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
|
||||
FeedURL: testFeedURL,
|
||||
CategoryID: categories[0].ID,
|
||||
IgnoreHTTPCache: true,
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if feedID == 0 {
|
||||
t.Fatalf(`Invalid feed ID, got %q`, feedID)
|
||||
}
|
||||
|
||||
feed, err := client.Feed(feedID)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !feed.IgnoreHTTPCache {
|
||||
t.Error(`The feed should be ignoring HTTP cache`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreateFeedWithCrawlerEnabled(t *testing.T) {
|
||||
client := createClient(t)
|
||||
|
||||
categories, err := client.Categories()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
|
||||
FeedURL: testFeedURL,
|
||||
CategoryID: categories[0].ID,
|
||||
Crawler: true,
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if feedID == 0 {
|
||||
t.Fatalf(`Invalid feed ID, got %q`, feedID)
|
||||
}
|
||||
|
||||
feed, err := client.Feed(feedID)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !feed.Crawler {
|
||||
t.Error(`The feed should have the scraper enabled`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreateFeedWithSelfSignedCertificatesAllowed(t *testing.T) {
|
||||
client := createClient(t)
|
||||
|
||||
categories, err := client.Categories()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
|
||||
FeedURL: testFeedURL,
|
||||
CategoryID: categories[0].ID,
|
||||
AllowSelfSignedCertificates: true,
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if feedID == 0 {
|
||||
t.Fatalf(`Invalid feed ID, got %q`, feedID)
|
||||
}
|
||||
|
||||
feed, err := client.Feed(feedID)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !feed.AllowSelfSignedCertificates {
|
||||
t.Error(`The feed should have self-signed certificates enabled`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreateFeedWithScraperRule(t *testing.T) {
|
||||
client := createClient(t)
|
||||
|
||||
categories, err := client.Categories()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
|
||||
FeedURL: testFeedURL,
|
||||
CategoryID: categories[0].ID,
|
||||
ScraperRules: "article",
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if feedID == 0 {
|
||||
t.Fatalf(`Invalid feed ID, got %q`, feedID)
|
||||
}
|
||||
|
||||
feed, err := client.Feed(feedID)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if feed.ScraperRules != "article" {
|
||||
t.Error(`The feed should have the custom scraper rule saved`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreateFeedWithKeeplistRule(t *testing.T) {
|
||||
client := createClient(t)
|
||||
|
||||
categories, err := client.Categories()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
|
||||
FeedURL: testFeedURL,
|
||||
CategoryID: categories[0].ID,
|
||||
KeeplistRules: "(?i)miniflux",
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if feedID == 0 {
|
||||
t.Fatalf(`Invalid feed ID, got %q`, feedID)
|
||||
}
|
||||
|
||||
feed, err := client.Feed(feedID)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if feed.KeeplistRules != "(?i)miniflux" {
|
||||
t.Error(`The feed should have the custom keep list rule saved`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreateFeedWithInvalidBlocklistRule(t *testing.T) {
|
||||
client := createClient(t)
|
||||
|
||||
categories, err := client.Categories()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
_, err = client.CreateFeed(&miniflux.FeedCreationRequest{
|
||||
FeedURL: testFeedURL,
|
||||
CategoryID: categories[0].ID,
|
||||
BlocklistRules: "[",
|
||||
})
|
||||
if err == nil {
|
||||
t.Fatal(`Feed with invalid block list rule should not be created`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedURL(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
url := "https://www.example.org/feed.xml"
|
||||
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{FeedURL: &url})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.FeedURL != url {
|
||||
t.Fatalf(`Wrong FeedURL, got %q instead of %q`, updatedFeed.FeedURL, url)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedWithEmptyFeedURL(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
url := ""
|
||||
if _, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{FeedURL: &url}); err == nil {
|
||||
t.Error(`Updating a feed with an empty feed URL should not be possible`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedWithInvalidFeedURL(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
url := "invalid"
|
||||
if _, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{FeedURL: &url}); err == nil {
|
||||
t.Error(`Updating a feed with an invalid feed URL should not be possible`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedSiteURL(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
url := "https://www.example.org/"
|
||||
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{SiteURL: &url})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.SiteURL != url {
|
||||
t.Fatalf(`Wrong SiteURL, got %q instead of %q`, updatedFeed.SiteURL, url)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedWithEmptySiteURL(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
url := ""
|
||||
if _, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{SiteURL: &url}); err == nil {
|
||||
t.Error(`Updating a feed with an empty site URL should not be possible`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedWithInvalidSiteURL(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
url := "invalid"
|
||||
if _, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{SiteURL: &url}); err == nil {
|
||||
t.Error(`Updating a feed with an invalid site URL should not be possible`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedTitle(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
newTitle := "My new feed"
|
||||
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Title: &newTitle})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.Title != newTitle {
|
||||
t.Fatalf(`Wrong title, got %q instead of %q`, updatedFeed.Title, newTitle)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedWithEmptyTitle(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
title := ""
|
||||
if _, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Title: &title}); err == nil {
|
||||
t.Error(`Updating a feed with an empty title should not be possible`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedCrawler(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
crawler := true
|
||||
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Crawler: &crawler})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.Crawler != crawler {
|
||||
t.Fatalf(`Wrong crawler value, got "%v" instead of "%v"`, updatedFeed.Crawler, crawler)
|
||||
}
|
||||
|
||||
if updatedFeed.Title != feed.Title {
|
||||
t.Fatalf(`The titles should be the same after update`)
|
||||
}
|
||||
|
||||
crawler = false
|
||||
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Crawler: &crawler})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.Crawler != crawler {
|
||||
t.Fatalf(`Wrong crawler value, got "%v" instead of "%v"`, updatedFeed.Crawler, crawler)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedAllowSelfSignedCertificates(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
selfSigned := true
|
||||
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{AllowSelfSignedCertificates: &selfSigned})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.AllowSelfSignedCertificates != selfSigned {
|
||||
t.Fatalf(`Wrong AllowSelfSignedCertificates value, got "%v" instead of "%v"`, updatedFeed.AllowSelfSignedCertificates, selfSigned)
|
||||
}
|
||||
|
||||
selfSigned = false
|
||||
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{AllowSelfSignedCertificates: &selfSigned})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.AllowSelfSignedCertificates != selfSigned {
|
||||
t.Fatalf(`Wrong AllowSelfSignedCertificates value, got "%v" instead of "%v"`, updatedFeed.AllowSelfSignedCertificates, selfSigned)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedScraperRules(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
scraperRules := "test"
|
||||
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{ScraperRules: &scraperRules})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.ScraperRules != scraperRules {
|
||||
t.Fatalf(`Wrong ScraperRules value, got "%v" instead of "%v"`, updatedFeed.ScraperRules, scraperRules)
|
||||
}
|
||||
|
||||
scraperRules = ""
|
||||
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{ScraperRules: &scraperRules})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.ScraperRules != scraperRules {
|
||||
t.Fatalf(`Wrong ScraperRules value, got "%v" instead of "%v"`, updatedFeed.ScraperRules, scraperRules)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedRewriteRules(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
rewriteRules := "test"
|
||||
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{RewriteRules: &rewriteRules})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.RewriteRules != rewriteRules {
|
||||
t.Fatalf(`Wrong RewriteRules value, got "%v" instead of "%v"`, updatedFeed.RewriteRules, rewriteRules)
|
||||
}
|
||||
|
||||
rewriteRules = ""
|
||||
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{RewriteRules: &rewriteRules})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.RewriteRules != rewriteRules {
|
||||
t.Fatalf(`Wrong RewriteRules value, got "%v" instead of "%v"`, updatedFeed.RewriteRules, rewriteRules)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedKeeplistRules(t *testing.T) {
|
||||
client := createClient(t)
|
||||
feed, _ := createFeed(t, client)
|
||||
|
||||
keeplistRules := "test"
|
||||
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{KeeplistRules: &keeplistRules})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.KeeplistRules != keeplistRules {
|
||||
t.Fatalf(`Wrong KeeplistRules value, got "%v" instead of "%v"`, updatedFeed.KeeplistRules, keeplistRules)
|
||||
}
|
||||
|
||||
keeplistRules = ""
|
||||
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{KeeplistRules: &keeplistRules})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if updatedFeed.KeeplistRules != keeplistRules {
|
||||
t.Fatalf(`Wrong KeeplistRules value, got "%v" instead of "%v"`, updatedFeed.KeeplistRules, keeplistRules)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateFeedUserAgent(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)

	userAgent := "test"
	updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{UserAgent: &userAgent})
	if err != nil {
		t.Fatal(err)
	}

	if updatedFeed.UserAgent != userAgent {
		t.Fatalf(`Wrong UserAgent value, got "%v" instead of "%v"`, updatedFeed.UserAgent, userAgent)
	}

	userAgent = ""
	updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{UserAgent: &userAgent})
	if err != nil {
		t.Fatal(err)
	}

	if updatedFeed.UserAgent != userAgent {
		t.Fatalf(`Wrong UserAgent value, got "%v" instead of "%v"`, updatedFeed.UserAgent, userAgent)
	}
}
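
// TestUpdateFeedCookie checks that a feed's custom Cookie value can be set and then cleared again.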
func TestUpdateFeedCookie(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)

	cookie := "test"
	updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Cookie: &cookie})
	if err != nil {
		t.Fatal(err)
	}

	if updatedFeed.Cookie != cookie {
		t.Fatalf(`Wrong Cookie value, got "%v" instead of "%v"`, updatedFeed.Cookie, cookie)
	}

	cookie = ""
	updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Cookie: &cookie})
	if err != nil {
		t.Fatal(err)
	}

	if updatedFeed.Cookie != cookie {
		t.Fatalf(`Wrong Cookie value, got "%v" instead of "%v"`, updatedFeed.Cookie, cookie)
	}
}
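
// TestUpdateFeedUsername checks that a feed's username can be set and then cleared again.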
func TestUpdateFeedUsername(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)

	username := "test"
	updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Username: &username})
	if err != nil {
		t.Fatal(err)
	}

	if updatedFeed.Username != username {
		t.Fatalf(`Wrong Username value, got "%v" instead of "%v"`, updatedFeed.Username, username)
	}

	username = ""
	updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Username: &username})
	if err != nil {
		t.Fatal(err)
	}

	if updatedFeed.Username != username {
		t.Fatalf(`Wrong Username value, got "%v" instead of "%v"`, updatedFeed.Username, username)
	}
}
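
// TestUpdateFeedPassword checks that a feed's password can be set and then cleared again.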
func TestUpdateFeedPassword(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)

	password := "test"
	updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Password: &password})
	if err != nil {
		t.Fatal(err)
	}

	if updatedFeed.Password != password {
		t.Fatalf(`Wrong Password value, got "%v" instead of "%v"`, updatedFeed.Password, password)
	}

	password = ""
	updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Password: &password})
	if err != nil {
		t.Fatal(err)
	}

	if updatedFeed.Password != password {
		t.Fatalf(`Wrong Password value, got "%v" instead of "%v"`, updatedFeed.Password, password)
	}
}
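
// TestUpdateFeedCategory checks that a feed can be moved to a newly created category.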
func TestUpdateFeedCategory(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)

	newCategory, err := client.CreateCategory("my new category")
	if err != nil {
		t.Fatal(err)
	}

	updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{CategoryID: &newCategory.ID})
	if err != nil {
		t.Fatal(err)
	}

	if updatedFeed.Category.ID != newCategory.ID {
		t.Fatalf(`Wrong CategoryID value, got "%v" instead of "%v"`, updatedFeed.Category.ID, newCategory.ID)
	}
}
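
// TestUpdateFeedWithEmptyCategoryID ensures that updating a feed with a category ID of zero is rejected.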
func TestUpdateFeedWithEmptyCategoryID(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)

	categoryID := int64(0)
	if _, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{CategoryID: &categoryID}); err == nil {
		t.Error(`Updating a feed with an empty category should not be possible`)
	}
}
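
// TestUpdateFeedWithInvalidCategoryID ensures that updating a feed with a negative category ID is rejected.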
func TestUpdateFeedWithInvalidCategoryID(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)

	categoryID := int64(-1)
	if _, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{CategoryID: &categoryID}); err == nil {
		t.Error(`Updating a feed with an invalid category should not be possible`)
	}
}
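
// TestMarkFeedAsRead verifies that marking a feed as read switches all of its entries to the "read" status.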
func TestMarkFeedAsRead(t *testing.T) {
	client := createClient(t)

	feed, _ := createFeed(t, client)

	results, err := client.FeedEntries(feed.ID, nil)
	if err != nil {
		t.Fatalf(`Failed to get entries: %v`, err)
	}
	if results.Total == 0 {
		t.Fatalf(`Invalid number of entries: %d`, results.Total)
	}
	if results.Entries[0].Status != miniflux.EntryStatusUnread {
		t.Fatalf(`Invalid entry status, got %q instead of %q`, results.Entries[0].Status, miniflux.EntryStatusUnread)
	}

	if err := client.MarkFeedAsRead(feed.ID); err != nil {
		t.Fatalf(`Failed to mark feed as read: %v`, err)
	}

	results, err = client.FeedEntries(feed.ID, nil)
	if err != nil {
		t.Fatalf(`Failed to get updated entries: %v`, err)
	}

	for _, entry := range results.Entries {
		if entry.Status != miniflux.EntryStatusRead {
			t.Errorf(`Status for entry %d was %q instead of %q`, entry.ID, entry.Status, miniflux.EntryStatusRead)
		}
	}
}
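
// TestFetchCounters verifies that the unread counter reported for a feed matches the number of unread entries returned by the API.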
func TestFetchCounters(t *testing.T) {
	client := createClient(t)

	feed, _ := createFeed(t, client)

	results, err := client.FeedEntries(feed.ID, nil)
	if err != nil {
		t.Fatalf(`Failed to get entries: %v`, err)
	}

	counters, err := client.FetchCounters()
	if err != nil {
		t.Fatalf(`Failed to fetch unread count: %v`, err)
	}

	// A feed without unread entries has no key in the map; indexing then yields the zero value, 0.
	unreadCounter := counters.UnreadCounters[feed.ID]

	unreadExpected := 0
	for _, entry := range results.Entries {
		if entry.Status == miniflux.EntryStatusUnread {
			unreadExpected++
		}
	}

	if unreadExpected != unreadCounter {
		t.Errorf(`Expected %d unread entries, got %d instead`, unreadExpected, unreadCounter)
	}
}
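
// TestDeleteFeed checks that a feed can be deleted.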
func TestDeleteFeed(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)
	if err := client.DeleteFeed(feed.ID); err != nil {
		t.Fatal(err)
	}
}
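
// TestRefreshFeed checks that a feed can be refreshed on demand.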
func TestRefreshFeed(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)
	if err := client.RefreshFeed(feed.ID); err != nil {
		t.Fatal(err)
	}
}
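
// TestGetFeed verifies that a newly created feed exposes the expected title, URLs, and category fields.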
func TestGetFeed(t *testing.T) {
	client := createClient(t)
	feed, category := createFeed(t, client)

	if feed.Title != testFeedTitle {
		t.Fatalf(`Invalid feed title, got "%v" instead of "%v"`, feed.Title, testFeedTitle)
	}

	if feed.SiteURL != testWebsiteURL {
		t.Fatalf(`Invalid site URL, got "%v" instead of "%v"`, feed.SiteURL, testWebsiteURL)
	}

	if feed.FeedURL != testFeedURL {
		t.Fatalf(`Invalid feed URL, got "%v" instead of "%v"`, feed.FeedURL, testFeedURL)
	}

	if feed.Category.ID != category.ID {
		t.Fatalf(`Invalid feed category ID, got "%v" instead of "%v"`, feed.Category.ID, category.ID)
	}

	if feed.Category.UserID != category.UserID {
		t.Fatalf(`Invalid feed category user ID, got "%v" instead of "%v"`, feed.Category.UserID, category.UserID)
	}

	if feed.Category.Title != category.Title {
		t.Fatalf(`Invalid feed category title, got "%v" instead of "%v"`, feed.Category.Title, category.Title)
	}
}
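
// TestGetFeedIcon fetches a feed icon both by feed ID and by icon ID, and checks its MIME type and data prefix.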
func TestGetFeedIcon(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)
	feedIcon, err := client.FeedIcon(feed.ID)
	if err != nil {
		t.Fatal(err)
	}

	if feedIcon.ID == 0 {
		t.Fatalf(`Invalid feed icon ID, got "%d"`, feedIcon.ID)
	}

	expectedMimeType := "image/x-icon"
	if feedIcon.MimeType != expectedMimeType {
		t.Fatalf(`Invalid feed icon mime type, got %q instead of %q`, feedIcon.MimeType, expectedMimeType)
	}

	if !strings.HasPrefix(feedIcon.Data, expectedMimeType) {
		t.Fatalf(`Invalid feed icon data, got "%v"`, feedIcon.Data)
	}

	feedIcon, err = client.Icon(feedIcon.ID)
	if err != nil {
		t.Fatal(err)
	}

	if feedIcon.MimeType != expectedMimeType {
		t.Fatalf(`Invalid feed icon mime type, got %q instead of %q`, feedIcon.MimeType, expectedMimeType)
	}

	if !strings.HasPrefix(feedIcon.Data, expectedMimeType) {
		t.Fatalf(`Invalid feed icon data, got "%v"`, feedIcon.Data)
	}
}
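
// TestGetFeedIconNotFound ensures that requesting the icon of a nonexistent feed returns an error.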
func TestGetFeedIconNotFound(t *testing.T) {
	client := createClient(t)
	if _, err := client.FeedIcon(42); err == nil {
		t.Fatalf(`Fetching the icon of a nonexistent feed should return an error`)
	}
}
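
// TestGetFeeds verifies that listing all feeds returns the created feed with the expected fields.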
func TestGetFeeds(t *testing.T) {
	client := createClient(t)
	feed, category := createFeed(t, client)

	feeds, err := client.Feeds()
	if err != nil {
		t.Fatal(err)
	}

	if len(feeds) != 1 {
		t.Fatalf(`Invalid number of feeds: %d`, len(feeds))
	}

	if feeds[0].ID != feed.ID {
		t.Fatalf(`Invalid feed ID, got "%v" instead of "%v"`, feeds[0].ID, feed.ID)
	}

	if feeds[0].Title != testFeedTitle {
		t.Fatalf(`Invalid feed title, got "%v" instead of "%v"`, feeds[0].Title, testFeedTitle)
	}

	if feeds[0].SiteURL != testWebsiteURL {
		t.Fatalf(`Invalid site URL, got "%v" instead of "%v"`, feeds[0].SiteURL, testWebsiteURL)
	}

	if feeds[0].FeedURL != testFeedURL {
		t.Fatalf(`Invalid feed URL, got "%v" instead of "%v"`, feeds[0].FeedURL, testFeedURL)
	}

	if feeds[0].Category.ID != category.ID {
		t.Fatalf(`Invalid feed category ID, got "%v" instead of "%v"`, feeds[0].Category.ID, category.ID)
	}

	if feeds[0].Category.UserID != category.UserID {
		t.Fatalf(`Invalid feed category user ID, got "%v" instead of "%v"`, feeds[0].Category.UserID, category.UserID)
	}

	if feeds[0].Category.Title != category.Title {
		t.Fatalf(`Invalid feed category title, got "%v" instead of "%v"`, feeds[0].Category.Title, category.Title)
	}
}
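
// TestGetFeedsByCategory verifies that listing the feeds of a category returns the created feed with the expected fields.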
func TestGetFeedsByCategory(t *testing.T) {
	client := createClient(t)
	feed, category := createFeed(t, client)

	feeds, err := client.CategoryFeeds(category.ID)
	if err != nil {
		t.Fatal(err)
	}

	if len(feeds) != 1 {
		t.Fatalf(`Invalid number of feeds: %d`, len(feeds))
	}

	if feeds[0].ID != feed.ID {
		t.Fatalf(`Invalid feed ID, got "%v" instead of "%v"`, feeds[0].ID, feed.ID)
	}

	if feeds[0].Title != testFeedTitle {
		t.Fatalf(`Invalid feed title, got "%v" instead of "%v"`, feeds[0].Title, testFeedTitle)
	}

	if feeds[0].SiteURL != testWebsiteURL {
		t.Fatalf(`Invalid site URL, got "%v" instead of "%v"`, feeds[0].SiteURL, testWebsiteURL)
	}

	if feeds[0].FeedURL != testFeedURL {
		t.Fatalf(`Invalid feed URL, got "%v" instead of "%v"`, feeds[0].FeedURL, testFeedURL)
	}

	if feeds[0].Category.ID != category.ID {
		t.Fatalf(`Invalid feed category ID, got "%v" instead of "%v"`, feeds[0].Category.ID, category.ID)
	}

	if feeds[0].Category.UserID != category.UserID {
		t.Fatalf(`Invalid feed category user ID, got "%v" instead of "%v"`, feeds[0].Category.UserID, category.UserID)
	}

	if feeds[0].Category.Title != category.Title {
		t.Fatalf(`Invalid feed category title, got "%v" instead of "%v"`, feeds[0].Category.Title, category.Title)
	}
}