diff --git a/.env b/.env
index 86f8410..96e9c60 100644
--- a/.env
+++ b/.env
@@ -5,3 +5,5 @@ CUPDATE_K8S_HOST=http://localhost:8001
CUPDATE_PROCESSING_INTERVAL=20s
CUPDATE_PROCESSING_ITEMS=1
CUPDATE_PROCESSING_MIN_AGE=2s
+
+CUPDATE_WEB_ADDRESS=http://localhost:5173
diff --git a/README.md b/README.md
index 391bb2c..ac0682e 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,7 @@ Features:
- Auto-detect container images in Kubernetes and Docker (planned)
- Auto-detect the latest available container image versions
- UI for discovering updates
-- Subscribe to updates via an RSS feed (planned)
+- Subscribe to updates via an RSS feed
- Graphs image versions' dependants explaining why they're in use
- Vulnerability scanning
- APIs for custom integrations
diff --git a/api.yaml b/api.yaml
index 46a5482..4e292c5 100644
--- a/api.yaml
+++ b/api.yaml
@@ -151,6 +151,15 @@ paths:
'201':
description: Accepted
+ /feed.rss:
+ get:
+ summary: Get an RSS feed of outdated images.
+      responses:
+ '200':
+ description: RSS feed.
+ content:
+ application/rss+xml:
+
components:
schemas:
ImagePage:
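
The endpoint above is plain RSS over HTTP, so it can be polled by any feed reader or fetched directly. Below is a minimal, illustrative Go sketch; the localhost host and port 8080 are assumptions taken from the `API_ADDRESS`/`API_PORT` defaults in docs/config.md, and the path matches the handler added in internal/api/server.go further down.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Fetch the RSS feed of outdated images (assumes a locally running Cupdate
	// with the default API address and port).
	resp, err := http.Get("http://localhost:8080/api/v1/feed.rss")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The handler responds with Content-Type: application/rss+xml.
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}

	fmt.Println(string(body))
}
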
diff --git a/cmd/cupdate/main.go b/cmd/cupdate/main.go
index 843789b..4ce2038 100644
--- a/cmd/cupdate/main.go
+++ b/cmd/cupdate/main.go
@@ -36,7 +36,8 @@ type Config struct {
} `envPrefix:"API_"`
Web struct {
- Disabled bool `env:"DISABLED"`
+ Disabled bool `env:"DISABLED"`
+ Address string `env:"ADDRESS"`
} `envPrefix:"WEB_"`
Cache struct {
@@ -298,6 +299,7 @@ func main() {
mux := http.NewServeMux()
apiServer := api.NewServer(readStore, processQueue)
+ apiServer.WebAddress = config.Web.Address
mux.Handle("/api/v1/", apiServer)
if !config.Web.Disabled {
diff --git a/docs/config.md b/docs/config.md
index 7d75e0a..88bdfbd 100644
--- a/docs/config.md
+++ b/docs/config.md
@@ -5,18 +5,19 @@
Cupdate requires zero configuration, but is very configurable. Configuration is
done using environment variables.
-| Environment variable | Description | Default |
-| -------------------------- | ----------------------------------------------------------------------- | ---------------- |
-| `LOG_LEVEL` | `debug`, `info`, `warn`, `error` | `info` |
-| `API_ADDRESS` | The address to expose the API on. | `0.0.0.0` |
-| `API_PORT` | The port to expose the API on. | `8080` |
-| `WEB_DISABLED` | Whether or not to disable the web UI. | `false` |
-| `CACHE_PATH` | A path to the boltdb file in which to store cache. | `cachev1.boltdb` |
-| `CACHE_MAX_AGE` | The maximum age of cache entries. | `24h` |
-| `DB_PATH` | A path to the sqlite file in which to store data. | `dbv1.sqlite` |
-| `PROCESSING_INTERVAL` | The interval between worker runs. | `1h` |
-| `PROCESSING_ITEMS` | The number of items (images) to process each worker run. | `10` |
-| `PROCESSING_MIN_AGE` | The minimum age of an item (image) before being processed. | `72h` |
-| `PROCESSING_TIMEOUT` | The maximum time one image may take to process before being terminated. | `2m` |
-| `K8S_HOST` | The host of the Kubernetes API. For use with proxying. | Required. |
-| `K8S_INCLUDE_OLD_REPLICAS` | Whether or not to include old replica sets when scraping. | `false` |
+| Environment variable | Description | Default |
+| -------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------- |
+| `LOG_LEVEL` | `debug`, `info`, `warn`, `error` | `info` |
+| `API_ADDRESS` | The address to expose the API on. | `0.0.0.0` |
+| `API_PORT` | The port to expose the API on. | `8080` |
+| `WEB_DISABLED` | Whether or not to disable the web UI. | `false` |
+| `WEB_ADDRESS`              | The URL at which the UI is available (such as `https://example.com`). Used for RSS feeds; should generally not be set. | Automatically resolved      |
+| `CACHE_PATH` | A path to the boltdb file in which to store cache. | `cachev1.boltdb` |
+| `CACHE_MAX_AGE` | The maximum age of cache entries. | `24h` |
+| `DB_PATH` | A path to the sqlite file in which to store data. | `dbv1.sqlite` |
+| `PROCESSING_INTERVAL` | The interval between worker runs. | `1h` |
+| `PROCESSING_ITEMS` | The number of items (images) to process each worker run. | `10` |
+| `PROCESSING_MIN_AGE` | The minimum age of an item (image) before being processed. | `72h` |
+| `PROCESSING_TIMEOUT` | The maximum time one image may take to process before being terminated. | `2m` |
+| `K8S_HOST` | The host of the Kubernetes API. For use with proxying. | Required. |
+| `K8S_INCLUDE_OLD_REPLICAS` | Whether or not to include old replica sets when scraping. | `false` |
diff --git a/internal/api/server.go b/internal/api/server.go
index 2ffa678..f58568d 100644
--- a/internal/api/server.go
+++ b/internal/api/server.go
@@ -2,12 +2,17 @@ package api
import (
"encoding/json"
+ "encoding/xml"
"errors"
+ "fmt"
"log/slog"
"net/http"
+ "net/url"
"strconv"
+ "github.com/AlexGustafsson/cupdate/internal/httputil"
"github.com/AlexGustafsson/cupdate/internal/registry/oci"
+ "github.com/AlexGustafsson/cupdate/internal/rss"
"github.com/AlexGustafsson/cupdate/internal/store"
)
@@ -19,6 +24,8 @@ var (
type Server struct {
api *store.Store
mux *http.ServeMux
+
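+	// WebAddress optionally overrides the automatically resolved request URL
+	// used when building links in the RSS feed.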
+ WebAddress string
}
func NewServer(api *store.Store, processQueue chan<- oci.Reference) *Server {
@@ -135,6 +142,77 @@ func NewServer(api *store.Store, processQueue chan<- oci.Reference) *Server {
w.WriteHeader(http.StatusAccepted)
})
+ s.mux.HandleFunc("GET /api/v1/feed.rss", func(w http.ResponseWriter, r *http.Request) {
+ var requestURL *url.URL
+ var err error
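+		// Prefer the configured web address; otherwise resolve the public URL
+		// from the request itself, honoring the X-Forwarded-Host and
+		// X-Forwarded-Proto headers.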
+ if s.WebAddress == "" {
+ requestURL, err = httputil.ResolveRequestURL(r)
+ } else {
+ requestURL, err = url.Parse(s.WebAddress)
+ }
+ if err != nil {
+ s.handleGenericResponse(w, r, ErrBadRequest)
+ return
+ }
+
+ // TODO: When we support other sort properties (like latest release), sort
+ // by that
+ // TODO: We currently use the default count. IIRC, it's good practice in RSS
+ // to return just the latest ~20 items.
+ options := &store.ListImageOptions{
+ Tags: []string{"outdated"},
+ }
+
+ page, err := api.ListImages(r.Context(), options)
+ if err != nil {
+ s.handleGenericResponse(w, r, err)
+ return
+ }
+
+ items := make([]rss.Item, len(page.Images))
+ for i, image := range page.Images {
+ ref, err := oci.ParseReference(image.LatestReference)
+ if err != nil {
+ s.handleGenericResponse(w, r, err)
+ return
+ }
+
+ items[i] = rss.Item{
+ GUID: rss.NewDeterministicGUID(image.Reference),
+ // TODO: Use image update time instead
+ PubDate: rss.Time(image.LastModified),
+ Title: fmt.Sprintf("%s updated", ref.Name()),
+ Link: requestURL.Scheme + "://" + requestURL.Host + "/image?reference=" + url.QueryEscape(image.Reference),
+ Description: fmt.Sprintf("%s updated to %s", ref.Name(), ref.Version()),
+ }
+ }
+
+ feed := rss.Feed{
+ Version: "2.0",
+ Channels: []rss.Channel{
+ {
+ Title: "Cupdate",
+ Link: requestURL.Scheme + "://" + requestURL.Host,
+ Description: "Container images discovered by Cupdate",
+ Items: items,
+ },
+ },
+ }
+
+ w.Header().Set("Content-Type", "application/rss+xml")
+ w.WriteHeader(http.StatusOK)
+
+ encoder := xml.NewEncoder(w)
+ encoder.Indent("", "\t")
+
+ if _, err := w.Write([]byte(xml.Header)); err != nil {
+ return
+ }
+ if err := encoder.Encode(&feed); err != nil {
+ return
+ }
+ })
+
return s
}
diff --git a/internal/httputil/url.go b/internal/httputil/url.go
new file mode 100644
index 0000000..a539098
--- /dev/null
+++ b/internal/httputil/url.go
@@ -0,0 +1,32 @@
+package httputil
+
+import (
+ "net/http"
+ "net/url"
+)
+
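+// ResolveRequestURL resolves the URL the client used to reach the server,
+// preferring the X-Forwarded-Host and X-Forwarded-Proto headers over the
+// values seen directly by the server.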
+func ResolveRequestURL(r *http.Request) (*url.URL, error) {
+ scheme := "http"
+ if r.TLS != nil {
+ scheme = "https"
+ }
+
+ host := r.Host
+
+ if header := r.Header.Get("X-Forwarded-Host"); header != "" {
+ host = header
+ }
+
+ if header := r.Header.Get("X-Forwarded-Proto"); header != "" {
+ if header == "http" || header == "https" {
+ scheme = header
+ }
+ }
+
+ base, err := url.Parse(scheme + "://" + host)
+ if err != nil {
+ return nil, err
+ }
+
+ return base.ResolveReference(r.URL), nil
+}
diff --git a/internal/httputil/url_test.go b/internal/httputil/url_test.go
new file mode 100644
index 0000000..ad26b1f
--- /dev/null
+++ b/internal/httputil/url_test.go
@@ -0,0 +1,60 @@
+package httputil
+
+import (
+ "net/http"
+ "net/url"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestResolveRequestURL(t *testing.T) {
+ testCases := []struct {
+ Name string
+ Request *http.Request
+ Expected *url.URL
+ }{
+ {
+ Name: "localhost",
+ Request: &http.Request{
+ Host: "localhost:8080",
+ URL: &url.URL{
+ Path: "/api/v1/feed.rss",
+ },
+ Header: http.Header{},
+ },
+ Expected: &url.URL{
+ Scheme: "http",
+ Host: "localhost:8080",
+ Path: "/api/v1/feed.rss",
+ },
+ },
+ {
+ Name: "proxied",
+ Request: &http.Request{
+ Host: "localhost:8080",
+ URL: &url.URL{
+ Path: "/api/v1/feed.rss",
+ },
+ Header: http.Header{
+ "X-Forwarded-Host": []string{"example.com"},
+ "X-Forwarded-Proto": []string{"https"},
+ },
+ },
+ Expected: &url.URL{
+ Scheme: "https",
+ Host: "example.com",
+ Path: "/api/v1/feed.rss",
+ },
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.Name, func(t *testing.T) {
+ actual, err := ResolveRequestURL(testCase.Request)
+ require.NoError(t, err)
+ assert.Equal(t, testCase.Expected, actual)
+ })
+ }
+}
diff --git a/internal/rss/rss.go b/internal/rss/rss.go
new file mode 100644
index 0000000..cd78a2b
--- /dev/null
+++ b/internal/rss/rss.go
@@ -0,0 +1,55 @@
+package rss
+
+import (
+ "encoding/xml"
+ "time"
+)
+
+var rfc2822 = "Mon, 02 Jan 2006 15:04:05 MST"
+
+type Feed struct {
+ XMLName xml.Name `xml:"rss"`
+ Version string `xml:"version,attr"`
+
+ Channels []Channel `xml:"channel"`
+}
+
+type Channel struct {
+ XMLName xml.Name `xml:"channel"`
+ Title string `xml:"title"`
+ Link string `xml:"link"`
+ Description string `xml:"description"`
+ Items []Item `xml:"item"`
+}
+
+type Item struct {
+ XMLName xml.Name `xml:"item"`
+ GUID string `xml:"guid"`
+ PubDate Time `xml:"pubDate"`
+ Title string `xml:"title"`
+ Link string `xml:"link"`
+ Description string `xml:"description"`
+}
+
+// Time represents an RFC 2822 time, as used by RSS.
+type Time time.Time
+
+func (t Time) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
+ return e.EncodeElement(time.Time(t).Format(rfc2822), start)
+}
+
+func (t *Time) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
+ var value string
+ err := d.DecodeElement(&value, &start)
+ if err != nil {
+ return err
+ }
+
+ time, err := time.Parse(rfc2822, value)
+ if err != nil {
+ return err
+ }
+
+ *t = Time(time)
+ return nil
+}
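
For reference, marshaling these types with encoding/xml yields a feed shaped roughly like the comment at the end of the sketch below. The values are made up for illustration and the output comment is approximate (the pubDate follows the RFC 2822 layout defined above); the test that follows exercises the same marshaling.

package rss

import (
	"encoding/xml"
	"os"
	"time"
)

// ExampleFeed is an illustrative sketch, not part of the change itself.
func ExampleFeed() {
	feed := Feed{
		Version: "2.0",
		Channels: []Channel{
			{
				Title:       "Cupdate",
				Link:        "https://example.com",
				Description: "Container images discovered by Cupdate",
				Items: []Item{
					{
						GUID:        "example-guid",
						PubDate:     Time(time.Date(2024, 12, 14, 12, 37, 0, 0, time.UTC)),
						Title:       "example/image updated",
						Link:        "https://example.com/image?reference=example%2Fimage%3A1.0.0",
						Description: "example/image updated to 1.0.1",
					},
				},
			},
		},
	}

	encoder := xml.NewEncoder(os.Stdout)
	encoder.Indent("", "\t")
	_ = encoder.Encode(&feed)

	// The output looks roughly like:
	//
	// <rss version="2.0">
	//   <channel>
	//     <title>Cupdate</title>
	//     <link>https://example.com</link>
	//     <description>Container images discovered by Cupdate</description>
	//     <item>
	//       <guid>example-guid</guid>
	//       <pubDate>Sat, 14 Dec 2024 12:37:00 UTC</pubDate>
	//       <title>example/image updated</title>
	//       <link>https://example.com/image?reference=example%2Fimage%3A1.0.0</link>
	//       <description>example/image updated to 1.0.1</description>
	//     </item>
	//   </channel>
	// </rss>
}
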
diff --git a/internal/rss/rss_test.go b/internal/rss/rss_test.go
new file mode 100644
index 0000000..356083f
--- /dev/null
+++ b/internal/rss/rss_test.go
@@ -0,0 +1,139 @@
+package rss
+
+import (
+ "encoding/xml"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMarshalRSS(t *testing.T) {
+ feed := Feed{
+ Version: "2.0",
+ Channels: []Channel{
+ {
+ Title: "First channel",
+ Link: "https://example.com/first-channel",
+ Description: "The first channel",
+ Items: []Item{
+ {
+ GUID: "1",
+ PubDate: Time(time.Date(2024, 12, 14, 12, 37, 0, 0, time.UTC)),
+ Title: "First item",
+ Link: "https://example.com/first-channel/first-item",
+ Description: "The first item",
+ },
+ {
+ GUID: "2",
+ PubDate: Time(time.Date(2024, 12, 14, 12, 37, 0, 0, time.UTC)),
+ Title: "Second item",
+ Link: "https://example.com/first-channel/second-item",
+ Description: "The second item",
+ },
+ },
+ },
+ },
+ }
+
+ expected := `