Mirror of https://github.com/photoprism/photoprism.git (synced 2025-12-12 08:44:04 +01:00)
Server: Add X-Robots-Tag response header to control search engines #4574
Signed-off-by: Michael Mayer <michael@photoprism.app>
@@ -31,12 +31,7 @@ func AddDownloadHeader(c *gin.Context, fileName string) {
 	c.Header(header.ContentDisposition, fmt.Sprintf("attachment; filename=%s", fileName))
 }
 
 // AddAuthTokenHeader adds an X-Auth-Token header to the response.
 func AddAuthTokenHeader(c *gin.Context, authToken string) {
 	c.Header(header.XAuthToken, authToken)
 }
 
-// AddContentTypeHeader adds a content type header to the response.
+// AddContentTypeHeader adds a "Content-Type" header to the response.
 func AddContentTypeHeader(c *gin.Context, contentType string) {
 	c.Header(header.ContentType, contentType)
 }
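These helpers are thin wrappers around gin's c.Header(). For illustration, a minimal usage sketch follows, written as if it lived in the same package as the helpers; the package name, route, and handler are hypothetical, and only the AddContentTypeHeader call itself comes from the code above:

package api

import (
	"net/http"

	"github.com/gin-gonic/gin"

	"github.com/photoprism/photoprism/pkg/header"
)

// GetStatusExample is a hypothetical handler that sets the Content-Type
// header through the AddContentTypeHeader helper before writing the body.
func GetStatusExample(router *gin.RouterGroup) {
	router.GET("/status-example", func(c *gin.Context) {
		AddContentTypeHeader(c, header.ContentTypeJsonUtf8)
		// gin only applies a default Content-Type when none has been set yet,
		// so the value above is kept in the response.
		c.String(http.StatusOK, `{"status": "operational"}`)
	})
}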
internal/server/robots.go (new file, 18 lines added)
@@ -0,0 +1,18 @@
+package server
+
+import (
+	"github.com/gin-gonic/gin"
+
+	"github.com/photoprism/photoprism/internal/config"
+	"github.com/photoprism/photoprism/pkg/header"
+)
+
+// Robots is a middleware that adds a "X-Robots-Tag" header to the response:
+// https://developers.google.com/search/docs/crawling-indexing/robots-meta-tag#xrobotstag
+var Robots = func(conf *config.Config) gin.HandlerFunc {
+	return func(c *gin.Context) {
+		// Block search engines until a public picture wall has been implemented,
+		// see https://github.com/photoprism/photoprism/issues/669.
+		c.Header(header.Robots, header.RobotsNone)
+	}
+}
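A minimal sketch of how this middleware could be exercised with net/http/httptest; the test is illustrative rather than part of the commit, and passing nil for the configuration only works because the current implementation ignores its conf argument:

package server

import (
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/gin-gonic/gin"

	"github.com/photoprism/photoprism/pkg/header"
)

// TestRobotsHeader verifies that the middleware adds the X-Robots-Tag header.
func TestRobotsHeader(t *testing.T) {
	gin.SetMode(gin.TestMode)

	router := gin.New()
	// The middleware currently ignores its conf argument, so nil is sufficient here.
	router.Use(Robots(nil))
	router.GET("/", func(c *gin.Context) {
		c.Status(http.StatusOK)
	})

	w := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/", nil)
	router.ServeHTTP(w, req)

	if got := w.Header().Get(header.Robots); got != header.RobotsNone {
		t.Fatalf("expected %q, got %q", header.RobotsNone, got)
	}
}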
@@ -44,10 +44,15 @@ func Start(ctx context.Context, conf *config.Config) {
 		log.Warnf("server: %s", err)
 	}
 
-	// Register recovery and logger middleware.
-	router.Use(Recovery(), Logger())
+	// Register panic recovery middleware.
+	router.Use(Recovery())
 
-	// If enabled, register compression middleware.
+	// Register logger middleware if debug mode is enabled.
+	if conf.Debug() {
+		router.Use(Logger())
+	}
+
+	// Register compression middleware if enabled in the configuration.
 	switch conf.HttpCompression() {
 	case "br", "brotli":
 		log.Infof("server: brotli compression is currently not supported")
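The hunk above stops at the brotli case, so the remaining compression branches are not shown. For reference, a gzip branch in this kind of switch is commonly wired up with the github.com/gin-contrib/gzip middleware; the sketch below is illustrative and its function name is made up, not taken from the commit:

package server

import (
	"github.com/gin-contrib/gzip"
	"github.com/gin-gonic/gin"
)

// registerCompression is a hypothetical helper showing how such a switch
// could register response compression middleware.
func registerCompression(router *gin.Engine, method string) {
	switch method {
	case "gzip":
		// gin-contrib/gzip compresses responses for clients that send
		// a matching Accept-Encoding header.
		router.Use(gzip.Gzip(gzip.DefaultCompression))
	case "br", "brotli":
		// Brotli is currently not supported, matching the log message above.
	default:
		// No compression middleware is registered.
	}
}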
@@ -73,6 +78,9 @@ func Start(ctx context.Context, conf *config.Config) {
 	// Register security middleware.
 	router.Use(Security(conf))
 
+	// Register robots tag middleware.
+	router.Use(Robots(conf))
+
 	// Create REST API router group.
 	APIv1 = router.Group(conf.BaseUri(config.ApiUri), Api(conf))
 
@@ -14,4 +14,26 @@ func TestHeader(t *testing.T) {
 		assert.Equal(t, "Basic", AuthBasic)
 		assert.Equal(t, "Bearer", AuthBearer)
 	})
+	t.Run("Cdn", func(t *testing.T) {
+		assert.Equal(t, "Cdn-Host", CdnHost)
+		assert.Equal(t, "Cdn-Mobiledevice", CdnMobileDevice)
+		assert.Equal(t, "Cdn-Serverzone", CdnServerZone)
+		assert.Equal(t, "Cdn-Serverid", CdnServerID)
+		assert.Equal(t, "Cdn-Connectionid", CdnConnectionID)
+	})
+	t.Run("Content", func(t *testing.T) {
+		assert.Equal(t, "Origin", Origin)
+		assert.Equal(t, "Accept-Encoding", AcceptEncoding)
+		assert.Equal(t, "Content-Type", ContentType)
+		assert.Equal(t, "application/json; charset=utf-8", ContentTypeJsonUtf8)
+		assert.Equal(t, "multipart/form-data", ContentTypeMultipart)
+	})
+	t.Run("Robots", func(t *testing.T) {
+		assert.Equal(t, "X-Robots-Tag", Robots)
+		assert.Equal(t, "all", RobotsAll)
+		assert.Equal(t, "noindex, nofollow", RobotsNone)
+		assert.Equal(t, "noimageindex", RobotsNoImages)
+		assert.Equal(t, "noindex", RobotsNoIndex)
+		assert.Equal(t, "nofollow", RobotsNoFollow)
+	})
 }
pkg/header/robots.go (new file, 18 lines added)
@@ -0,0 +1,18 @@
+package header
+
+type RobotsRule = string
+
+// Robots controls how pages are indexed and crawled by search engines:
+// https://developers.google.com/search/docs/crawling-indexing/robots-meta-tag#xrobotstag
+const (
+	Robots = "X-Robots-Tag"
+)
+
+// Standard Robots header values.
+const (
+	RobotsAll      RobotsRule = "all"
+	RobotsNone     RobotsRule = "noindex, nofollow"
+	RobotsNoIndex  RobotsRule = "noindex"
+	RobotsNoFollow RobotsRule = "nofollow"
+	RobotsNoImages RobotsRule = "noimageindex"
+)
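Since the middleware sets a blanket "noindex, nofollow" on every response, a route that should eventually be crawlable (such as the public picture wall from issue #669) could overwrite the header later in the handler chain. A hypothetical sketch using the new constants; the route and function name are invented for illustration:

package server

import (
	"net/http"

	"github.com/gin-gonic/gin"

	"github.com/photoprism/photoprism/pkg/header"
)

// registerPublicWall is a hypothetical route that opts back in to indexing
// by overwriting the X-Robots-Tag value set by the Robots middleware.
func registerPublicWall(router *gin.Engine) {
	router.GET("/wall", func(c *gin.Context) {
		// c.Header uses Set, so this replaces the value written by the middleware.
		c.Header(header.Robots, header.RobotsAll)
		c.String(http.StatusOK, "public picture wall placeholder")
	})
}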