/*
Package nsfw provides detection of images that are "not safe for work" based on various categories.

Copyright (c) 2018 - 2025 PhotoPrism UG. All rights reserved.

This program is free software: you can redistribute it and/or modify
it under Version 3 of the GNU Affero General Public License (the "AGPL"):

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

The AGPL is supplemented by our Trademark and Brand Guidelines,
which describe how our Brand Assets may be used:

Feel free to send an email to hello@photoprism.app if you have questions,
want to support our work, or just want to say hello.

Additional information can be found in our Developer Guide:
*/
package nsfw

import (
	"github.com/photoprism/photoprism/internal/event"
)

// Thresholds for classifying NSFW scores.
const (
	ThresholdSafe   = 0.75
	ThresholdMedium = 0.85
	ThresholdHigh   = 0.98
)

var log = event.Log

// Result represents the classification scores returned by the NSFW model.
type Result struct {
	Drawing float32
	Hentai  float32
	Neutral float32
	Porn    float32
	Sexy    float32
}

// IsSafe returns true if the image is probably safe for work.
func (l *Result) IsSafe() bool {
	return !l.IsNsfw(ThresholdSafe)
}

// IsNsfw returns true if the image may not be safe for work.
func (l *Result) IsNsfw(threshold float32) bool {
	// Treat images with a clearly neutral score as safe.
	if l.Neutral > 0.25 {
		return false
	}

	// Flag the image if any explicit category exceeds the given threshold.
	if l.Porn > threshold || l.Sexy > threshold || l.Hentai > threshold {
		return true
	}

	return false
}
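
// isProbablyExplicit is a hypothetical helper, included here only as a usage
// sketch for Result, IsSafe, and IsNsfw; it is not part of the original
// package API. It assumes callers first rely on the default safety check and
// then apply the stricter high-confidence threshold before flagging an image.
func isProbablyExplicit(result Result) bool {
	// Images that pass the default safety check are never flagged.
	if result.IsSafe() {
		return false
	}

	// Otherwise, require the high-confidence threshold before flagging the image.
	return result.IsNsfw(ThresholdHigh)
}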