Backend: Disable NSFW detection by default to boost performance

Signed-off-by: Michael Mayer <michael@liquidbytes.net>
Michael Mayer 2020-01-13 16:48:32 +01:00
parent 74a14e265f
commit b5424d3b22
10 changed files with 61 additions and 60 deletions
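The practical effect of this change: the old hide-nsfw option is replaced by detect-nsfw (environment variable PHOTOPRISM_DETECT_NSFW, YAML key detect-nsfw), and detection now stays off unless that option is set, while the test fixtures enable it explicitly. Below is a minimal sketch of how the new boolean flag behaves, assuming the urfave/cli v1 package implied by the flag definition in this diff; the import path and the standalone program are illustrative assumptions, not PhotoPrism code.

// Minimal sketch (illustrative only): the new --detect-nsfw flag defaults to
// false, so NSFW detection is disabled unless the flag or its environment
// variable is set. Flag name, usage text, and env var are taken from this
// commit; everything else here is an assumption for demonstration.
package main

import (
	"fmt"
	"os"

	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Flags = []cli.Flag{
		cli.BoolFlag{
			Name:   "detect-nsfw", // replaces the old "hide-nsfw" flag
			Usage:  "flag photos that may be offensive",
			EnvVar: "PHOTOPRISM_DETECT_NSFW",
		},
	}
	app.Action = func(ctx *cli.Context) error {
		// Prints "false" by default; "true" when run with --detect-nsfw
		// or with PHOTOPRISM_DETECT_NSFW=true in the environment.
		fmt.Println("detect-nsfw:", ctx.Bool("detect-nsfw"))
		return nil
	}

	if err := app.Run(os.Args); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}

In a YAML config file, the same option maps to the detect-nsfw key via the struct tag added to Params further down in this commit.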

View file

@@ -115,7 +115,6 @@ Thank you very much! <3
We spent weeks asking organizations like [The Prototype Fund](https://prototypefund.de/en/) for help
and also tried to cooperate with companies like Mapbox and Cewe.
Some conversations were good without leading to a sponsorship yet, others were a waste of time.
You would think it's easier to get a few dollars with [our background](http://docs.photoprism.org/en/latest/team/)
and [working code](https://demo.photoprism.org/).

View file

@@ -1,8 +1,9 @@
package classify
import (
"github.com/stretchr/testify/assert"
"testing"
"github.com/stretchr/testify/assert"
)
func TestLabelRule_Find(t *testing.T) {

View file

@@ -71,7 +71,7 @@ func configAction(ctx *cli.Context) error {
fmt.Printf("exiftool-bin %s\n", conf.ExifToolBin())
fmt.Printf("heifconvert-bin %s\n", conf.HeifConvertBin())
fmt.Printf("hide-nsfw %t\n", conf.HideNSFW())
fmt.Printf("detect-nsfw %t\n", conf.DetectNSFW())
fmt.Printf("upload-nsfw %t\n", conf.UploadNSFW())
fmt.Printf("geocoding-api %s\n", conf.GeoCodingApi())
fmt.Printf("thumb-quality %d\n", conf.ThumbQuality())

View file

@@ -136,9 +136,9 @@ func (c *Config) ReadOnly() bool {
return c.config.ReadOnly
}
// HideNSFW returns true if NSFW photos are hidden by default.
func (c *Config) HideNSFW() bool {
return c.config.HideNSFW
// DetectNSFW returns true if NSFW photos should be detected and flagged.
func (c *Config) DetectNSFW() bool {
return c.config.DetectNSFW
}
// UploadNSFW returns true if NSFW photos can be uploaded.

View file

@@ -262,28 +262,28 @@ func TestConfig_ResourcesPath(t *testing.T) {
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/assets/resources", path)
}
func TestConfig_HideNSFW(t *testing.T) {
func TestConfig_DetectNSFW(t *testing.T) {
ctx := CliTestContext()
c := NewConfig(ctx)
hideNSFW := c.HideNSFW()
assert.Equal(t, false, hideNSFW)
result := c.DetectNSFW()
assert.Equal(t, true, result)
}
func TestConfig_AdminPassword(t *testing.T) {
ctx := CliTestContext()
c := NewConfig(ctx)
hideNSFW := c.AdminPassword()
assert.Equal(t, "photoprism", hideNSFW)
result := c.AdminPassword()
assert.Equal(t, "photoprism", result)
}
func TestConfig_NSFWModelPath(t *testing.T) {
ctx := CliTestContext()
c := NewConfig(ctx)
hideNSFW := c.NSFWModelPath()
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/assets/resources/nsfw", hideNSFW)
result := c.NSFWModelPath()
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/assets/resources/nsfw", result)
}
func TestConfig_ExamplesPath(t *testing.T) {

View file

@@ -210,9 +210,9 @@ var GlobalFlags = []cli.Flag{
EnvVar: "PHOTOPRISM_SQL_PASSWORD",
},
cli.BoolFlag{
Name: "hide-nsfw",
Usage: "hide photos that may be offensive",
EnvVar: "PHOTOPRISM_HIDE_NSFW",
Name: "detect-nsfw",
Usage: "flag photos that may be offensive",
EnvVar: "PHOTOPRISM_DETECT_NSFW",
},
cli.BoolFlag{
Name: "upload-nsfw",

View file

@@ -65,17 +65,17 @@ type Params struct {
DarktableBin string `yaml:"darktable-bin" flag:"darktable-bin"`
ExifToolBin string `yaml:"exiftool-bin" flag:"exiftool-bin"`
HeifConvertBin string `yaml:"heifconvert-bin" flag:"heifconvert-bin"`
PIDFilename string `yaml:"pid-filename" flag:"pid-filename"`
LogFilename string `yaml:"log-filename" flag:"log-filename"`
DetachServer bool `yaml:"detach-server" flag:"detach-server"`
HideNSFW bool `yaml:"hide-nsfw" flag:"hide-nsfw"`
UploadNSFW bool `yaml:"upload-nsfw" flag:"upload-nsfw"`
DisableTensorFlow bool `yaml:"tf-disabled" flag:"tf-disabled"`
GeoCodingApi string `yaml:"geocoding-api" flag:"geocoding-api"`
ThumbQuality int `yaml:"thumb-quality" flag:"thumb-quality"`
ThumbSize int `yaml:"thumb-size" flag:"thumb-size"`
ThumbLimit int `yaml:"thumb-limit" flag:"thumb-limit"`
ThumbFilter string `yaml:"thumb-filter" flag:"thumb-filter"`
PIDFilename string `yaml:"pid-filename" flag:"pid-filename"`
LogFilename string `yaml:"log-filename" flag:"log-filename"`
DetachServer bool `yaml:"detach-server" flag:"detach-server"`
DetectNSFW bool `yaml:"detect-nsfw" flag:"detect-nsfw"`
UploadNSFW bool `yaml:"upload-nsfw" flag:"upload-nsfw"`
DisableTensorFlow bool `yaml:"tf-disabled" flag:"tf-disabled"`
GeoCodingApi string `yaml:"geocoding-api" flag:"geocoding-api"`
ThumbQuality int `yaml:"thumb-quality" flag:"thumb-quality"`
ThumbSize int `yaml:"thumb-size" flag:"thumb-size"`
ThumbLimit int `yaml:"thumb-limit" flag:"thumb-limit"`
ThumbFilter string `yaml:"thumb-filter" flag:"thumb-filter"`
}
// NewParams() creates a new configuration entity by using two methods:

View file

@@ -37,7 +37,7 @@ func NewTestParams() *Params {
c := &Params{
Public: true,
ReadOnly: false,
HideNSFW: false,
DetectNSFW: true,
UploadNSFW: false,
DarktableBin: "/usr/bin/darktable-cli",
AssetsPath: assetsPath,
@@ -125,6 +125,7 @@ func CliTestContext() *cli.Context {
globalSet.String("assets-path", config.AssetsPath, "doc")
globalSet.String("originals-path", config.OriginalsPath, "doc")
globalSet.String("darktable-cli", config.DarktableBin, "doc")
globalSet.Bool("detect-nsfw", config.DetectNSFW, "doc")
app := cli.NewApp()
@@ -134,6 +135,7 @@ func CliTestContext() *cli.Context {
c.Set("assets-path", config.AssetsPath)
c.Set("originals-path", config.OriginalsPath)
c.Set("darktable-cli", config.DarktableBin)
c.Set("detect-nsfw", "true")
return c
}

View file

@@ -76,8 +76,6 @@ func (t *Detector) Labels(img []byte) (result Labels, err error) {
return result, errors.New("result is empty")
}
log.Infof("output: %+v", output[0].Value())
// Return best labels
result = t.getLabels(output[0].Value().([][]float32)[0])

View file

@@ -2,7 +2,6 @@ package photoprism
import (
"fmt"
"math"
"path/filepath"
"sort"
"strings"
@@ -33,7 +32,6 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions) IndexResult {
var metaData meta.Data
var photoQuery, fileQuery *gorm.DB
var keywords []string
var isNSFW bool
labels := classify.Labels{}
fileBase := m.Basename()
@@ -90,9 +88,9 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions) IndexResult {
if file.FilePrimary {
if !ind.conf.TensorFlowDisabled() && (fileChanged || o.UpdateKeywords || o.UpdateLabels || o.UpdateTitle) {
// Image classification labels
labels, isNSFW = ind.classifyImage(m)
photo.PhotoNSFW = isNSFW
// Image classification via TensorFlow
labels = ind.classifyImage(m)
photo.PhotoNSFW = ind.isNSFW(m)
}
if fileChanged || o.UpdateExif {
@@ -248,8 +246,34 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions) IndexResult {
return indexResultAdded
}
// isNSFW returns true if media file might be offensive and detection is enabled.
func (ind *Index) isNSFW(jpeg *MediaFile) bool {
if !ind.conf.DetectNSFW() {
return false
}
filename, err := jpeg.Thumbnail(ind.thumbnailsPath(), "fit_720")
if err != nil {
log.Error(err)
return false
}
if nsfwLabels, err := ind.nsfwDetector.File(filename); err != nil {
log.Error(err)
return false
} else {
if nsfwLabels.NSFW() {
log.Warnf("index: \"%s\" might contain offensive content", jpeg.Filename())
return true
}
}
return false
}
// classifyImage returns all matching labels for a media file.
func (ind *Index) classifyImage(jpeg *MediaFile) (results classify.Labels, isNSFW bool) {
func (ind *Index) classifyImage(jpeg *MediaFile) (results classify.Labels) {
start := time.Now()
var thumbs []string
@@ -280,25 +304,6 @@ func (ind *Index) classifyImage(jpeg *MediaFile) (results classify.Labels, isNSF
labels = append(labels, imageLabels...)
}
if filename, err := jpeg.Thumbnail(ind.thumbnailsPath(), "fit_720"); err != nil {
log.Error(err)
} else {
if nsfwLabels, err := ind.nsfwDetector.File(filename); err != nil {
log.Error(err)
} else {
log.Infof("nsfw: %+v", nsfwLabels)
if nsfwLabels.NSFW() {
isNSFW = true
}
if nsfwLabels.Sexy > 0.85 {
uncertainty := 100 - int(math.Round(float64(nsfwLabels.Sexy*100)))
labels = append(labels, classify.Label{Name: "sexy", Source: "nsfw", Uncertainty: uncertainty, Priority: -1})
}
}
}
// Sort by priority and uncertainty
sort.Sort(labels)
@@ -314,15 +319,11 @@ func (ind *Index) classifyImage(jpeg *MediaFile) (results classify.Labels, isNSF
}
}
if isNSFW {
log.Info("index: image might contain offensive content")
}
elapsed := time.Since(start)
log.Debugf("index: image classification took %s", elapsed)
return results, isNSFW
return results
}
func (ind *Index) addLabels(photoId uint, labels classify.Labels) {