Backend: Disable NSFW detection by default to boost performance

Signed-off-by: Michael Mayer <michael@liquidbytes.net>
This commit is contained in:
Michael Mayer 2020-01-13 16:48:32 +01:00
parent 74a14e265f
commit b5424d3b22
10 changed files with 61 additions and 60 deletions

View file

@@ -115,7 +115,6 @@ Thank you very much! <3
We spent weeks asking organizations like [The Prototype Fund](https://prototypefund.de/en/) for help We spent weeks asking organizations like [The Prototype Fund](https://prototypefund.de/en/) for help
and also tried to cooperate with companies like Mapbox and Cewe. and also tried to cooperate with companies like Mapbox and Cewe.
Some conversations were good without leading to a sponsorship yet, others were a waste of time. Some conversations were good without leading to a sponsorship yet, others were a waste of time.
You would think it's easier to get a few dollars with [our background](http://docs.photoprism.org/en/latest/team/) You would think it's easier to get a few dollars with [our background](http://docs.photoprism.org/en/latest/team/)
and [working code](https://demo.photoprism.org/). and [working code](https://demo.photoprism.org/).

View file

@@ -1,8 +1,9 @@
package classify package classify
import ( import (
"github.com/stretchr/testify/assert"
"testing" "testing"
"github.com/stretchr/testify/assert"
) )
func TestLabelRule_Find(t *testing.T) { func TestLabelRule_Find(t *testing.T) {

View file

@@ -71,7 +71,7 @@ func configAction(ctx *cli.Context) error {
fmt.Printf("exiftool-bin %s\n", conf.ExifToolBin()) fmt.Printf("exiftool-bin %s\n", conf.ExifToolBin())
fmt.Printf("heifconvert-bin %s\n", conf.HeifConvertBin()) fmt.Printf("heifconvert-bin %s\n", conf.HeifConvertBin())
fmt.Printf("hide-nsfw %t\n", conf.HideNSFW()) fmt.Printf("detect-nsfw %t\n", conf.DetectNSFW())
fmt.Printf("upload-nsfw %t\n", conf.UploadNSFW()) fmt.Printf("upload-nsfw %t\n", conf.UploadNSFW())
fmt.Printf("geocoding-api %s\n", conf.GeoCodingApi()) fmt.Printf("geocoding-api %s\n", conf.GeoCodingApi())
fmt.Printf("thumb-quality %d\n", conf.ThumbQuality()) fmt.Printf("thumb-quality %d\n", conf.ThumbQuality())

View file

@@ -136,9 +136,9 @@ func (c *Config) ReadOnly() bool {
return c.config.ReadOnly return c.config.ReadOnly
} }
// HideNSFW returns true if NSFW photos are hidden by default. // DetectNSFW returns true if NSFW photos should be detected and flagged.
func (c *Config) HideNSFW() bool { func (c *Config) DetectNSFW() bool {
return c.config.HideNSFW return c.config.DetectNSFW
} }
// UploadNSFW returns true if NSFW photos can be uploaded. // UploadNSFW returns true if NSFW photos can be uploaded.

View file

@@ -262,28 +262,28 @@ func TestConfig_ResourcesPath(t *testing.T) {
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/assets/resources", path) assert.Equal(t, "/go/src/github.com/photoprism/photoprism/assets/resources", path)
} }
func TestConfig_HideNSFW(t *testing.T) { func TestConfig_DetectNSFW(t *testing.T) {
ctx := CliTestContext() ctx := CliTestContext()
c := NewConfig(ctx) c := NewConfig(ctx)
hideNSFW := c.HideNSFW() result := c.DetectNSFW()
assert.Equal(t, false, hideNSFW) assert.Equal(t, true, result)
} }
func TestConfig_AdminPassword(t *testing.T) { func TestConfig_AdminPassword(t *testing.T) {
ctx := CliTestContext() ctx := CliTestContext()
c := NewConfig(ctx) c := NewConfig(ctx)
hideNSFW := c.AdminPassword() result := c.AdminPassword()
assert.Equal(t, "photoprism", hideNSFW) assert.Equal(t, "photoprism", result)
} }
func TestConfig_NSFWModelPath(t *testing.T) { func TestConfig_NSFWModelPath(t *testing.T) {
ctx := CliTestContext() ctx := CliTestContext()
c := NewConfig(ctx) c := NewConfig(ctx)
hideNSFW := c.NSFWModelPath() result := c.NSFWModelPath()
assert.Equal(t, "/go/src/github.com/photoprism/photoprism/assets/resources/nsfw", hideNSFW) assert.Equal(t, "/go/src/github.com/photoprism/photoprism/assets/resources/nsfw", result)
} }
func TestConfig_ExamplesPath(t *testing.T) { func TestConfig_ExamplesPath(t *testing.T) {

View file

@@ -210,9 +210,9 @@ var GlobalFlags = []cli.Flag{
EnvVar: "PHOTOPRISM_SQL_PASSWORD", EnvVar: "PHOTOPRISM_SQL_PASSWORD",
}, },
cli.BoolFlag{ cli.BoolFlag{
Name: "hide-nsfw", Name: "detect-nsfw",
Usage: "hide photos that may be offensive", Usage: "flag photos that may be offensive",
EnvVar: "PHOTOPRISM_HIDE_NSFW", EnvVar: "PHOTOPRISM_DETECT_NSFW",
}, },
cli.BoolFlag{ cli.BoolFlag{
Name: "upload-nsfw", Name: "upload-nsfw",

View file

@@ -65,17 +65,17 @@ type Params struct {
DarktableBin string `yaml:"darktable-bin" flag:"darktable-bin"` DarktableBin string `yaml:"darktable-bin" flag:"darktable-bin"`
ExifToolBin string `yaml:"exiftool-bin" flag:"exiftool-bin"` ExifToolBin string `yaml:"exiftool-bin" flag:"exiftool-bin"`
HeifConvertBin string `yaml:"heifconvert-bin" flag:"heifconvert-bin"` HeifConvertBin string `yaml:"heifconvert-bin" flag:"heifconvert-bin"`
PIDFilename string `yaml:"pid-filename" flag:"pid-filename"` PIDFilename string `yaml:"pid-filename" flag:"pid-filename"`
LogFilename string `yaml:"log-filename" flag:"log-filename"` LogFilename string `yaml:"log-filename" flag:"log-filename"`
DetachServer bool `yaml:"detach-server" flag:"detach-server"` DetachServer bool `yaml:"detach-server" flag:"detach-server"`
HideNSFW bool `yaml:"hide-nsfw" flag:"hide-nsfw"` DetectNSFW bool `yaml:"detect-nsfw" flag:"detect-nsfw"`
UploadNSFW bool `yaml:"upload-nsfw" flag:"upload-nsfw"` UploadNSFW bool `yaml:"upload-nsfw" flag:"upload-nsfw"`
DisableTensorFlow bool `yaml:"tf-disabled" flag:"tf-disabled"` DisableTensorFlow bool `yaml:"tf-disabled" flag:"tf-disabled"`
GeoCodingApi string `yaml:"geocoding-api" flag:"geocoding-api"` GeoCodingApi string `yaml:"geocoding-api" flag:"geocoding-api"`
ThumbQuality int `yaml:"thumb-quality" flag:"thumb-quality"` ThumbQuality int `yaml:"thumb-quality" flag:"thumb-quality"`
ThumbSize int `yaml:"thumb-size" flag:"thumb-size"` ThumbSize int `yaml:"thumb-size" flag:"thumb-size"`
ThumbLimit int `yaml:"thumb-limit" flag:"thumb-limit"` ThumbLimit int `yaml:"thumb-limit" flag:"thumb-limit"`
ThumbFilter string `yaml:"thumb-filter" flag:"thumb-filter"` ThumbFilter string `yaml:"thumb-filter" flag:"thumb-filter"`
} }
// NewParams() creates a new configuration entity by using two methods: // NewParams() creates a new configuration entity by using two methods:

View file

@@ -37,7 +37,7 @@ func NewTestParams() *Params {
c := &Params{ c := &Params{
Public: true, Public: true,
ReadOnly: false, ReadOnly: false,
HideNSFW: false, DetectNSFW: true,
UploadNSFW: false, UploadNSFW: false,
DarktableBin: "/usr/bin/darktable-cli", DarktableBin: "/usr/bin/darktable-cli",
AssetsPath: assetsPath, AssetsPath: assetsPath,
@@ -125,6 +125,7 @@ func CliTestContext() *cli.Context {
globalSet.String("assets-path", config.AssetsPath, "doc") globalSet.String("assets-path", config.AssetsPath, "doc")
globalSet.String("originals-path", config.OriginalsPath, "doc") globalSet.String("originals-path", config.OriginalsPath, "doc")
globalSet.String("darktable-cli", config.DarktableBin, "doc") globalSet.String("darktable-cli", config.DarktableBin, "doc")
globalSet.Bool("detect-nsfw", config.DetectNSFW, "doc")
app := cli.NewApp() app := cli.NewApp()
@@ -134,6 +135,7 @@ func CliTestContext() *cli.Context {
c.Set("assets-path", config.AssetsPath) c.Set("assets-path", config.AssetsPath)
c.Set("originals-path", config.OriginalsPath) c.Set("originals-path", config.OriginalsPath)
c.Set("darktable-cli", config.DarktableBin) c.Set("darktable-cli", config.DarktableBin)
c.Set("detect-nsfw", "true")
return c return c
} }

View file

@@ -76,8 +76,6 @@ func (t *Detector) Labels(img []byte) (result Labels, err error) {
return result, errors.New("result is empty") return result, errors.New("result is empty")
} }
log.Infof("output: %+v", output[0].Value())
// Return best labels // Return best labels
result = t.getLabels(output[0].Value().([][]float32)[0]) result = t.getLabels(output[0].Value().([][]float32)[0])

View file

@@ -2,7 +2,6 @@ package photoprism
import ( import (
"fmt" "fmt"
"math"
"path/filepath" "path/filepath"
"sort" "sort"
"strings" "strings"
@@ -33,7 +32,6 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions) IndexResult {
var metaData meta.Data var metaData meta.Data
var photoQuery, fileQuery *gorm.DB var photoQuery, fileQuery *gorm.DB
var keywords []string var keywords []string
var isNSFW bool
labels := classify.Labels{} labels := classify.Labels{}
fileBase := m.Basename() fileBase := m.Basename()
@@ -90,9 +88,9 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions) IndexResult {
if file.FilePrimary { if file.FilePrimary {
if !ind.conf.TensorFlowDisabled() && (fileChanged || o.UpdateKeywords || o.UpdateLabels || o.UpdateTitle) { if !ind.conf.TensorFlowDisabled() && (fileChanged || o.UpdateKeywords || o.UpdateLabels || o.UpdateTitle) {
// Image classification labels // Image classification via TensorFlow
labels, isNSFW = ind.classifyImage(m) labels = ind.classifyImage(m)
photo.PhotoNSFW = isNSFW photo.PhotoNSFW = ind.isNSFW(m)
} }
if fileChanged || o.UpdateExif { if fileChanged || o.UpdateExif {
@@ -248,8 +246,34 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions) IndexResult {
return indexResultAdded return indexResultAdded
} }
// isNSFW returns true if the media file might contain offensive content.
// Detection is skipped (always false) unless detect-nsfw is enabled in the
// configuration; thumbnail or detector errors are logged and treated as
// "not NSFW" so indexing can continue.
func (ind *Index) isNSFW(jpeg *MediaFile) bool {
	if !ind.conf.DetectNSFW() {
		return false
	}

	// The NSFW detector operates on a fixed-size thumbnail of the image.
	filename, err := jpeg.Thumbnail(ind.thumbnailsPath(), "fit_720")
	if err != nil {
		log.Error(err)
		return false
	}

	nsfwLabels, err := ind.nsfwDetector.File(filename)
	if err != nil {
		log.Error(err)
		return false
	}

	if nsfwLabels.NSFW() {
		log.Warnf("index: \"%s\" might contain offensive content", jpeg.Filename())
		return true
	}

	return false
}
// classifyImage returns all matching labels for a media file. // classifyImage returns all matching labels for a media file.
func (ind *Index) classifyImage(jpeg *MediaFile) (results classify.Labels, isNSFW bool) { func (ind *Index) classifyImage(jpeg *MediaFile) (results classify.Labels) {
start := time.Now() start := time.Now()
var thumbs []string var thumbs []string
@@ -280,25 +304,6 @@ func (ind *Index) classifyImage(jpeg *MediaFile) (results classify.Labels, isNSF
labels = append(labels, imageLabels...) labels = append(labels, imageLabels...)
} }
if filename, err := jpeg.Thumbnail(ind.thumbnailsPath(), "fit_720"); err != nil {
log.Error(err)
} else {
if nsfwLabels, err := ind.nsfwDetector.File(filename); err != nil {
log.Error(err)
} else {
log.Infof("nsfw: %+v", nsfwLabels)
if nsfwLabels.NSFW() {
isNSFW = true
}
if nsfwLabels.Sexy > 0.85 {
uncertainty := 100 - int(math.Round(float64(nsfwLabels.Sexy*100)))
labels = append(labels, classify.Label{Name: "sexy", Source: "nsfw", Uncertainty: uncertainty, Priority: -1})
}
}
}
// Sort by priority and uncertainty // Sort by priority and uncertainty
sort.Sort(labels) sort.Sort(labels)
@@ -314,15 +319,11 @@ func (ind *Index) classifyImage(jpeg *MediaFile) (results classify.Labels, isNSF
} }
} }
if isNSFW {
log.Info("index: image might contain offensive content")
}
elapsed := time.Since(start) elapsed := time.Since(start)
log.Debugf("index: image classification took %s", elapsed) log.Debugf("index: image classification took %s", elapsed)
return results, isNSFW return results
} }
func (ind *Index) addLabels(photoId uint, labels classify.Labels) { func (ind *Index) addLabels(photoId uint, labels classify.Labels) {