server/image-hash: add image search engine
Commit 1a59a74d63 (parent b9fa64317d)
9 changed files with 145 additions and 4 deletions
@@ -9,6 +9,7 @@ user@host:~$ sudo pacman -S python
 user@host:~$ sudo pacman -S python-pip
 user@host:~$ sudo pacman -S ffmpeg
 user@host:~$ sudo pacman -S npm
+user@host:~$ sudo pacman -S elasticsearch
 user@host:~$ sudo pip install virtualenv
 user@host:~$ python --version
 Python 3.5.1
@@ -43,6 +44,13 @@ user@host:~$ sudo -i -u postgres psql -c "ALTER USER szuru PASSWORD 'dog';"
 ```
 
+### Setting up elasticsearch
+
+```console
+user@host:~$ sudo systemctl start elasticsearch
+user@host:~$ sudo systemctl enable elasticsearch
+```
+
 ### Preparing environment
 
 Getting `szurubooru`:
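As a quick sanity check that the service enabled above is actually reachable before continuing, a minimal sketch using the `elasticsearch` Python client (assumes the default `localhost:9200` binding; not part of this commit):

```python
# Minimal connectivity check, assuming elasticsearch listens on the
# default localhost:9200; es.ping() returns True when the node answers.
import elasticsearch

es = elasticsearch.Elasticsearch([{'host': 'localhost', 'port': 9200}])
print('elasticsearch reachable:', es.ping())
```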
@@ -32,6 +32,11 @@ smtp:
     user: # example: bot
     pass: # example: groovy123
 
+# used for reverse image search
+elasticsearch:
+    host: localhost
+    port: 9200
+
 limits:
     users_per_page: 20
     posts_per_page: 40
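The two new keys are consumed on the server side through szurubooru's parsed configuration dictionary; a short sketch of the lookup (the same one `image_hash.py` below performs at import time):

```python
# config.config is szurubooru's parsed YAML configuration; the new
# elasticsearch block is read as a plain nested dict.
from szurubooru import config

es_host = config.config['elasticsearch']['host']  # e.g. 'localhost'
es_port = config.config['elasticsearch']['port']  # e.g. 9200
print(es_host, es_port)
```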
@@ -7,3 +7,7 @@ pytest-cov>=2.2.1
 freezegun>=0.3.6
 coloredlogs==5.0
 pycodestyle>=2.0.0
+image-match>=1.1.0
+scipy>=0.18.1
+elasticsearch>=5.0.0
+elasticsearch-dsl>=5.0.0
@@ -5,6 +5,7 @@ import logging
 import coloredlogs
 import sqlalchemy.orm.exc
 from szurubooru import config, errors, rest
+from szurubooru.func import posts
 # pylint: disable=unused-import
 from szurubooru import api, middleware

@@ -87,6 +88,8 @@ def create_app():
     if config.config['show_sql']:
         logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
 
+    posts.populate_reverse_search()
+
     rest.errors.handle(errors.AuthError, _on_auth_error)
     rest.errors.handle(errors.ValidationError, _on_validation_error)
     rest.errors.handle(errors.SearchError, _on_search_error)
server/szurubooru/func/image_hash.py (new file, 59 lines)
@@ -0,0 +1,59 @@
+import elasticsearch
+import elasticsearch_dsl
+from image_match.elasticsearch_driver import SignatureES
+from szurubooru import config
+
+
+# pylint: disable=invalid-name
+es = elasticsearch.Elasticsearch([{
+    'host': config.config['elasticsearch']['host'],
+    'port': config.config['elasticsearch']['port'],
+}])
+session = SignatureES(es, index='szurubooru')
+
+
+def add_image(path, image_content):
+    if not path or not image_content:
+        return
+    session.add_image(path=path, img=image_content, bytestream=True)
+
+
+def delete_image(path):
+    if not path:
+        return
+    try:
+        es.delete_by_query(
+            index=session.index,
+            doc_type=session.doc_type,
+            body={'query': {'term': {'path': path}}})
+    except elasticsearch.exceptions.NotFoundError:
+        pass
+
+
+def search_by_image(image_content):
+    for result in session.search_image(
+            path=image_content,  # sic
+            bytestream=True):
+        yield {
+            'score': result['score'],
+            'dist': result['dist'],
+            'path': result['path'],
+        }
+
+
+def purge():
+    es.delete_by_query(
+        index=session.index,
+        doc_type=session.doc_type,
+        body={'query': {'match_all': {}}})
+
+
+def get_all_paths():
+    try:
+        search = (
+            elasticsearch_dsl.Search(
+                using=es, index=session.index, doc_type=session.doc_type)
+            .source(['path']))
+        return set(h.path for h in search.scan())
+    except elasticsearch.exceptions.NotFoundError:
+        return set()
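A hypothetical interactive session showing how the new module is meant to be driven; the post ID `1` and the `posts/1.jpg` location are made up for illustration, but the calls and the result keys match the file above:

```python
# Hypothetical usage of the new signature index; post IDs double as the
# 'path' document field, and image_content is the raw file bytes.
from szurubooru.func import files, image_hash

content = files.get('posts/1.jpg')        # hypothetical content path
image_hash.add_image(1, content)          # index post 1
for match in image_hash.search_by_image(content):
    print(match['path'], match['score'], match['dist'])
image_hash.delete_image(1)                # drop it from the index again
```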
@@ -2,7 +2,7 @@ import datetime
 import sqlalchemy
 from szurubooru import config, db, errors
 from szurubooru.func import (
-    users, scores, comments, tags, util, mime, images, files)
+    users, scores, comments, tags, util, mime, images, files, image_hash)
 
 
 EMPTY_PIXEL = \
@@ -260,13 +260,22 @@ def _after_post_update(_mapper, _connection, post):
     _sync_post_content(post)
 
 
+@sqlalchemy.events.event.listens_for(db.Post, 'before_delete')
+def _before_post_delete(_mapper, _connection, post):
+    image_hash.delete_image(post.post_id)
+
+
 def _sync_post_content(post):
     regenerate_thumb = False
 
     if hasattr(post, '__content'):
-        files.save(get_post_content_path(post), getattr(post, '__content'))
+        content = getattr(post, '__content')
+        files.save(get_post_content_path(post), content)
         delattr(post, '__content')
         regenerate_thumb = True
+        if post.type in (db.Post.TYPE_IMAGE, db.Post.TYPE_ANIMATION):
+            image_hash.delete_image(post.post_id)
+            image_hash.add_image(post.post_id, content)
 
     if hasattr(post, '__thumbnail'):
         if getattr(post, '__thumbnail'):
@@ -521,3 +530,35 @@ def merge_posts(source_post, target_post, replace_content):
     if replace_content:
         content = files.get(get_post_content_path(source_post))
         update_post_content(target_post, content)
+
+
+def search_by_image(image_content):
+    for result in image_hash.search_by_image(image_content):
+        yield {
+            'score': result['score'],
+            'dist': result['dist'],
+            'post': get_post_by_id(result['path'])
+        }
+
+
+def populate_reverse_search():
+    excluded_post_ids = image_hash.get_all_paths()
+
+    post_ids_to_hash = (db.session
+        .query(db.Post.post_id)
+        .filter(
+            (db.Post.type == db.Post.TYPE_IMAGE) |
+            (db.Post.type == db.Post.TYPE_ANIMATION))
+        .filter(~db.Post.post_id.in_(excluded_post_ids))
+        .order_by(db.Post.post_id.asc())
+        .all())
+
+    for post_ids_chunk in util.chunks(post_ids_to_hash, 100):
+        posts_chunk = (db.session
+            .query(db.Post)
+            .filter(db.Post.post_id.in_(post_ids_chunk))
+            .all())
+        for post in posts_chunk:
+            content_path = get_post_content_path(post)
+            if files.has(content_path):
+                image_hash.add_image(post.post_id, files.get(content_path))
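A hypothetical way to exercise the two new helpers from a shell, assuming the index has already been filled (facade.py above calls `populate_reverse_search()` once at app creation); `query.jpg` is a made-up file name:

```python
# Reverse-searches a local file against the indexed posts; each match
# carries the matched post plus image-match's score and distance.
from szurubooru.func import posts

with open('query.jpg', 'rb') as handle:   # hypothetical query image
    for match in posts.search_by_image(handle.read()):
        print(match['post'].post_id, match['score'], match['dist'])
```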
@@ -162,3 +162,8 @@ def value_exceeds_column_size(value, column):
     if max_length is None:
         return False
     return len(value) > max_length
+
+
+def chunks(source_list, part_size):
+    for i in range(0, len(source_list), part_size):
+        yield source_list[i:i + part_size]
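What the new helper yields, for reference; it is what lets `populate_reverse_search` above hash posts in batches of 100 instead of loading every row at once:

```python
# chunks() slices a list into consecutive fixed-size parts; the last
# part may be shorter than part_size.
from szurubooru.func import util

print(list(util.chunks([1, 2, 3, 4, 5], 2)))  # [[1, 2], [3, 4], [5]]
```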
@@ -3,6 +3,7 @@ import contextlib
 import os
 import random
 import string
+from unittest.mock import patch
 from datetime import datetime
 import pytest
 import freezegun
@@ -154,6 +155,13 @@ def tag_factory():
     return factory
 
 
+@pytest.yield_fixture(autouse=True)
+def skip_post_hashing():
+    with patch('szurubooru.func.image_hash.add_image'), \
+            patch('szurubooru.func.image_hash.delete_image'):
+        yield
+
+
 @pytest.fixture
 def post_factory():
     # pylint: disable=invalid-name
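The fixture leans on `unittest.mock.patch` as a context manager: for the duration of each test the two functions are replaced by mocks, so the post tests never talk to elasticsearch yet can still assert on the calls. A standalone sketch of that pattern (not the commit's code):

```python
# Demonstrates the patching pattern the autouse fixture applies to every
# test: inside the with-block the real functions are swapped for mocks.
from unittest.mock import patch

from szurubooru.func import image_hash

with patch('szurubooru.func.image_hash.add_image'):
    image_hash.add_image(1, b'...')  # recorded by the mock, no elasticsearch I/O
    image_hash.add_image.assert_called_once_with(1, b'...')
# after the block the original add_image is restored
```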
@@ -3,7 +3,8 @@ from unittest.mock import patch
 from datetime import datetime
 import pytest
 from szurubooru import db
-from szurubooru.func import (posts, users, comments, tags, images, files, util)
+from szurubooru.func import (
+    posts, users, comments, tags, images, files, util, image_hash)
 
 
 @pytest.mark.parametrize('input_mime_type,expected_url', [
@@ -316,13 +317,20 @@ def test_update_post_content_for_new_post(
     else:
         assert not post.post_id
     assert not os.path.exists(output_file_path)
-    posts.update_post_content(post, read_asset(input_file))
+    content = read_asset(input_file)
+    posts.update_post_content(post, content)
     assert not os.path.exists(output_file_path)
     db.session.flush()
     assert post.mime_type == expected_mime_type
     assert post.type == expected_type
     assert post.checksum == 'crc'
     assert os.path.exists(output_file_path)
+    if post.type in (db.Post.TYPE_IMAGE, db.Post.TYPE_ANIMATION):
+        image_hash.delete_image.assert_called_once_with(post.post_id)
+        image_hash.add_image.assert_called_once_with(post.post_id, content)
+    else:
+        image_hash.delete_image.assert_not_called()
+        image_hash.add_image.assert_not_called()
 
 
 def test_update_post_content_to_existing_content(