
Merge branch 'ammod' into django_backend

Dimitri Korsch, 2 years ago
Current commit: a5c0c87b53
100 files changed, with 3171 insertions and 429 deletions
  1. .dockerignore (+5 -0)
  2. .gitignore (+8 -2)
  3. .pylintrc (+2 -1)
  4. Makefile (+7 -0)
  5. app.py (+8 -1)
  6. docker-compose.yml (+4 -2)
  7. labels/LepiForum_PandasVersion/LepiForum_Species_edited_by_GBrehm.csv (+964 -0)
  8. labels/LepiForum_PandasVersion/Provider.py (+135 -0)
  9. labels/LepiForum_PandasVersion/configuration1.json (+17 -0)
  10. labels/LepiForum_PandasVersion/configuration2.json (+17 -0)
  11. labels/LepiForum_PandasVersion/configuration3.json (+17 -0)
  12. labels/LepiForum_PandasVersion/configuration4.json (+14 -0)
  13. migrations/versions/8f2554c45633_.py (+36 -0)
  14. migrations/versions/b03df3e31b8d_.py (+9 -1)
  15. notebooks/.gitignore (+4 -0)
  16. notebooks/show_results.ipynb (+59 -0)
  17. package-lock.json (+11 -0)
  18. pycs/__init__.py (+21 -3)
  19. pycs/database/Collection.py (+1 -1)
  20. pycs/database/File.py (+63 -18)
  21. pycs/database/Label.py (+5 -0)
  22. pycs/database/LabelProvider.py (+2 -2)
  23. pycs/database/Model.py (+1 -1)
  24. pycs/database/Project.py (+55 -9)
  25. pycs/database/Result.py (+101 -3)
  26. pycs/database/__init__.py (+7 -0)
  27. pycs/database/util/JSONEncoder.py (+0 -16)
  28. pycs/frontend/WebServer.py (+95 -50)
  29. pycs/frontend/endpoints/ListJobs.py (+2 -1)
  30. pycs/frontend/endpoints/ListLabelProviders.py (+2 -1)
  31. pycs/frontend/endpoints/ListModels.py (+2 -1)
  32. pycs/frontend/endpoints/ListProjects.py (+2 -1)
  33. pycs/frontend/endpoints/additional/Authenticate.py (+15 -0)
  34. pycs/frontend/endpoints/additional/FolderInformation.py (+1 -1)
  35. pycs/frontend/endpoints/data/GetPreviousAndNextFile.py (+14 -4)
  36. pycs/frontend/endpoints/data/RemoveFile.py (+2 -1)
  37. pycs/frontend/endpoints/data/UploadFile.py (+3 -1)
  38. pycs/frontend/endpoints/jobs/RemoveJob.py (+2 -1)
  39. pycs/frontend/endpoints/labels/CreateLabel.py (+2 -1)
  40. pycs/frontend/endpoints/labels/EditLabelName.py (+2 -1)
  41. pycs/frontend/endpoints/labels/EditLabelParent.py (+2 -1)
  42. pycs/frontend/endpoints/labels/ListLabelTree.py (+2 -1)
  43. pycs/frontend/endpoints/labels/ListLabels.py (+2 -1)
  44. pycs/frontend/endpoints/labels/RemoveLabel.py (+2 -1)
  45. pycs/frontend/endpoints/pipelines/EstimateBoundingBox.py (+136 -0)
  46. pycs/frontend/endpoints/pipelines/FitModel.py (+2 -1)
  47. pycs/frontend/endpoints/pipelines/PredictBoundingBox.py (+6 -5)
  48. pycs/frontend/endpoints/pipelines/PredictFile.py (+2 -1)
  49. pycs/frontend/endpoints/pipelines/PredictModel.py (+23 -13)
  50. pycs/frontend/endpoints/projects/CreateProject.py (+2 -1)
  51. pycs/frontend/endpoints/projects/EditProjectDescription.py (+2 -1)
  52. pycs/frontend/endpoints/projects/EditProjectName.py (+2 -1)
  53. pycs/frontend/endpoints/projects/ExecuteExternalStorage.py (+2 -1)
  54. pycs/frontend/endpoints/projects/ExecuteLabelProvider.py (+2 -1)
  55. pycs/frontend/endpoints/projects/GetProjectModel.py (+2 -1)
  56. pycs/frontend/endpoints/projects/ListProjectCollections.py (+2 -1)
  57. pycs/frontend/endpoints/projects/ListProjectFiles.py (+16 -2)
  58. pycs/frontend/endpoints/projects/RemoveProject.py (+2 -1)
  59. pycs/frontend/endpoints/results/ConfirmAllResults.py (+35 -0)
  60. pycs/frontend/endpoints/results/ConfirmResult.py (+5 -4)
  61. pycs/frontend/endpoints/results/CopyResults.py (+49 -0)
  62. pycs/frontend/endpoints/results/CreateResult.py (+3 -2)
  63. pycs/frontend/endpoints/results/EditResultData.py (+2 -2)
  64. pycs/frontend/endpoints/results/EditResultLabel.py (+3 -3)
  65. pycs/frontend/endpoints/results/GetProjectResults.py (+2 -1)
  66. pycs/frontend/endpoints/results/GetResults.py (+2 -1)
  67. pycs/frontend/endpoints/results/RemoveResult.py (+2 -1)
  68. pycs/frontend/endpoints/results/ResetResults.py (+2 -1)
  69. pycs/frontend/notifications/NotificationManager.py (+1 -2)
  70. pycs/frontend/util/JSONEncoder.py (+0 -31)
  71. pycs/frontend/util/__init__.py (+0 -0)
  72. pycs/interfaces/MediaFile.py (+24 -3)
  73. pycs/interfaces/MediaImageLabel.py (+3 -1)
  74. pycs/interfaces/Pipeline.py (+2 -1)
  75. pycs/jobs/JobRunner.py (+1 -1)
  76. pycs/jobs/util/JSONEncoder.py (+0 -17)
  77. pycs/jobs/util/__init__.py (+0 -0)
  78. pycs/management/__init__.py (+7 -0)
  79. pycs/management/project.py (+43 -0)
  80. pycs/management/result.py (+230 -0)
  81. pycs/util/FileOperations.py (+25 -1)
  82. pycs/util/JSONEncoder.py (+22 -0)
  83. pycs/util/PipelineUtil.py (+1 -1)
  84. pycs/util/ProgressFileWriter.py (+2 -1)
  85. requirements.txt (+4 -0)
  86. settings.json (+1 -1)
  87. tests/base.py (+17 -2)
  88. tests/client/__init__.py (+1 -1)
  89. tests/client/file_tests.py (+67 -14)
  90. tests/client/pipeline_tests.py (+26 -3)
  91. tests/client/project_tests.py (+1 -1)
  92. tests/client/result_tests.py (+1 -1)
  93. webui/package-lock.json (+84 -81)
  94. webui/package.json (+2 -1)
  95. webui/src/App.vue (+81 -6)
  96. webui/src/assets/icons/check-all.svg (+69 -0)
  97. webui/src/assets/icons/double-chevron-left.svg (+64 -0)
  98. webui/src/assets/icons/double-chevron-right.svg (+64 -0)
  99. webui/src/assets/icons/untag.svg (+60 -0)
  100. webui/src/components/media/annotated-image.vue (+237 -86)

+ 5 - 0
.dockerignore

@@ -1,11 +1,16 @@
 env/
 venv/
 webui/node_modules/
+frontend/node_modules/
+db.backups/
 projects/
 htmlcov/
 labels/
+labels.ammod/
 models/
+models.ammod/
 tests/
+notebooks/
 
 *.sqlite
 *.sqlite-journal

+ 8 - 2
.gitignore

@@ -37,11 +37,17 @@ htmlcov/
 projects
 db
 external_data
-/models/
-/labels/
+models*
+labels*
 dist/
 
+.htpasswd
+.test-htpasswd
+
 *.sqlite
 *.sqlite-journal
 *.sqlite3
 *.sqlite3-journal
+
+output*.json
+settings.local.json

+ 2 - 1
.pylintrc

@@ -155,7 +155,8 @@ disable=print-statement,
         comprehension-escape,
         duplicate-code,
         missing-module-docstring,
-        too-many-instance-attributes
+        too-many-instance-attributes,
+        no-member
 
 # Enable the message, report, category or checker with the given id(s). You can
 # either give multiple identifier separated by comma (,) or put this option

+ 7 - 0
Makefile

@@ -2,6 +2,10 @@
 UID := $(shell id -u)
 GID := $(shell id -g)
 
+today := $(shell date +%Y-%m-%d)
+
+DB_BACKUPS := db.backups
+
 export UID
 export GID
 
@@ -31,6 +35,9 @@ build_docker:
 		--tag pycs:latest \
 		--build-arg UID=${UID} \
 		--build-arg GID=${GID}
+backup_db:
+	@mkdir -p ${DB_BACKUPS}
+	cp db/data.sqlite3 ${DB_BACKUPS}/data.${today}.sqlite3
 
 run_docker: build_docker
 	@mkdir -p projects db external_data

+ 8 - 1
app.py

@@ -1,9 +1,16 @@
 #!/usr/bin/env python
 
+import logging.config
+
 from pycs import app
+from pycs import htpasswd
 from pycs import settings
 from pycs.frontend.WebServer import WebServer
+from pycs.management import setup_commands
+
+logging.config.dictConfig(settings.logging)
+setup_commands(app)
 
 if __name__ == '__main__':
-    server = WebServer(app, settings)
+    server = WebServer(app, htpasswd, settings)
     server.run()
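
app.py now configures logging from the settings object before the server starts. The diff does not show the "logging" section of settings.json; the following is only an illustrative Python sketch of the minimal shape that logging.config.dictConfig accepts, not the project's actual configuration.

# Illustrative only: the real "logging" keys in settings.json are not part of this diff.
import logging.config

logging_config = {
    "version": 1,
    "formatters": {"default": {"format": "%(asctime)s [%(levelname)s] %(message)s"}},
    "handlers": {"console": {"class": "logging.StreamHandler", "formatter": "default"}},
    "root": {"level": "INFO", "handlers": ["console"]},
}
logging.config.dictConfig(logging_config)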

+ 4 - 2
docker-compose.yml

@@ -12,11 +12,12 @@ services:
       - ${PORT:-5000}:5000
 
     volumes:
-      - ./models:/pycs/models
-      - ./labels:/pycs/labels
+      - ./models.ammod:/pycs/models
+      - ./labels.ammod:/pycs/labels
       - ./projects:/pycs/projects
       - ./db:/pycs/db
       - ./settings.json:/pycs/settings.json
+      - ./.htpasswd:/pycs/.htpasswd
       - ./external_data:/data
 
     depends_on:
@@ -31,3 +32,4 @@ services:
     volumes:
       - ./db:/pycs/db
       - ./settings.json:/pycs/settings.json
+      - ./.htpasswd:/pycs/.htpasswd

File diff suppressed because it is too large
+ 964 - 0
labels/LepiForum_PandasVersion/LepiForum_Species_edited_by_GBrehm.csv


+ 135 - 0
labels/LepiForum_PandasVersion/Provider.py

@@ -0,0 +1,135 @@
+import re
+import numpy as np
+import pandas as pd
+import typing as T
+
+from pathlib import Path
+from munch import munchify
+
+from pycs import app
+from pycs.interfaces.LabelProvider import LabelProvider
+
+class Provider(LabelProvider):
+
+    names = [
+        'is_local',
+        'rarity',
+        'super_family',
+        'family',
+        'sub_family',
+        'tribe',
+        'german',
+        'swiss',
+        'austrian',
+        'kr_nr',
+        'genus',
+        'species',
+        'species_group',
+        'authors',
+        'comment',
+        'remove_me',
+        'changed',
+        'version1_comment',
+        'misc', # 'D-CH-A / non-KR / Kaukasus',
+        'german_name',
+    ]
+
+    dtype = {
+        'is_local': pd.CategoricalDtype(['nur lokal', 'tagaktiv']),
+        'rarity': np.float32,
+        'super_family': "category",
+        'family': "category",
+        'sub_family': "category",
+        'tribe': "category",
+        'german': pd.CategoricalDtype(['D', 'e', '?']),
+        'swiss': pd.CategoricalDtype(['C', 'e', '?']),
+        'austrian': pd.CategoricalDtype(['A', 'e', '?']),
+        'kr_nr': "object",
+        'genus': "category",
+        'species': "category",
+        'species_group': "category",
+        'authors': "object",
+        'comment': "object",
+        'remove_me': "category",
+        'changed': "object",
+        'version1_comment': "object",
+        'misc': "object",
+        'german_name': str,
+    }
+
+    KR_REGEX = re.compile(r"^[\d\-a-zA-Z]+")
+
+
+    def __init__(self, root_folder: str, configuration: T.Dict):
+        config = munchify(configuration)
+        self.root = Path(root_folder)
+
+        self.label_file = self.root / config.filename
+        self.min_rarity = config.minimumRarity
+        self.hierarchy_levels = config.hierarchyLevels
+        self.only_german = config.onlyGerman
+
+    def close(self):
+        pass
+
+    def get_labels(self) -> T.List[dict]:
+        result = []
+
+        lepi_list = pd.read_csv(self.label_file,
+                        names=self.names,
+                        dtype=self.dtype,
+                        sep="\t", header=0
+                       )
+        app.logger.info(f"Found {len(lepi_list)} labels in {self.label_file}")
+
+        if self.min_rarity is not None:
+            mask = lepi_list.rarity >= self.min_rarity
+            lepi_list = lepi_list[mask]
+            app.logger.info(f"Labels {len(lepi_list):,d} with {self.min_rarity=}")
+
+        if self.only_german:
+            mask = (
+                lepi_list.german.eq("D") |
+                lepi_list.austrian.eq("A") |
+                lepi_list.swiss.eq("C")
+                ) & \
+                lepi_list["remove_me"].isin([np.nan])
+
+            lepi_list = lepi_list[mask]
+            app.logger.info(f"Labels {len(lepi_list):,d} for german-speaking countries")
+
+
+        parents = set()
+        for i, entry in lepi_list.iterrows():
+            parent_reference = None
+
+            for level, level_name in self.hierarchy_levels:
+                level_entry = entry[level]
+                if level_entry is None or (isinstance(level_entry, float) and np.isnan(level_entry)):
+                    continue
+
+                reference, name = f'{level}_{level_entry.lower()}', level_entry
+
+                # parents should be added once
+                if reference not in parents:
+                    result.append(self.create_label(reference, name, parent_reference, level_name))
+                    parents.add(reference)
+
+                parent_reference = reference
+
+
+            # add label itself
+            if self.KR_REGEX.match(entry.kr_nr):
+                name = f'{entry.genus} {entry.species} ({entry.kr_nr})'
+                reference = entry.kr_nr
+
+            else:
+                name = f'{entry.genus} {entry.species}'
+                reference = f'_{name.lower()}'
+            result.append(self.create_label(reference, name, parent_reference))
+
+
+        app.logger.info(f"Finally, provided {len(result):,d} labels")
+        return result
+
+
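
For reference, a minimal sketch of driving this label provider directly, outside the web server. It assumes a configured pycs environment (a valid settings file, e.g. via PYCS_SETTINGS, and a .htpasswd so that `pycs` imports) and that the script is run from inside labels/LepiForum_PandasVersion next to the CSV and configuration files added in this commit.

# Minimal sketch (assumptions noted above): load configuration1.json and build the label list.
import json
from pathlib import Path

from Provider import Provider  # labels/LepiForum_PandasVersion/Provider.py

root = Path(".")
with open(root / "configuration1.json", encoding="utf8") as f:
    config = json.load(f)

provider = Provider(str(root), config)
labels = provider.get_labels()      # list of dicts built via create_label(...)
print(f"{len(labels)} labels loaded")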

+ 17 - 0
labels/LepiForum_PandasVersion/configuration1.json

@@ -0,0 +1,17 @@
+{
+  "name": "LepiForum (Alle Spezies)",
+  "description": "Stand: 01.12.2021, bearbeitet GBrehm",
+  "code": {
+    "module": "Provider",
+    "class": "Provider"
+  },
+
+  "filename": "LepiForum_Species_edited_by_GBrehm.csv",
+  "minimumRarity": null,
+  "onlyGerman": false,
+  "hierarchyLevels": [
+    ["family", "Familie"],
+    ["genus", "Gattung"],
+    ["species_group", "Artkomplex"]
+  ]
+}

+ 17 - 0
labels/LepiForum_PandasVersion/configuration2.json

@@ -0,0 +1,17 @@
+{
+  "name": "LepiForum (Alle Spezies aus D/A/CH)",
+  "description": "Stand: 01.12.2021, bearbeitet GBrehm",
+  "code": {
+    "module": "Provider",
+    "class": "Provider"
+  },
+
+  "filename": "LepiForum_Species_edited_by_GBrehm.csv",
+  "minimumRarity": null,
+  "onlyGerman": true,
+  "hierarchyLevels": [
+    ["family", "Familie"],
+    ["genus", "Gattung"],
+    ["species_group", "Artkomplex"]
+  ]
+}

+ 17 - 0
labels/LepiForum_PandasVersion/configuration3.json

@@ -0,0 +1,17 @@
+{
+  "name": "LepiForum (Nur häufige Spezies aus D/A/CH)",
+  "description": "Stand: 01.12.2021, bearbeitet GBrehm",
+  "code": {
+    "module": "Provider",
+    "class": "Provider"
+  },
+
+  "filename": "LepiForum_Species_edited_by_GBrehm.csv",
+  "minimumRarity": 0,
+  "onlyGerman": true,
+  "hierarchyLevels": [
+    ["family", "Familie"],
+    ["genus", "Gattung"],
+    ["species_group", "Artkomplex"]
+  ]
+}

+ 14 - 0
labels/LepiForum_PandasVersion/configuration4.json

@@ -0,0 +1,14 @@
+{
+  "name": "LepiForum (Alle Spezies aus D/A/CH, ohne Hierarchie)",
+  "description": "Stand: 01.12.2021, bearbeitet GBrehm",
+  "code": {
+    "module": "Provider",
+    "class": "Provider"
+  },
+
+  "filename": "LepiForum_Species_edited_by_GBrehm.csv",
+  "minimumRarity": null,
+  "onlyGerman": true,
+  "hierarchyLevels": [
+  ]
+}

+ 36 - 0
migrations/versions/8f2554c45633_.py

@@ -0,0 +1,36 @@
+"""empty message
+
+Revision ID: 8f2554c45633
+Revises: b03df3e31b8d
+Create Date: 2022-02-09 13:55:07.389805
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '8f2554c45633'
+down_revision = 'b03df3e31b8d'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('result_confirmation',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('result_id', sa.Integer(), nullable=False),
+    sa.Column('confirming_user', sa.String(), nullable=False),
+    sa.ForeignKeyConstraint(['result_id'], ['result.id'], ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.add_column('result', sa.Column('origin_user', sa.String(), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('result', 'origin_user')
+    op.drop_table('result_confirmation')
+    # ### end Alembic commands ###
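
The new result_confirmation table and the origin_user column are applied through Flask-Migrate; a sketch of running the upgrade programmatically (equivalent to `flask db upgrade`), using the standard flask_migrate API inside the app context:

# Sketch: apply all pending revisions, including 8f2554c45633.
from flask_migrate import upgrade
from pycs import app

with app.app_context():
    upgrade()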

+ 9 - 1
migrations/versions/b03df3e31b8d_.py

@@ -1,7 +1,7 @@
 """empty message
 
 Revision ID: b03df3e31b8d
-Revises: 
+Revises:
 Create Date: 2021-08-11 12:46:17.757283
 
 """
@@ -102,12 +102,20 @@ def upgrade():
     sa.Column('file_id', sa.Integer(), nullable=False),
     sa.Column('origin', sa.String(), nullable=False),
     sa.Column('type', sa.String(), nullable=False),
+    sa.Column('origin_user', sa.String(), nullable=True),
     sa.Column('label_id', sa.Integer(), nullable=True),
     sa.Column('data_encoded', sa.String(), nullable=True),
     sa.ForeignKeyConstraint(['file_id'], ['file.id'], ondelete='CASCADE'),
     sa.ForeignKeyConstraint(['label_id'], ['label.id'], ondelete='SET NULL'),
     sa.PrimaryKeyConstraint('id')
     )
+    op.create_table('result_confirmation',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('result_id', sa.Integer(), nullable=False),
+    sa.Column('confirming_user', sa.String(), nullable=False),
+    sa.ForeignKeyConstraint(['result_id'], ['result.id'], ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id')
+    )
     # ### end Alembic commands ###
 
 

+ 4 - 0
notebooks/.gitignore

@@ -0,0 +1,4 @@
+export*
+.ipynb_checkpoints
+*.zip
+*.tar*

File diff suppressed because it is too large
+ 59 - 0
notebooks/show_results.ipynb


+ 11 - 0
package-lock.json

@@ -0,0 +1,11 @@
+{
+  "requires": true,
+  "lockfileVersion": 1,
+  "dependencies": {
+    "vue-debounce": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/vue-debounce/-/vue-debounce-3.0.2.tgz",
+      "integrity": "sha512-+shuc9Ry+AFqJbN7BMfagazB81/bTiPWvUZ4KBjambgrd3B5EQBojxeGzeNZ21xRflnwB098BG1d0HtWv8WyzA=="
+    }
+  }
+}

+ 21 - 3
pycs/__init__.py

@@ -9,26 +9,44 @@ from munch import munchify
 from pathlib import Path
 
 from flask import Flask
+from flask_htpasswd import HtPasswdAuth
 from flask_migrate import Migrate
 from flask_sqlalchemy import SQLAlchemy
 from sqlalchemy import event
 from sqlalchemy import pool
 from sqlalchemy.engine import Engine
 
-print('=== Loading settings ===')
-with open('settings.json') as file:
+from pycs.util.JSONEncoder import JSONEncoder
+
+
+settings_file = os.environ.get("PYCS_SETTINGS", "settings.json")
+
+print(f'=== Loading settings from "{settings_file}" ===')
+with open(settings_file, encoding='utf8') as file:
     settings = munchify(json.load(file))
 
 # create projects folder
 if not os.path.exists(settings.projects_folder):
-    os.mkdir(settings.projects_folder)
+    os.mkdir(settings.projects_folder) # pragma: no-cover
 
 DB_FILE = Path.cwd() / settings.database
 
+
 app = Flask(__name__)
 app.config["SQLALCHEMY_DATABASE_URI"] = f"sqlite:///{DB_FILE}"
 app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
 
+# Protect via http basic authentication
+app.config['FLASK_HTPASSWD_PATH'] = '.htpasswd'
+if not os.path.isfile(app.config['FLASK_HTPASSWD_PATH']):
+    raise FileNotFoundError("You need to specify a .htpasswd-file."
+        f"The following file could not be located: {app.config['FLASK_HTPASSWD_PATH']}!")
+app.config['FLASK_SECRET'] = 'Hey Hey Kids, secure me!'
+htpasswd = HtPasswdAuth(app)
+
+# set json encoder so database objects are serialized correctly
+app.json_encoder = JSONEncoder
+
 # pylint: disable=unused-argument
 @event.listens_for(Engine, "connect")
 def set_sqlite_pragma(dbapi_connection, connection_record):

+ 1 - 1
pycs/database/Collection.py

@@ -58,7 +58,7 @@ class Collection(NamedBaseModel):
 
         # pylint: disable=import-outside-toplevel, cyclic-import
         from pycs.database.File import File
-        return self.files.filter(*filters).order_by(File.id).offset(offset).limit(limit)
+        return self.files.filter(*filters).order_by(File.path).offset(offset).limit(limit)
 
     # pylint: disable=too-many-arguments
     @commit_on_return

+ 63 - 18
pycs/database/File.py

@@ -66,6 +66,7 @@ class File(NamedBaseModel):
         "created",
         "path",
         "frames",
+        "has_annotations",
         "fps",
         "project_id",
         "collection_id",
@@ -76,6 +77,11 @@ class File(NamedBaseModel):
         """ filename consisting of a name and an extension """
         return f"{self.name}{self.extension}"
 
+    @property
+    def has_annotations(self):
+        """ check if there are any referenced results """
+        return self.results.count() != 0
+
     @property
     def absolute_path(self) -> str:
         """ returns an absolute of the file """
@@ -128,26 +134,37 @@ class File(NamedBaseModel):
         collection = Collection.query.filter_by(reference=collection_reference).one()
         self.collection_id = collection.id
 
-    def _get_another_file(self, *query) -> T.Optional[File]:
+    def _get_another_file(self, *query, with_annotations=None) -> T.Optional[File]:
         """
         get the first file matching the query ordered by descending id
 
         :return: another file or None
         """
-        return File.query.filter(File.project_id == self.project_id, *query)
+        result = File.query.filter(File.project_id == self.project_id, *query)
+
+        if with_annotations is None:
+            return result
+
+        annot_query = File.results.any()
+
+        if with_annotations is False:
+            annot_query = ~annot_query
 
-    def next(self) -> T.Optional[File]:
+        return result.filter(annot_query)
+
+    def next(self, **kwargs) -> T.Optional[File]:
         """
         get the successor of this file
 
         :return: another file or None
         """
 
-        return self._get_another_file(File.id > self.id)\
-            .order_by(File.id).first()
+        res = self._get_another_file(File.path > self.path, **kwargs)\
+            .order_by(File.path)
+        return res.first()
 
 
-    def previous(self) -> T.Optional[File]:
+    def previous(self, **kwargs) -> T.Optional[File]:
         """
         get the predecessor of this file
 
@@ -155,22 +172,23 @@ class File(NamedBaseModel):
         """
 
         # pylint: disable=no-member
-        return self._get_another_file(File.id < self.id)\
-            .order_by(File.id.desc()).first()
+        res = self._get_another_file(File.path < self.path, **kwargs)\
+            .order_by(File.path.desc())
+        return res.first()
 
 
-    def next_in_collection(self) -> T.Optional[File]:
+    def next_in_collection(self, **kwargs) -> T.Optional[File]:
         """
         get the predecessor of this file
 
         :return: another file or None
         """
         return self._get_another_file(
-            File.id > self.id, File.collection_id == self.collection_id)\
-            .order_by(File.id).first()
+            File.path > self.path, File.collection_id == self.collection_id, **kwargs)\
+            .order_by(File.path).first()
 
 
-    def previous_in_collection(self) -> T.Optional[File]:
+    def previous_in_collection(self, **kwargs) -> T.Optional[File]:
         """
         get the predecessor of this file
 
@@ -179,8 +197,8 @@ class File(NamedBaseModel):
 
         # pylint: disable=no-member
         return self._get_another_file(
-            File.id < self.id, File.collection_id == self.collection_id)\
-            .order_by(File.id.desc()).first()
+            File.path < self.path, File.collection_id == self.collection_id, **kwargs)\
+            .order_by(File.path.desc()).first()
 
 
     def result(self, identifier: int) -> T.Optional[Result]:
@@ -196,31 +214,58 @@ class File(NamedBaseModel):
     def create_result(self,
                       origin: str,
                       result_type: str,
-                      label: T.Optional[T.Union[Label, int]] = None,
+                      origin_user: str = None,
+                      label: T.Optional[T.Union[Label, int, str]] = None,
                       data: T.Optional[dict] = None) -> Result:
         """
         Creates a result and returns the created object
 
         :return: result object
         """
+        if origin == "pipeline" and not origin_user is None:
+            raise ValueError("If an annotation was made by the pipeline no username"\
+                "can be specified!")
 
         result = Result.new(commit=False,
                             file_id=self.id,
                             origin=origin,
-                            type=result_type)
+                            type=result_type,
+                            origin_user=origin_user)
 
         result.data = data
 
         if label is not None:
-            assert isinstance(label, (int, Label)), f"Wrong label type: {type(label)}"
+            assert isinstance(label, (int, Label, str)), \
+                f"Label \"{label}\" has invalid type: {type(label)}"
+
+            if isinstance(label, str):
+                label = Label.query.filter(
+                    Label.project_id == self.project_id,
+                    Label.reference == label).one_or_none()
 
             if isinstance(label, Label):
                 label = label.id
 
-            result.label_id = label
+            result.set_label(label, commit=False)
 
         return result
 
+    def remove_result(self, result_id: int) -> T.List[Result]:
+        """
+            Remove the result with the given id.
+
+            :param result_id: id of the result to delete
+            :return: list of result objects
+        """
+
+        results = Result.query.filter(
+            Result.file_id == self.id,
+            Result.id == result_id)
+
+        _results = [r.serialize() for r in results.all()]
+        results.delete()
+
+        return _results
 
     def remove_results(self, origin='pipeline') -> T.List[Result]:
         """

+ 5 - 0
pycs/database/Label.py

@@ -66,11 +66,16 @@ class Label(NamedBaseModel):
     serialize_only = NamedBaseModel.serialize_only + (
         "project_id",
         "parent_id",
+        "parent_reference",
         "reference",
         "hierarchy_level",
         # "children",
     )
 
+    @property
+    def parent_reference(self):
+        return None if self.parent is None else self.parent.reference
+
     @commit_on_return
     def set_parent(self, parent: T.Optional[T.Union[int, str, Label]] = None) -> None:
 

+ 2 - 2
pycs/database/LabelProvider.py

@@ -40,7 +40,7 @@ class LabelProvider(NamedBaseModel):
         """
 
         for folder, conf_path in _find_files(root):
-            with open(conf_path) as conf_file:
+            with open(conf_path, encoding='utf8') as conf_file:
                 config = json.load(conf_file)
 
             provider, _ = cls.get_or_create(
@@ -73,7 +73,7 @@ class LabelProvider(NamedBaseModel):
         :return: LabelProvider instance
         """
         # load configuration.json
-        with open(self.configuration_file_path) as configuration_file:
+        with open(self.configuration_file_path, encoding='utf8') as configuration_file:
             configuration = json.load(configuration_file)
 
         # load code

+ 1 - 1
pycs/database/Model.py

@@ -37,7 +37,7 @@ class Model(NamedBaseModel):
             and stores them in the database
         """
         for folder in Path(root).glob("*"):
-            with open(folder / config_name) as config_file:
+            with open(folder / config_name, encoding='utf8') as config_file:
                 config = json.load(config_file)
 
             # extract data

+ 55 - 9
pycs/database/Project.py

@@ -3,6 +3,8 @@ import shutil
 import typing as T
 
 from datetime import datetime
+from eventlet import tpool
+from sqlalchemy.sql import case
 
 from pycs import app
 from pycs import db
@@ -12,6 +14,7 @@ from pycs.database.Collection import Collection
 from pycs.database.File import File
 from pycs.database.Label import Label
 from pycs.database.util import commit_on_return
+from pycs.util.FileOperations import resize_file
 
 
 class Project(NamedBaseModel):
@@ -105,7 +108,7 @@ class Project(NamedBaseModel):
         """
         return self.labels.filter(Label.reference == reference).one_or_none()
 
-    def file(self, identifier: int) -> T.Optional[Label]:
+    def file(self, identifier: int) -> T.Optional[File]:
         """
         get a file using its unique identifier
 
@@ -164,7 +167,7 @@ class Project(NamedBaseModel):
         return label, is_new
 
     @commit_on_return
-    def bulk_create_labels(self, labels: T.List[T.Dict], clean_old_labels: bool = True):
+    def bulk_create_labels(self, labels: T.List[T.Dict]):
         """
             Inserts a all labels at once.
 
@@ -172,15 +175,45 @@ class Project(NamedBaseModel):
                 - AssertionError if project_id and reference are not unique
                 - ValueError if a cycle in the hierarchy is found
         """
-        if clean_old_labels:
-            self.labels.delete()
+        if len(labels) == 0:
+            return labels
 
         for label in labels:
             label["project_id"] = self.id
 
         self.__check_labels(labels)
-        app.logger.info(f"Inserting {len(labels):,d} labels")
-        db.engine.execute(Label.__table__.insert(), labels)
+
+
+        # first update existing labels
+        fields_to_update = (
+            ("name", Label.name),
+            ("hierarchy_level", Label.hierarchy_level),
+        )
+
+        updates = {
+            field: case(
+                {lab["reference"]: lab[key] for lab in labels},
+                value=Label.reference)
+
+            for key, field in fields_to_update
+        }
+
+        existing_labs = self.labels.filter(
+            Label.reference.in_([lab["reference"] for lab in labels])
+        )
+        app.logger.info(f"Updating {existing_labs.count():,d} labels")
+        existing_labs.update(updates, synchronize_session=False)
+
+        # then add new labels
+        references = {lab.reference for lab in self.labels.all()}
+        new_labels = [lab for lab in labels
+            if lab["reference"] not in references]
+
+        if len(new_labels) > 0:
+            app.logger.info(f"Inserting {len(new_labels):,d} new labels")
+            db.engine.execute(Label.__table__.insert(), new_labels)
+
+        # finally set parents correctly
         self.__set_parents(labels)
 
         return labels
@@ -203,7 +236,7 @@ class Project(NamedBaseModel):
     def __check_labels(self, labels):
         """ check labels for unique keys and cycles """
 
-        unique_keys = dict()
+        unique_keys = {}
 
         for label in labels:
             key = (label["project_id"], label["reference"])
@@ -280,9 +313,15 @@ class Project(NamedBaseModel):
         file.frames = frames
         file.fps = fps
 
+        # Pre-load common thumbnail sizes if the given file is an image.
+        if file.type == 'image' and os.path.isfile(path):
+            for max_width, max_height in [(200, 200), (2000, 800)]:
+                tpool.execute(resize_file, file, self.root_folder, max_width, max_height)
+
         return file, is_new
 
-    def get_files(self, *filters, offset: int = 0, limit: int = -1) -> T.List[File]:
+    def get_files(self, *filters, offset: int = 0, limit: int = -1,
+                  with_annotations: T.Optional[bool] = None) -> T.List[File]:
         """
         get an iterator of files associated with this project
 
@@ -290,8 +329,15 @@ class Project(NamedBaseModel):
         :param limit: file limit
         :return: iterator of files
         """
+        if with_annotations is not None:
+            annot_query = File.results.any()
+
+            if with_annotations is False:
+                annot_query = ~annot_query
+
+            filters = filters + (annot_query,)
 
-        return self.files.filter(*filters).order_by(File.id).offset(offset).limit(limit)
+        return self.files.filter(*filters).order_by(File.path).offset(offset).limit(limit)
 
     def _files_without_results(self):
         """

+ 101 - 3
pycs/database/Result.py

@@ -5,6 +5,21 @@ from pycs import db
 from pycs.database.base import BaseModel
 from pycs.database.util import commit_on_return
 
+class ResultConfirmation(BaseModel):
+    """ DB Model for user confirmations of results """
+
+    result_id = db.Column(
+        db.Integer,
+        db.ForeignKey("result.id", ondelete="CASCADE"),
+        nullable=False)
+
+    confirming_user = db.Column(db.String, nullable=False)
+
+    serialize_only = BaseModel.serialize_only + (
+        "result_id",
+        "confirming_user",
+    )
+
 class Result(BaseModel):
     """ DB Model for projects """
 
@@ -14,6 +29,7 @@ class Result(BaseModel):
         nullable=False)
 
     origin = db.Column(db.String, nullable=False)
+    origin_user = db.Column(db.String, nullable=True)
     type = db.Column(db.String, nullable=False)
 
     label_id = db.Column(
@@ -23,18 +39,28 @@ class Result(BaseModel):
 
     data_encoded = db.Column(db.String)
 
+    result_confirmations = db.relationship("ResultConfirmation",
+        backref="result",
+        lazy="dynamic",
+        passive_deletes=True,
+    )
+
     serialize_only = BaseModel.serialize_only + (
         "file_id",
         "origin",
+        "origin_user",
         "type",
         "label_id",
         "data",
+        "confirmations"
     )
 
     def serialize(self):
         """ extends the default serialize with the decoded data attribute """
         result = super().serialize()
         result["data"] = self.data
+        result["confirmations"] = self.confirmations
+
         return result
 
     @property
@@ -58,15 +84,21 @@ class Result(BaseModel):
             raise ValueError(f"Not supported type: {type(value)}")
 
     @commit_on_return
-    def set_origin(self, origin: str):
+    def set_origin(self, origin: str, origin_user: str = None):
         """
         set this results origin
 
         :param origin: either 'user' or 'pipeline'
+        :param origin_user: None if origin is 'pipeline' else name of the user
         :return:
         """
-        self.origin = origin
+        if origin == "pipeline" and not origin_user is None:
+            raise ValueError("If an annotation was made by the pipeline no user"\
+                "can be specified!")
 
+        self.origin = origin
+        self.origin_user = origin_user
+        self.reset_confirmations(commit=False)
 
     @commit_on_return
     def set_label(self, label: int):
@@ -76,4 +108,70 @@ class Result(BaseModel):
         :param label: label ID
         :return:
         """
-        self.label_id = label
+        if self.label_id != label:
+            self.reset_confirmations(commit=False)
+            self.label_id = label
+
+    @property
+    def confirmations(self) -> T.List[ResultConfirmation]:
+        """
+            Returns all confirmations for this result
+
+            :return: list of result confirmations
+        """
+
+        confirmations = db.session.query(ResultConfirmation).filter(
+                            ResultConfirmation.result.has(Result.id==self.id))
+        _confirmations = [c.serialize() for c in confirmations.all()]
+
+        _confirmations = [{k:v for k, v in c.items()
+                            if k in ('id', 'confirming_user')}
+                            for c in _confirmations]
+
+        return _confirmations
+
+    @commit_on_return
+    def reset_confirmations(self) -> T.List[ResultConfirmation]:
+        """
+        Resets all confirmations
+
+        :return: list of result confirmation objects
+        """
+        confirmations = ResultConfirmation.query.filter(
+                ResultConfirmation.result_id == self.id)
+
+        # delete returns the serialized object
+        _confirmations = [c.delete(commit=False) for c in confirmations.all()]
+
+        return _confirmations
+
+    @commit_on_return
+    def confirm(self, user: str):
+        """
+            Result is confirmed by the given user. This sets the origin to "user".
+            If no username was specified before, the given username is used.
+            A confirmation is only added if it does not already exist. The result
+            has to be labeled to be confirmed.
+
+            :param user: username
+        """
+        if user is None:
+            raise ValueError("When confirming a result the username has to" \
+                            "be specified.")
+
+        if self.origin == "pipeline":
+            self.set_origin(origin="user", origin_user=user)
+
+        # Get current confirmations by given user.
+        confirmations_by_user = ResultConfirmation.query.filter(
+                                    ResultConfirmation.result_id == self.id,
+                                    ResultConfirmation.confirming_user == user)
+        _confirmations_by_user = [c.serialize() for c in confirmations_by_user.all()]
+
+        # Results can only be confirmed if the result is labeled.
+        # Also, the original annotator cannot confirm the result and we want
+        # to avoid duplicates.
+        if self.label_id is not None and self.origin_user != user and not len(_confirmations_by_user) > 0:
+            ResultConfirmation.new(commit=False,
+                                   result_id=self.id,
+                                   confirming_user=user)
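
The confirmation workflow implied by the model code, as a sketch; the method names come from the diff, while the ids, usernames, and the result type string are made up, and commits happen via the commit_on_return helpers.

# Sketch of the confirmation flow (illustrative values only).
result = file.create_result("user", "bounding-box", origin_user="alice", label=3)
result.confirm(user="bob")     # stores a ResultConfirmation for bob
result.confirm(user="alice")   # ignored: the original annotator cannot confirm
result.set_label(5)            # changing the label resets existing confirmations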

+ 7 - 0
pycs/database/__init__.py

@@ -0,0 +1,7 @@
+from pycs.database.Collection import Collection
+from pycs.database.File import File
+from pycs.database.Label import Label
+from pycs.database.LabelProvider import LabelProvider
+from pycs.database.Model import Model
+from pycs.database.Project import Project
+from pycs.database.Result import Result

+ 0 - 16
pycs/database/util/JSONEncoder.py

@@ -1,16 +0,0 @@
-from typing import Any
-
-from flask.json import JSONEncoder as Base
-
-from pycs.database.base import BaseModel
-
-class JSONEncoder(Base):
-    """
-    prepares database objects to be json encoded
-    """
-
-    def default(self, o: Any) -> Any:
-        if isinstance(o, BaseModel):
-            return o.serialize()
-
-        return o.__dict__.copy()

+ 95 - 50
pycs/frontend/WebServer.py

@@ -1,4 +1,3 @@
-import logging.config
 import typing as T
 
 from glob import glob
@@ -10,13 +9,14 @@ import socketio
 
 from flask import send_from_directory
 
-from pycs.database.Model import Model
 from pycs.database.LabelProvider import LabelProvider
+from pycs.database.Model import Model
 from pycs.frontend.endpoints.ListJobs import ListJobs
 from pycs.frontend.endpoints.ListLabelProviders import ListLabelProviders
 from pycs.frontend.endpoints.ListModels import ListModels
 from pycs.frontend.endpoints.ListProjects import ListProjects
 from pycs.frontend.endpoints.additional.FolderInformation import FolderInformation
+from pycs.frontend.endpoints.additional.Authenticate import Authenticate
 from pycs.frontend.endpoints.data.GetCroppedFile import GetCroppedFile
 from pycs.frontend.endpoints.data.GetFile import GetFile
 from pycs.frontend.endpoints.data.GetPreviousAndNextFile import GetPreviousAndNextFile
@@ -30,6 +30,7 @@ from pycs.frontend.endpoints.labels.EditLabelParent import EditLabelParent
 from pycs.frontend.endpoints.labels.ListLabelTree import ListLabelTree
 from pycs.frontend.endpoints.labels.ListLabels import ListLabels
 from pycs.frontend.endpoints.labels.RemoveLabel import RemoveLabel
+from pycs.frontend.endpoints.pipelines.EstimateBoundingBox import EstimateBoundingBox
 from pycs.frontend.endpoints.pipelines.FitModel import FitModel
 from pycs.frontend.endpoints.pipelines.PredictBoundingBox import PredictBoundingBox
 from pycs.frontend.endpoints.pipelines.PredictFile import PredictFile
@@ -44,6 +45,8 @@ from pycs.frontend.endpoints.projects.ListProjectCollections import ListProjectC
 from pycs.frontend.endpoints.projects.ListProjectFiles import ListProjectFiles
 from pycs.frontend.endpoints.projects.RemoveProject import RemoveProject
 from pycs.frontend.endpoints.results.ConfirmResult import ConfirmResult
+from pycs.frontend.endpoints.results.ConfirmAllResults import ConfirmAllResults
+from pycs.frontend.endpoints.results.CopyResults import CopyResults
 from pycs.frontend.endpoints.results.CreateResult import CreateResult
 from pycs.frontend.endpoints.results.EditResultData import EditResultData
 from pycs.frontend.endpoints.results.EditResultLabel import EditResultLabel
@@ -52,7 +55,6 @@ from pycs.frontend.endpoints.results.GetResults import GetResults
 from pycs.frontend.endpoints.results.RemoveResult import RemoveResult
 from pycs.frontend.endpoints.results.ResetResults import ResetResults
 from pycs.frontend.notifications.NotificationManager import NotificationManager
-from pycs.frontend.util.JSONEncoder import JSONEncoder
 from pycs.jobs.JobRunner import JobRunner
 from pycs.util.PipelineCache import PipelineCache
 
@@ -64,12 +66,10 @@ class WebServer:
 
     index: Path = Path.cwd() / 'webui' / 'index.html'
 
-    def __init__(self, app, settings: munch.Munch, discovery: bool = True):
+    def __init__(self, app, htpasswd, settings: munch.Munch, discovery: bool = True):
 
-        logging.config.dictConfig(settings.logging)
         self.app = app
-        # set json encoder so database objects are serialized correctly
-        self.app.json_encoder = JSONEncoder
+        self.htpasswd = htpasswd
 
         # initialize web server
         if self.is_production:
@@ -88,7 +88,10 @@ class WebServer:
             @self.app.after_request
             def after_request(response):
                 # pylint: disable=unused-variable
-                response.headers['Access-Control-Allow-Origin'] = '*'
+                response.headers['Access-Control-Allow-Origin'] = 'http://localhost:8080'
+                response.headers['Access-Control-Allow-Credentials'] = 'true'
+                response.headers['Access-Control-Allow-Methods'] = 'POST, GET'
+                response.headers['Access-Control-Allow-Headers'] = 'Authorization'
                 return response
 
         # create service objects
@@ -158,88 +161,101 @@ class WebServer:
     def define_routes(self):
         """ defines app routes """
 
+        # authentication
+        # additional
+        self.app.add_url_rule(
+            '/authenticate',
+            view_func=self.htpasswd.required( Authenticate.as_view('authenticate') )
+        )
+
         # additional
         self.app.add_url_rule(
             '/folder',
-            view_func=FolderInformation.as_view('folder_information')
+            view_func=self.htpasswd.required( FolderInformation.as_view('folder_information') )
         )
 
         # jobs
         self.app.add_url_rule(
             '/jobs',
-            view_func=ListJobs.as_view('list_jobs', self.jobs)
+            view_func=self.htpasswd.required( ListJobs.as_view('list_jobs', self.jobs) )
         )
         self.app.add_url_rule(
             '/jobs/<job_id>/remove',
-            view_func=RemoveJob.as_view('remove_job', self.jobs)
+            view_func=self.htpasswd.required( RemoveJob.as_view('remove_job', self.jobs) )
         )
 
         # models
         self.app.add_url_rule(
             '/models',
-            view_func=ListModels.as_view('list_models')
+            view_func=self.htpasswd.required( ListModels.as_view('list_models') )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/model',
-            view_func=GetProjectModel.as_view('get_project_model')
+            view_func=self.htpasswd.required( GetProjectModel.as_view('get_project_model') )
         )
 
         # labels
         self.app.add_url_rule(
             '/label_providers',
-            view_func=ListLabelProviders.as_view('label_providers')
+            view_func=self.htpasswd.required( ListLabelProviders.as_view('label_providers') )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/labels',
-            view_func=ListLabels.as_view('list_labels')
+            view_func=self.htpasswd.required( ListLabels.as_view('list_labels') )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/labels/tree',
-            view_func=ListLabelTree.as_view('list_label_tree')
+            view_func=self.htpasswd.required( ListLabelTree.as_view('list_label_tree') )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/labels',
-            view_func=CreateLabel.as_view('create_label', self.notifications)
+            view_func=self.htpasswd.required( CreateLabel.as_view('create_label',
+                self.notifications) )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/labels/<int:label_id>/remove',
-            view_func=RemoveLabel.as_view('remove_label', self.notifications)
+            view_func=self.htpasswd.required( RemoveLabel.as_view('remove_label',
+                self.notifications) )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/labels/<int:label_id>/name',
-            view_func=EditLabelName.as_view('edit_label_name', self.notifications)
+            view_func=self.htpasswd.required( EditLabelName.as_view('edit_label_name',
+                self.notifications) )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/labels/<int:label_id>/parent',
-            view_func=EditLabelParent.as_view('edit_label_parent', self.notifications)
+            view_func=self.htpasswd.required( EditLabelParent.as_view('edit_label_parent',
+                self.notifications) )
         )
 
         # collections
         self.app.add_url_rule(
             '/projects/<int:project_id>/collections',
-            view_func=ListProjectCollections.as_view('list_collections')
+            view_func=self.htpasswd.required( ListProjectCollections.as_view('list_collections') )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/data/<int:collection_id>/<int:start>/<int:length>',
-            view_func=ListProjectFiles.as_view('list_collection_files')
+            view_func=self.htpasswd.required( ListProjectFiles.as_view('list_collection_files') )
         )
 
         # data
         self.app.add_url_rule(
             '/projects/<int:project_id>/data',
-            view_func=UploadFile.as_view('upload_file', self.notifications)
+            view_func=self.htpasswd.required( UploadFile.as_view('upload_file',
+                self.notifications) )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/data',
-            view_func=ListProjectFiles.as_view('list_all_files')
+            view_func=self.htpasswd.required( ListProjectFiles.as_view('list_all_files') )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/data/<int:start>/<int:length>',
-            view_func=ListProjectFiles.as_view('list_files')
+            view_func=self.htpasswd.required( ListProjectFiles.as_view('list_files') )
         )
         self.app.add_url_rule(
             '/data/<int:file_id>/remove',
-            view_func=RemoveFile.as_view('remove_file', self.notifications)
+            view_func=self.htpasswd.required( RemoveFile.as_view('remove_file',
+                self.notifications) )
         )
         self.app.add_url_rule(
             '/data/<int:file_id>',
@@ -261,89 +277,118 @@ class WebServer:
         # results
         self.app.add_url_rule(
             '/projects/<int:project_id>/results',
-            view_func=GetProjectResults.as_view('get_project_results')
+            view_func=self.htpasswd.required( GetProjectResults.as_view('get_project_results') )
         )
         self.app.add_url_rule(
             '/data/<int:file_id>/results',
-            view_func=GetResults.as_view('get_results')
+            view_func=self.htpasswd.required( GetResults.as_view('get_results') )
         )
         self.app.add_url_rule(
             '/data/<int:file_id>/results',
-            view_func=CreateResult.as_view('create_result', self.notifications)
+            view_func=self.htpasswd.required( CreateResult.as_view('create_result',
+                self.notifications) )
+        )
+        self.app.add_url_rule(
+            '/data/<int:file_id>/copy_results',
+            view_func=self.htpasswd.required( CopyResults.as_view('copy_results',
+                self.notifications) )
+        )
+        self.app.add_url_rule(
+            '/data/<int:file_id>/confirm_all',
+            view_func=self.htpasswd.required( ConfirmAllResults.as_view('confirm_all',
+                self.notifications) )
         )
         self.app.add_url_rule(
             '/data/<int:file_id>/reset',
-            view_func=ResetResults.as_view('reset_results', self.notifications)
+            view_func=self.htpasswd.required( ResetResults.as_view('reset_results',
+                self.notifications) )
         )
         self.app.add_url_rule(
             '/results/<int:result_id>/remove',
-            view_func=RemoveResult.as_view('remove_result', self.notifications)
+            view_func=self.htpasswd.required( RemoveResult.as_view('remove_result',
+                self.notifications) )
         )
         self.app.add_url_rule(
             '/results/<int:result_id>/confirm',
-            view_func=ConfirmResult.as_view('confirm_result', self.notifications)
+            view_func=self.htpasswd.required( ConfirmResult.as_view('confirm_result',
+                self.notifications) )
         )
 
         self.app.add_url_rule(
             '/results/<int:result_id>/label',
-            view_func=EditResultLabel.as_view('edit_result_label', self.notifications)
+            view_func=self.htpasswd.required( EditResultLabel.as_view('edit_result_label',
+                self.notifications) )
         )
         self.app.add_url_rule(
             '/results/<int:result_id>/data',
-            view_func=EditResultData.as_view('edit_result_data', self.notifications)
+            view_func=self.htpasswd.required( EditResultData.as_view('edit_result_data',
+                self.notifications) )
         )
 
         # projects
         self.app.add_url_rule(
             '/projects',
-            view_func=ListProjects.as_view('list_projects')
+            view_func=self.htpasswd.required( ListProjects.as_view('list_projects') )
         )
         self.app.add_url_rule(
             '/projects',
-            view_func=CreateProject.as_view('create_project', self.notifications, self.jobs)
+            view_func=self.htpasswd.required( CreateProject.as_view('create_project',
+                self.notifications, self.jobs) )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/label_provider',
-            view_func=ExecuteLabelProvider.as_view('execute_label_provider',
-                                                   self.notifications, self.jobs)
+            view_func=self.htpasswd.required( ExecuteLabelProvider.as_view('execute_label_provider',
+                                                   self.notifications, self.jobs) )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/external_storage',
-            view_func=ExecuteExternalStorage.as_view('execute_external_storage',
-                                                     self.notifications, self.jobs)
+            view_func=self.htpasswd.required(
+                ExecuteExternalStorage.as_view('execute_external_storage',
+                    self.notifications, self.jobs) )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/remove',
-            view_func=RemoveProject.as_view('remove_project', self.notifications)
+            view_func=self.htpasswd.required( RemoveProject.as_view('remove_project',
+                self.notifications) )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/name',
-            view_func=EditProjectName.as_view('edit_project_name', self.notifications)
+            view_func=self.htpasswd.required( EditProjectName.as_view('edit_project_name',
+                self.notifications) )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/description',
-            view_func=EditProjectDescription.as_view('edit_project_description', self.notifications)
+            view_func=self.htpasswd.required(
+                EditProjectDescription.as_view('edit_project_description',
+                    self.notifications) )
         )
 
         # pipelines
         self.app.add_url_rule(
             '/projects/<int:project_id>/pipelines/fit',
-            view_func=FitModel.as_view('fit_model', self.jobs, self.pipelines)
+            view_func=self.htpasswd.required( FitModel.as_view('fit_model', self.jobs,
+                self.pipelines) )
         )
         self.app.add_url_rule(
             '/projects/<int:project_id>/pipelines/predict',
-            view_func=PredictModel.as_view('predict_model', self.notifications, self.jobs,
-                                           self.pipelines)
+            view_func=self.htpasswd.required( PredictModel.as_view('predict_model',
+                self.notifications, self.jobs, self.pipelines) )
         )
         self.app.add_url_rule(
             '/data/<int:file_id>/predict',
-            view_func=PredictFile.as_view('predict_file', self.notifications,
-                                          self.jobs, self.pipelines)
+            view_func=self.htpasswd.required( PredictFile.as_view('predict_file',
+                self.notifications, self.jobs, self.pipelines) )
         )
         self.app.add_url_rule(
             '/data/<int:file_id>/<int:bbox_id>/predict_bounding_box',
-            view_func=PredictBoundingBox.as_view('predict_bounding_box', self.notifications,
-                                          self.jobs, self.pipelines)
+            view_func=self.htpasswd.required( PredictBoundingBox.as_view('predict_bounding_box',
+                self.notifications, self.jobs, self.pipelines) )
+        )
+
+        self.app.add_url_rule(
+            '/data/<int:file_id>/estimate',
+            view_func=EstimateBoundingBox.as_view('estimate_result', self.notifications,
+                                          self.jobs)
         )
 
     def run(self):
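
With every route wrapped in htpasswd.required, clients must now send HTTP basic auth; a hypothetical check against the new /authenticate endpoint is sketched below. The requests library is used purely for illustration and is not a dependency added by this diff, and the credentials are placeholders that must match an entry in .htpasswd.

# Hypothetical client-side check of the basic-auth setup.
import requests

resp = requests.get("http://localhost:5000/authenticate", auth=("alice", "change-me"))
assert resp.status_code == 200   # 200 once the credentials match a .htpasswd entry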

+ 2 - 1
pycs/frontend/endpoints/ListJobs.py

@@ -15,5 +15,6 @@ class ListJobs(View):
         # pylint: disable=invalid-name
         self.jobs = jobs
 
-    def dispatch_request(self):
+    def dispatch_request(self, user: str):
+        # pylint: disable=unused-argument
         return jsonify(self.jobs.list())

+ 2 - 1
pycs/frontend/endpoints/ListLabelProviders.py

@@ -12,5 +12,6 @@ class ListLabelProviders(View):
     methods = ['GET']
 
 
-    def dispatch_request(self):
+    def dispatch_request(self, user: str):
+        # pylint: disable=unused-argument
         return jsonify(LabelProvider.query.all())

+ 2 - 1
pycs/frontend/endpoints/ListModels.py

@@ -12,5 +12,6 @@ class ListModels(View):
     methods = ['GET']
 
 
-    def dispatch_request(self):
+    def dispatch_request(self, user: str):
+        # pylint: disable=unused-argument
         return jsonify(Model.query.all())

+ 2 - 1
pycs/frontend/endpoints/ListProjects.py

@@ -12,5 +12,6 @@ class ListProjects(View):
     methods = ['GET']
 
 
-    def dispatch_request(self):
+    def dispatch_request(self, user: str):
+        # pylint: disable=unused-argument
         return jsonify(Project.query.all())

+ 15 - 0
pycs/frontend/endpoints/additional/Authenticate.py

@@ -0,0 +1,15 @@
+import os
+
+from flask import make_response
+from flask.views import View
+
+class Authenticate(View):
+    """
+    Always returns a success code.
+    """
+    # pylint: disable=arguments-differ
+    methods = ['GET']
+
+    def dispatch_request(self, user: str):
+        # Always return a success code, since authentication is already handled.
+        return make_response()

+ 1 - 1
pycs/frontend/endpoints/additional/FolderInformation.py

@@ -14,7 +14,7 @@ class FolderInformation(View):
     """
     methods = ['POST']
 
-    def dispatch_request(self):
+    def dispatch_request(self, user: str):
         # extract request data
         data = request.get_json(force=True)
 

+ 14 - 4
pycs/frontend/endpoints/data/GetPreviousAndNextFile.py

@@ -1,4 +1,5 @@
 from flask import jsonify
+from flask import request
 from flask.views import View
 
 from pycs.database.File import File
@@ -16,12 +17,21 @@ class GetPreviousAndNextFile(View):
         # get file from database
         file = File.get_or_404(file_id)
 
+
+        with_annotations = request.args.get("only_with_annotations")
+
+        kwargs = dict(with_annotations=None)
+
+        if with_annotations is not None:
+            kwargs["with_annotations"] = with_annotations == "1"
+
         # get previous and next
         result = {
-            'previous': file.previous(),
-            'next': file.next(),
-            'previousInCollection': file.previous_in_collection(),
-            'nextInCollection': file.next_in_collection()
+            'current': file,
+            'previous': file.previous(**kwargs),
+            'next': file.next(**kwargs),
+            'previousInCollection': file.previous_in_collection(**kwargs),
+            'nextInCollection': file.next_in_collection(**kwargs)
         }
 
         # return data

+ 2 - 1
pycs/frontend/endpoints/data/RemoveFile.py

@@ -17,7 +17,8 @@ class RemoveFile(View):
         # pylint: disable=invalid-name
         self.nm = nm
 
-    def dispatch_request(self, file_id: int):
+    def dispatch_request(self, user: str, file_id: int):
+        # pylint: disable=unused-argument
         # extract request data
         data = request.get_json(force=True)
 

+ 3 - 1
pycs/frontend/endpoints/data/UploadFile.py

@@ -28,7 +28,8 @@ class UploadFile(View):
         self.extension = None
         self.size = None
 
-    def dispatch_request(self, project_id: int):
+    def dispatch_request(self, user: str, project_id: int):
+        # pylint: disable=unused-argument
         # find project
         project = Project.get_or_404(project_id)
 
@@ -48,6 +49,7 @@ class UploadFile(View):
         # abort if there is no file entry in uploaded data
         if 'file' not in files.keys():
             return abort(400, "No file entry was found in uploaded data")
+        files['file'].close()
 
         # detect file type
         try:

+ 2 - 1
pycs/frontend/endpoints/jobs/RemoveJob.py

@@ -17,7 +17,8 @@ class RemoveJob(View):
         # pylint: disable=invalid-name
         self.jobs = jobs
 
-    def dispatch_request(self, job_id):
+    def dispatch_request(self, user: str, job_id):
+        # pylint: disable=unused-argument
         # extract request data
         data = request.get_json(force=True)
 

+ 2 - 1
pycs/frontend/endpoints/labels/CreateLabel.py

@@ -20,7 +20,8 @@ class CreateLabel(View):
         self.nm = nm
 
 
-    def dispatch_request(self, project_id):
+    def dispatch_request(self, user: str, project_id):
+        # pylint: disable=unused-argument
         # extract request data
         data = request.get_json(force=True)
         name = data.get('name')

+ 2 - 1
pycs/frontend/endpoints/labels/EditLabelName.py

@@ -19,7 +19,8 @@ class EditLabelName(View):
         self.nm = nm
 
 
-    def dispatch_request(self, project_id: int, label_id: int):
+    def dispatch_request(self, user: str, project_id: int, label_id: int):
+        # pylint: disable=unused-argument
         # extract request data
         data = request.get_json(force=True)
         name = data.get('name')

+ 2 - 1
pycs/frontend/endpoints/labels/EditLabelParent.py

@@ -19,7 +19,8 @@ class EditLabelParent(View):
         self.nm = nm
 
 
-    def dispatch_request(self, project_id: int, label_id: int):
+    def dispatch_request(self, user: str, project_id: int, label_id: int):
+        # pylint: disable=unused-argument
         # extract request data
         data = request.get_json(force=True)
         parent = data.get('parent')

+ 2 - 1
pycs/frontend/endpoints/labels/ListLabelTree.py

@@ -12,7 +12,8 @@ class ListLabelTree(View):
     methods = ['GET']
 
 
-    def dispatch_request(self, project_id):
+    def dispatch_request(self, user: str, project_id):
+        # pylint: disable=unused-argument
         # find project
         project = Project.get_or_404(project_id)
 

+ 2 - 1
pycs/frontend/endpoints/labels/ListLabels.py

@@ -12,7 +12,8 @@ class ListLabels(View):
     methods = ['GET']
 
 
-    def dispatch_request(self, project_id):
+    def dispatch_request(self, user: str, project_id):
+        # pylint: disable=unused-argument
         # find project
         project = Project.get_or_404(project_id)
 

+ 2 - 1
pycs/frontend/endpoints/labels/RemoveLabel.py

@@ -19,7 +19,8 @@ class RemoveLabel(View):
         # pylint: disable=invalid-name
         self.nm = nm
 
-    def dispatch_request(self, project_id: int, label_id: int):
+    def dispatch_request(self, user: str, project_id: int, label_id: int):
+        # pylint: disable=unused-argument
         # extract request data
         data = request.get_json(force=True)
 

+ 136 - 0
pycs/frontend/endpoints/pipelines/EstimateBoundingBox.py

@@ -0,0 +1,136 @@
+import typing as T
+import uuid
+
+import cv2
+import numpy as np
+
+from flask import abort
+from flask import make_response
+from flask import request
+from flask.views import View
+
+from pycs.database.File import File
+from pycs.database.Result import Result
+from pycs.frontend.notifications.NotificationManager import NotificationManager
+from pycs.jobs.JobGroupBusyException import JobGroupBusyException
+from pycs.jobs.JobRunner import JobRunner
+
+class EstimateBoundingBox(View):
+    """
+    estimate a bounding box around a clicked position in a file and store it as a result
+    """
+    # pylint: disable=arguments-differ
+    methods = ['POST']
+
+    def __init__(self, nm: NotificationManager, jobs: JobRunner,):
+        # pylint: disable=invalid-name
+        self.nm = nm
+        self.jobs = jobs
+
+    def dispatch_request(self, user: str, file_id: int):
+        # pylint: disable=unused-argument
+
+        file = File.get_or_404(file_id)
+        request_data = request.get_json(force=True)
+        if 'x' not in request_data or 'y' not in request_data:
+            abort(400, "coordinates for the estimation are missing")
+
+        x, y = map(request_data.get, "xy")
+
+        # get project
+        project = file.project
+        try:
+            rnd = str(uuid.uuid4())[:10]
+            self.jobs.run(project,
+                          "Estimation",
+                          f'{project.name} (create predictions)',
+                          f"{project.id}/estimation/{rnd}",
+                          estimate,
+                          file.id, x, y,
+                          result=self.nm.create_result
+                          )
+
+        except JobGroupBusyException:
+            abort(400, "Job is already running!")
+
+        return make_response()
+
+
+def estimate(file_id: int, x: float, y: float) -> Result:
+    """ estimation function """
+
+    file = File.query.get(file_id)
+
+    image = cv2.imread(file.absolute_path, cv2.IMREAD_GRAYSCALE)
+
+    h, w = image.shape
+    pos = int(x * w), int(y * h)
+    x0, y0, x1, y1 = detect(image, pos,
+                            window_size=1000,
+                            pixel_delta=50,
+                            enlarge=1e-2,
+                           )
+
+    data = dict(
+       x=x0 / w,
+       y=y0 / h,
+       w=(x1-x0) / w,
+       h=(y1-y0) / h
+    )
+
+    return file.create_result('pipeline', 'bounding-box', label=None, data=data)
+
+def detect(image: np.ndarray,
+           pos: T.Tuple[int, int],
+           window_size: int = 1000,
+           pixel_delta: int = 0,
+           enlarge: float = -1) -> T.Tuple[int, int, int, int]:
+    """ detection function """
+    # image = blur(image, 3)
+    x, y = pos
+    pixel = image[y, x]
+
+    min_pix, max_pix = pixel - pixel_delta, pixel + pixel_delta
+
+    mask = np.logical_and(min_pix < image, image < max_pix).astype(np.float32)
+    # mask = open_close(mask)
+    # mask = blur(mask)
+
+    pad = window_size // 2
+    mask = np.pad(mask, pad, mode="constant")
+    window = mask[y: y + window_size, x: x + window_size]
+
+    sum_x, sum_y = window.sum(axis=0), window.sum(axis=1)
+
+    enlarge = int(enlarge * max(image.shape))
+    (x0, x1), (y0, y1) = get_borders(sum_x, enlarge), get_borders(sum_y, enlarge)
+
+    x0 = max(x + x0 - pad, 0)
+    y0 = max(y + y0 - pad, 0)
+
+    x1 = min(x + x1 - pad, image.shape[1])
+    y1 = min(y + y1 - pad, image.shape[0])
+
+    return x0, y0, x1, y1
+
+def get_borders(arr, enlarge: int, eps=5e-1):
+    """ returns borders based on coordinate extrema """
+    mid = len(arr) // 2
+
+    arr0, arr1 = arr[:mid], arr[mid:]
+
+    thresh = arr[mid] * eps
+
+    lowers = np.where(arr0 < thresh)[0]
+    lower = 0 if len(lowers) == 0 else lowers[-1]
+
+    uppers = np.where(arr1 < thresh)[0]
+    upper = arr1.argmin() if len(uppers) == 0 else uppers[0]
+
+    # shift by len(arr0) because the index was computed in the second half of arr
+    upper = len(arr0) + upper
+
+    if enlarge > 0:
+        lower = max(lower - enlarge, 0)
+        upper = min(upper + enlarge, len(arr)-1)
+
+    return int(lower), int(upper)
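
To make the thresholding above easier to follow, here is a small worked example (a sketch, not part of the commit; the numbers are toy values and the expected output was computed by hand, so treat it as approximate):

    import numpy as np

    from pycs.frontend.endpoints.pipelines.EstimateBoundingBox import detect

    image = np.zeros((400, 600), dtype=np.uint8)
    image[150:250, 200:350] = 120   # bright object on a dark background
    click = (275, 200)              # (x, y) roughly in the object's centre

    # pixel_delta=50 keeps intensities in (70, 170), so only the object survives the mask
    box = detect(image, click, window_size=300, pixel_delta=50, enlarge=1e-2)
    print(box)                      # roughly (193, 143, 356, 256): the object plus a small margin

estimate() then divides these corners by the image width and height, which yields the relative x, y, w, h stored in the new bounding-box result.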

+ 2 - 1
pycs/frontend/endpoints/pipelines/FitModel.py

@@ -22,7 +22,8 @@ class FitModel(View):
         self.jobs = jobs
         self.pipelines = pipelines
 
-    def dispatch_request(self, project_id):
+    def dispatch_request(self, user: str, project_id):
+        # pylint: disable=unused-argument
         project = Project.get_or_404(project_id)
 
         # extract request data

+ 6 - 5
pycs/frontend/endpoints/pipelines/PredictBoundingBox.py

@@ -26,11 +26,12 @@ class PredictBoundingBox(View):
         self.jobs = jobs
         self.pipelines = pipelines
 
-    def dispatch_request(self, file_id, bbox_id):
+    def dispatch_request(self, user: str, file_id, bbox_id):
         # find file and result (=bounding box)
-        # We need the result to get (x,y,w,h)
+        # We will later need the result to get (x,y,w,h). Here, we just check
+        # whether the result is valid.
         file = File.get_or_404(file_id)
-        result = Result.get_or_404(bbox_id)
+        Result.get_or_404(bbox_id)
 
         # extract request data
         data = request.get_json(force=True)
@@ -51,8 +52,8 @@ class PredictBoundingBox(View):
                           f'{project.id}/model-interaction',
                           Predict.load_and_pure_inference,
                           self.pipelines, notifications, self.nm,
-                          project.id, [file.id], {file.id: [result]},
-                          progress=Predict.progress)
+                          project.id, [file.id], {file.id: [bbox_id]},
+                          user, progress=Predict.progress)
 
         except JobGroupBusyException:
             abort(400, "File prediction is already running")

+ 2 - 1
pycs/frontend/endpoints/pipelines/PredictFile.py

@@ -25,7 +25,8 @@ class PredictFile(View):
         self.jobs = jobs
         self.pipelines = pipelines
 
-    def dispatch_request(self, file_id):
+    def dispatch_request(self, user: str, file_id):
+        # pylint: disable=unused-argument
         # find file
         file = File.get_or_404(file_id)
 

+ 23 - 13
pycs/frontend/endpoints/pipelines/PredictModel.py

@@ -32,7 +32,8 @@ class PredictModel(View):
         self.jobs = jobs
         self.pipelines = pipelines
 
-    def dispatch_request(self, project_id):
+    def dispatch_request(self, user: str, project_id):
+        # pylint: disable=unused-argument
         project = Project.get_or_404(project_id)
 
         # extract request data
@@ -129,7 +130,7 @@ class PredictModel(View):
                          notifications: NotificationList,
                          notification_manager: NotificationManager,
                          project_id: int, file_filter: List[int],
-                         result_filter: dict[int, List[Result]]):
+                         bbox_id_filter: dict[int, List[int]], user: str):
         """
         load the pipeline and call the execute function
 
@@ -139,7 +140,8 @@ class PredictModel(View):
         :param notification_manager: notification manager
         :param project_id: project id
         :param file_filter: list of file ids
-        :param result_filter: dict of file id and list of results to classify
+        :param bbox_id_filter: dict of file id and list of bbox_ids to classify
+        :param user: username of the user asking to predict the bounding box
         :return:
         """
         pipeline = None
@@ -149,32 +151,40 @@ class PredictModel(View):
         model_root = project.model.root_folder
         storage = MediaStorage(project_id, notifications)
 
-        # create a list of MediaFile
-        # Also convert dict to the same key type.
-        length = len(file_filter)
-
-
         # load pipeline
         try:
             pipeline = pipelines.load_from_root_folder(project_id, model_root)
 
             # iterate over media files
             index = 0
+            length = len(file_filter)
             for file_id in file_filter:
                 file = project.file(file_id)
                 file = MediaFile(file, notifications)
-                bounding_boxes = [MediaBoundingBox(result) for result in result_filter[file_id]]
+                bounding_boxes = [MediaBoundingBox(Result.get_or_404(bbox_id))
+                                for bbox_id in bbox_id_filter[file_id]]
 
                 # Perform inference.
                 bbox_labels = pipeline.pure_inference(storage, file, bounding_boxes)
 
                 # Add the labels determined in the inference process.
-                for i, result in enumerate(result_filter[file_id]):
-                    result.label_id = bbox_labels[i].identifier
-                    result.set_origin('user', commit=True)
+
+                # for i, result in enumerate(result_filter[file_id]):
+                #     bbox_label = bbox_labels[i]
+                #     if isinstance(bbox_label, MediaLabel):
+                #         result.label_id = bbox_label.identifier
+
+                #     result.set_origin('user', commit=True)
+
+                for i, bbox_id in enumerate(bbox_id_filter[file_id]):
+                    result = Result.get_or_404(bbox_id)
+                    result.set_label(bbox_labels[i].identifier, commit=True)
+                    result.set_origin('user', origin_user=user, commit=True)
+
                     notifications.add(notification_manager.edit_result, result)
 
-                # yield progress
+                # commit changes and yield progress
+                db.session.commit()
                 yield index / length, notifications
 
                 index += 1

+ 2 - 1
pycs/frontend/endpoints/projects/CreateProject.py

@@ -33,7 +33,8 @@ class CreateProject(View):
         self.nm = nm
         self.jobs = jobs
 
-    def dispatch_request(self):
+    def dispatch_request(self, user: str):
+        # pylint: disable=unused-argument
         # extract request data
         data = request.get_json(force=True)
 

+ 2 - 1
pycs/frontend/endpoints/projects/EditProjectDescription.py

@@ -19,7 +19,8 @@ class EditProjectDescription(View):
         self.nm = nm
 
 
-    def dispatch_request(self, project_id: int):
+    def dispatch_request(self, user: str, project_id: int):
+        # pylint: disable=unused-argument
         # extract request data
         data = request.get_json(force=True)
         description = data.get('description')

+ 2 - 1
pycs/frontend/endpoints/projects/EditProjectName.py

@@ -19,7 +19,8 @@ class EditProjectName(View):
         self.nm = nm
 
 
-    def dispatch_request(self, project_id: int):
+    def dispatch_request(self, user: str, project_id: int):
+        # pylint: disable=unused-argument
         # extract request data
         data = request.get_json(force=True)
         name = data.get('name')

+ 2 - 1
pycs/frontend/endpoints/projects/ExecuteExternalStorage.py

@@ -27,7 +27,8 @@ class ExecuteExternalStorage(View):
         self.nm = nm
         self.jobs = jobs
 
-    def dispatch_request(self, project_id: int):
+    def dispatch_request(self, user: str, project_id: int):
+        # pylint: disable=unused-argument
         # extract request data
         data = request.get_json(force=True)
 

+ 2 - 1
pycs/frontend/endpoints/projects/ExecuteLabelProvider.py

@@ -25,7 +25,8 @@ class ExecuteLabelProvider(View):
         self.nm = nm
         self.jobs = jobs
 
-    def dispatch_request(self, project_id: int):
+    def dispatch_request(self, user: str, project_id: int):
+        # pylint: disable=unused-argument
         project = Project.get_or_404(project_id)
 
         # extract request data

+ 2 - 1
pycs/frontend/endpoints/projects/GetProjectModel.py

@@ -12,7 +12,8 @@ class GetProjectModel(View):
     methods = ['GET']
 
 
-    def dispatch_request(self, project_id: int):
+    def dispatch_request(self, user: str, project_id: int):
+        # pylint: disable=unused-argument
         # find project
         project = Project.get_or_404(project_id)
 

+ 2 - 1
pycs/frontend/endpoints/projects/ListProjectCollections.py

@@ -13,7 +13,8 @@ class ListProjectCollections(View):
     methods = ['GET']
 
 
-    def dispatch_request(self, project_id: int):
+    def dispatch_request(self, user: str, project_id: int):
+        # pylint: disable=unused-argument
         # find project
         project = Project.get_or_404(project_id)
 

+ 16 - 2
pycs/frontend/endpoints/projects/ListProjectFiles.py

@@ -1,5 +1,6 @@
 from flask import abort
 from flask import jsonify
+from flask import request
 from flask.views import View
 
 from pycs.database.Project import Project
@@ -14,10 +15,12 @@ class ListProjectFiles(View):
 
 
     def dispatch_request(self,
+                         user: str,
                          project_id: int,
                          start: int = 0,
                          length: int = -1,
                          collection_id: int = None):
+        # pylint: disable=unused-argument
         # find project
 
         project = Project.get_or_404(project_id)
@@ -37,8 +40,19 @@ class ListProjectFiles(View):
                 files = collection.get_files(offset=start, limit=length).all()
 
         else:
-            count = project.files.count()
-            files = project.get_files(offset=start, limit=length).all()
+
+            with_annotations = request.args.get("only_with_annotations")
+            kwargs = dict(with_annotations=None)
+
+            if with_annotations is not None:
+                kwargs["with_annotations"] = with_annotations == "1"
+
+            # first get all files without specific limit
+            files = project.get_files(**kwargs)
+            # get the count of those
+            count = files.count()
+            # finally, limit to the desired offset and number of files
+            files = files.offset(start).limit(length).all()
 
         # return files
         return jsonify({

+ 2 - 1
pycs/frontend/endpoints/projects/RemoveProject.py

@@ -18,7 +18,8 @@ class RemoveProject(View):
         # pylint: disable=invalid-name
         self.nm = nm
 
-    def dispatch_request(self, project_id: int):
+    def dispatch_request(self, user: str, project_id: int):
+        # pylint: disable=unused-argument
         # extract request data
         data = request.get_json(force=True)
 

+ 35 - 0
pycs/frontend/endpoints/results/ConfirmAllResults.py

@@ -0,0 +1,35 @@
+from flask import abort
+from flask import make_response
+from flask import request
+from flask.views import View
+
+from pycs.database.File import File
+from pycs.frontend.notifications.NotificationManager import NotificationManager
+
+
+class ConfirmAllResults(View):
+    """
+    confirm all results of a file (change their origin to user)
+    """
+    # pylint: disable=arguments-differ
+    methods = ['POST']
+
+    def __init__(self, nm: NotificationManager):
+        # pylint: disable=invalid-name
+        self.nm = nm
+
+    def dispatch_request(self, user: str, file_id: int):
+        # find file
+        file = File.get_or_404(file_id)
+
+        # extract request data
+        data = request.get_json(force=True)
+
+        if not data.get('confirm', False):
+            return abort(400, "confirm flag is missing")
+
+        for result in file.results:
+            result.confirm(user)
+            self.nm.edit_result(result)
+
+        return make_response()

+ 5 - 4
pycs/frontend/endpoints/results/ConfirmResult.py

@@ -1,4 +1,6 @@
-from flask import make_response, request, abort
+from flask import abort
+from flask import make_response
+from flask import request
 from flask.views import View
 
 from pycs.database.Result import Result
@@ -16,7 +18,7 @@ class ConfirmResult(View):
         # pylint: disable=invalid-name
         self.nm = nm
 
-    def dispatch_request(self, result_id: int):
+    def dispatch_request(self, user: str, result_id: int):
         # find result
         result = Result.get_or_404(result_id)
 
@@ -27,8 +29,7 @@ class ConfirmResult(View):
         if not data.get('confirm', False):
             return abort(400, "confirm flag is missing")
 
-
-        result.set_origin('user')
+        result.confirm(user)
 
         self.nm.edit_result(result)
         return make_response()

+ 49 - 0
pycs/frontend/endpoints/results/CopyResults.py

@@ -0,0 +1,49 @@
+from flask import abort
+from flask import make_response
+from flask import request
+from flask.views import View
+
+from pycs import db
+from pycs.database.File import File
+from pycs.frontend.notifications.NotificationManager import NotificationManager
+
+
+class CopyResults(View):
+    """
+    copy all results from one file (copy_from) to another
+    """
+    # pylint: disable=arguments-differ
+    methods = ['POST']
+
+    def __init__(self, nm: NotificationManager):
+        # pylint: disable=invalid-name
+        self.nm = nm
+
+    def dispatch_request(self, user: str, file_id: int):
+
+        request_data = request.get_json(force=True)
+
+        if 'copy_from' not in request_data:
+            abort(400, "copy_from argument is missing")
+
+        new = []
+        # start transaction
+        with db.session.begin_nested():
+            file = File.get_or_404(file_id)
+            other_file = File.get_or_404(request_data.get('copy_from'))
+
+            for result in other_file.results.all():
+                new_result = file.create_result(
+                    origin='pipeline',
+                    result_type=result.type,
+                    origin_user=None,
+                    label=result.label_id,
+                    data=result.data,
+                    commit=False)
+                new.append(new_result)
+
+
+        for new_result in new:
+            self.nm.create_result(new_result)
+
+        return make_response()
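
A minimal usage sketch (the route path, ids and credentials are illustrative assumptions; the copy_from payload key is taken from the code above, and the requests library is assumed to be available):

    import requests

    # copy every result of file 7 onto file 12
    resp = requests.post(
        "http://localhost:5000/data/12/copy_results",  # path is an assumed example
        json={"copy_from": 7},
        auth=("user", "password"),
    )
    resp.raise_for_status()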

+ 3 - 2
pycs/frontend/endpoints/results/CreateResult.py

@@ -47,7 +47,7 @@ class CreateResult(View):
         return result_type, label, data
 
 
-    def dispatch_request(self, file_id: int):
+    def dispatch_request(self, user: str, file_id: int):
 
         file = File.get_or_404(file_id)
 
@@ -68,10 +68,11 @@ class CreateResult(View):
             # insert into database
             result = file.create_result(
                 origin='user',
+                origin_user=user,
                 result_type=result_type,
                 label=label,
                 data=data)
 
-            self.nm.create_result(result)
+        self.nm.create_result(result)
 
         return jsonify(result)

+ 2 - 2
pycs/frontend/endpoints/results/EditResultData.py

@@ -18,7 +18,7 @@ class EditResultData(View):
         # pylint: disable=invalid-name
         self.nm = nm
 
-    def dispatch_request(self, result_id: int):
+    def dispatch_request(self, user: str, result_id: int):
         # find result
         result = Result.get_or_404(result_id)
 
@@ -30,7 +30,7 @@ class EditResultData(View):
             abort(400, "Could not find data argument!")
 
         result.data = data
-        result.set_origin('user', commit=True)
+        result.set_origin('user', origin_user=user, commit=True)
 
         self.nm.edit_result(result)
         return make_response()

+ 3 - 3
pycs/frontend/endpoints/results/EditResultLabel.py

@@ -18,7 +18,7 @@ class EditResultLabel(View):
         # pylint: disable=invalid-name
         self.nm = nm
 
-    def dispatch_request(self, result_id: int):
+    def dispatch_request(self, user: str, result_id: int):
         # find result
         result = Result.get_or_404(result_id)
 
@@ -34,8 +34,8 @@ class EditResultLabel(View):
         if result.type == 'labeled-image' and label is None:
             abort(400, "Label is required for 'labeled-images' results")
 
-        result.label_id = label
-        result.set_origin('user', commit=True)
+        result.set_label(label)
+        result.set_origin('user', origin_user=user, commit=True)
 
         self.nm.edit_result(result)
         return make_response()

+ 2 - 1
pycs/frontend/endpoints/results/GetProjectResults.py

@@ -13,7 +13,8 @@ class GetProjectResults(View):
     methods = ['GET']
 
 
-    def dispatch_request(self, project_id: int):
+    def dispatch_request(self, user: str, project_id: int):
+        # pylint: disable=unused-argument
         # get project from database
         project = Project.get_or_404(project_id)
 

+ 2 - 1
pycs/frontend/endpoints/results/GetResults.py

@@ -12,7 +12,8 @@ class GetResults(View):
     methods = ['GET']
 
 
-    def dispatch_request(self, file_id: int):
+    def dispatch_request(self, user: str, file_id: int):
+        # pylint: disable=unused-argument
         # get file from database
         file = File.get_or_404(file_id)
 

+ 2 - 1
pycs/frontend/endpoints/results/RemoveResult.py

@@ -18,7 +18,8 @@ class RemoveResult(View):
         # pylint: disable=invalid-name
         self.nm = nm
 
-    def dispatch_request(self, result_id: int):
+    def dispatch_request(self, user: str, result_id: int):
+        # pylint: disable=unused-argument
         result = Result.get_or_404(result_id)
 
         # extract request data

+ 2 - 1
pycs/frontend/endpoints/results/ResetResults.py

@@ -18,7 +18,8 @@ class ResetResults(View):
         # pylint: disable=invalid-name
         self.nm = nm
 
-    def dispatch_request(self, file_id: int):
+    def dispatch_request(self, user: str, file_id: int):
+        # pylint: disable=unused-argument
         file = File.get_or_404(file_id)
 
         # extract request data

+ 1 - 2
pycs/frontend/notifications/NotificationManager.py

@@ -6,7 +6,6 @@ from pycs.database.Label import Label
 from pycs.database.Model import Model
 from pycs.database.Project import Project
 from pycs.database.Result import Result
-from pycs.frontend.util.JSONEncoder import JSONEncoder
 from pycs.jobs.Job import Job
 
 
@@ -17,7 +16,7 @@ class NotificationManager:
 
     def __init__(self, sio: Server):
         self.sio = sio
-        self.json = JSONEncoder()
+        self.json = app.json_encoder()
 
     def __emit(self, name, obj):
         enc = self.json.default(obj)

+ 0 - 31
pycs/frontend/util/JSONEncoder.py

@@ -1,31 +0,0 @@
-import datetime
-
-from typing import Any
-
-from flask.json import JSONEncoder as Base
-
-from pycs.database.util.JSONEncoder import JSONEncoder as DatabaseEncoder
-from pycs.jobs.util.JSONEncoder import JSONEncoder as JobsEncoder
-
-
-class JSONEncoder(Base):
-    """
-    prepares job and DB objects to be json encoded
-    """
-
-    def default(self, o: Any) -> Any:
-        module = o.__class__.__module__
-
-        if module.startswith('pycs.database'):
-            return DatabaseEncoder().default(o)
-
-        if module.startswith('pycs.jobs'):
-            return JobsEncoder().default(o)
-
-        if isinstance(o, datetime.datetime):
-            return str(o)
-
-        if isinstance(o, dict):
-            return o
-
-        return o.__dict__

+ 0 - 0
pycs/frontend/util/__init__.py


+ 24 - 3
pycs/interfaces/MediaFile.py

@@ -50,11 +50,13 @@ class MediaFile:
         else:
             data = None
 
-        created = self.__file.create_result('pipeline', 'labeled-image', label, data)
+        created = self.__file.create_result(origin='pipeline',
+            result_type='labeled-image', label=label, data=data)
         self.__notifications.add(self.__notifications.notifications.create_result, created)
 
     def add_bounding_box(self, x: float, y: float, w: float, h: float,
-                         label: Union[int, MediaLabel] = None, frame: int = None):
+                         label: Union[int, MediaLabel] = None, frame: int = None,
+                         origin:str = None, origin_user: str = None) -> Result:
         """
         create a bounding-box result
 
@@ -64,6 +66,9 @@ class MediaFile:
         :param h: relative height [0, 1]
         :param label: label
         :param frame: frame index (only set for videos)
+        :param origin: Either pipeline or user
+        :param origin_user: Username of the user that provided the bounding box
+        :return: Created Result
         """
         result = {
             'x': x,
@@ -77,9 +82,14 @@ class MediaFile:
         if label is not None and isinstance(label, MediaLabel):
             label = label.identifier
 
-        created = self.__file.create_result('pipeline', 'bounding-box', label, result)
+        if origin is None:
+            origin = 'pipeline'
+        created = self.__file.create_result(origin=origin, origin_user=origin_user,
+            result_type='bounding-box', label=label, data=result)
         self.__notifications.add(self.__notifications.notifications.create_result, created)
 
+        return created
+
     def remove_predictions(self):
         """
         remove and return all predictions added from pipelines
@@ -88,6 +98,17 @@ class MediaFile:
         for result in removed:
             self.__notifications.add(self.__notifications.notifications.remove_result, result)
 
+    def remove_result(self, result_id):
+        """
+        Removes the result with the given id.
+
+        :param result_id: id of the result to delete
+        """
+
+        removed = self.__file.remove_result(id=result_id)
+        for result in removed:
+            self.__notifications.add(self.__notifications.notifications.remove_result, result)
+
     def __get_results(self, origin: str) -> List[Union[MediaImageLabel, MediaBoundingBox]]:
 
         def result_to_media(result: Result) -> Union[MediaImageLabel, MediaBoundingBox]:

+ 3 - 1
pycs/interfaces/MediaImageLabel.py

@@ -9,7 +9,9 @@ class MediaImageLabel:
 
     def __init__(self, result: Result):
         self.label = result.label
-        self.frame = result.data['frame'] if 'frame' in result.data else None
+        self.frame = None
+        if result.data is not None and 'frame' in result.data:
+            self.frame = result.data['frame']
 
     def serialize(self) -> dict:
         """

+ 2 - 1
pycs/interfaces/Pipeline.py

@@ -70,7 +70,8 @@ class Pipeline:
         """
         raise NotImplementedError
 
-    def pure_inference(self, storage: MediaStorage, file: MediaFile, bounding_boxes: List[MediaBoundingBox]):
+    def pure_inference(self, storage: MediaStorage, file: MediaFile,
+                       bounding_boxes: List[MediaBoundingBox]):
         """
         receive a file and a list of bounding boxes and only create a
         classification for the given bounding boxes.

+ 1 - 1
pycs/jobs/JobRunner.py

@@ -100,7 +100,7 @@ class JobRunner:
         :param identifier: job identifier
         :return:
         """
-        for i in range(len(self.__jobs)):
+        for i, job in enumerate(self.__jobs):
             if self.__jobs[i].identifier == identifier:
                 if self.__jobs[i].finished is not None:
                     job = self.__jobs[i]

+ 0 - 17
pycs/jobs/util/JSONEncoder.py

@@ -1,17 +0,0 @@
-from typing import Any
-
-from flask.json import JSONEncoder as Base
-
-
-class JSONEncoder(Base):
-    """
-    prepares job objects to be json encoded
-    """
-
-    def default(self, o: Any) -> Any:
-        # copy = o.__dict__.copy()
-        # del copy['runner']
-        # del copy['group']
-        # return copy
-
-        return o.__dict__.copy()

+ 0 - 0
pycs/jobs/util/__init__.py


+ 7 - 0
pycs/management/__init__.py

@@ -0,0 +1,7 @@
+from pycs.management.project import project_cli
+from pycs.management.result import result_cli
+
+def setup_commands(app):
+    """ adds commands to app's CLI """
+    app.cli.add_command(project_cli)
+    app.cli.add_command(result_cli)

+ 43 - 0
pycs/management/project.py

@@ -0,0 +1,43 @@
+import click
+from tabulate import tabulate
+
+from flask.cli import AppGroup
+
+from pycs import app
+from pycs.database.Project import Project
+from pycs.util import FileOperations
+
+
+project_cli = AppGroup("project", short_help="Project operations")
+
+@project_cli.command()
+@click.argument("project_id")
+def generate_thumbnails(project_id):
+    """ Generates thumbnails for a specific project or all project """
+
+    if project_id == "all":
+        projects = Project.query.all()
+        app.logger.info(f"Generating thumbnails for all projects ({len(projects)})!")
+    else:
+        project = Project.query.get(project_id)
+        if project is None:
+            app.logger.error(f"Could not find project with ID {project_id}!")
+            return
+        app.logger.info(f"Generating thumbnails for project {project}!")
+        projects = [project]
+
+    for project in projects:
+        FileOperations.generate_thumbnails(project)
+
+@project_cli.command("list")
+def list_projects():
+    """ List information about existing projects """
+    projects = Project.query.all()
+
+    print(f"Got {len(projects)} projects")
+    rows = [(p.id, p.name, p.description) for p in projects]
+
+    print(tabulate(rows,
+        headers=["id", "name", "description"],
+        tablefmt="fancy_grid"
+    ))
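
Usage note: once setup_commands() has registered the group, these commands are available on the Flask CLI, e.g. "flask project list" and "flask project generate-thumbnails all" (click 7 and newer typically derive the command name with a dash; older click versions keep the underscore, i.e. "generate_thumbnails").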

+ 230 - 0
pycs/management/result.py

@@ -0,0 +1,230 @@
+import click
+import flask
+import simplejson as json
+
+from flask.cli import AppGroup
+
+from pycs import app
+from pycs import database as db
+
+result_cli = AppGroup("result", short_help="Result operations")
+
+
+
+@result_cli.command("export")
+@click.argument("project_id")
+@click.argument("indent", required=False)
+@click.argument("output", required=False)
+def export(project_id, output, indent):
+    """ Export results for a specific project or for all projects """
+    if project_id == "all":
+        projects = db.Project.query.all()
+        app.logger.info(f"Exporting results for all projects ({len(projects)})!")
+        if output is None:
+            output = "output.json"
+
+    else:
+        project = db.Project.query.get(project_id)
+        if project is None:
+            app.logger.error(f"Could not find project with ID {project_id}!")
+            return
+        app.logger.info(f"Exporting results for project {project}!")
+        projects = [project]
+        if output is None:
+            output = f"output_project_{int(project_id):04d}.json"
+
+    app.logger.info(f"Exporting to {output}")
+
+    results = []
+
+    for project in projects:
+        project_files = [
+            dict(**f.serialize(),
+                results=[
+                    dict(**r.serialize(), label=r.label.serialize() if r.label is not None else None)
+                        for r in f.results.all()
+                ])
+                for f in project.files.all() if f.results.count() != 0
+            ]
+
+        results.append(dict(
+            project_id=project.id,
+            files=project_files,
+            labels=[lab.serialize() for lab in project.labels.all()],
+        ))
+
+
+    if indent is not None:
+        indent = int(indent)
+
+    with open(output, "w", encoding="utf-8") as out_f:
+        flask.json.dump(results, out_f, app=app, indent=indent)
+
+
+
+
+@result_cli.command("restore")
+@click.argument("infile")
+@click.option("--dry-run", is_flag=True)
+def restore(infile, dry_run):
+
+    with open(infile) as f:
+        results = json.load(f)
+
+    for project_results in results:
+        project = db.Project.get_or_404(project_results["project_id"])
+        for file_results in project_results["files"]:
+            file = db.File.get_or_404(file_results["id"])
+
+            assert file.path == file_results["path"]
+
+            # first check for new and changed results
+            for _result in file_results["results"]:
+
+                if not _is_data_valid(**_result):
+                    continue
+
+                result = get_result_or_none(file, **_result)
+
+                user1 = _result["origin_user"]
+                data1 = _result["data"]
+                ref1 = (_result["label"] or {}).get("reference")
+                # lab1 = (_result["label"] or {}).get("id")
+
+
+                if result is None:
+                    # we have a new result entry
+                    if not dry_run:
+                        file.create_result(
+                            result_type="bounding-box",
+                            origin="user",
+                            origin_user=user1,
+                            label=ref1,
+                            data=data1,
+                            commit=True
+                        )
+                    print(" | ".join([
+                        f"Project #{project.id:< 6d}"
+                        f"File #{file.id:< 6d} [{file.name:^30s}]",
+                        "[New Result]",
+                        f"User: {user1 or '':<10s}",
+                        f"Data: {data1}, Label-Ref: {ref1}",
+                        ])
+                    )
+
+                    continue
+
+                assert result.file_id == _result["file_id"]
+                user0 = result.origin_user
+                data0 = result.data
+                ref0 = getattr(result.label, "reference", None)
+                # lab0 = getattr(result.label, "id", None)
+
+                is_same_data = _check_data(data0, data1)
+
+                if is_same_data and (ref0 == ref1 or ref1 is None):
+                    # nothing to change
+                    continue
+
+                print(" | ".join([
+                    f"Project #{project.id:< 6d}"
+                    f"File #{file.id:< 6d} [{file.name:^30s}]",
+                    ]), end=" | "
+                )
+                if not is_same_data:
+                    # data was updated
+                    print(" | ".join([
+                        "[Data updated]",
+                        f"User: {user1 or '':<10s}",
+                        f"Data: {data0} -> {data1}"
+                        ]), end=" | "
+                    )
+                    assert user1 is not None
+                    if not dry_run:
+                        result.origin_user = user1
+                        result.data = data1
+
+                if ref0 != ref1:
+                    assert user1 is not None
+                    if not dry_run:
+                        result.origin_user = user1
+                    if ref1 is None:
+                        # label was deleted
+                        print("[Label Deleted]")
+                        if not dry_run:
+                            result.label_id = None
+                    else:
+                        # label was updated
+                        print(" | ".join([
+                            "[Label updated]",
+                            f"User: {user0 or '':<10s} -> {user1 or '':<10s}",
+                            f"{ref0 or 'UNK':<6s} -> {ref1 or 'UNK':<6s}"
+                            ])
+                        )
+                        label = project.label_by_reference(ref1)
+                        if not dry_run:
+                            result.label_id = label.id
+                else:
+                    print()
+
+                if not dry_run:
+                    result.commit()
+
+            # then check for deleted results
+            for result in file.results.all():
+                if result.origin != "user" or result.type != "bounding-box":
+                    continue
+
+                found = False
+                for _result in file_results["results"]:
+                    if not _is_data_valid(**_result):
+                        continue
+
+                    if _check_data(result.data, _result["data"]):
+                        found = True
+                        break
+
+                if not found:
+                    print(" | ".join([
+                        f"Project #{project.id:< 6d}"
+                        f"File #{file.id:< 6d} [{file.name:^30s}]",
+                        "[Result deleted]",
+                        f"{result.data}",
+                        f"{result.label}",
+                        ])
+                    )
+
+                    if not dry_run:
+                        result.delete()
+
+def _is_data_valid(*, data, type, origin, **kwargs):
+
+    # results without w/h entries (e.g. labeled-image data) should not raise a KeyError
+    wh = (None, None) if data is None else (data.get("w"), data.get("h"))
+
+    return (type != "labeled-image" and
+        origin == "user" and
+        0 not in wh)
+
+def _check_data(data0, data1):
+
+    if None in (data0, data1):
+        return data0 == data1 == None
+
+    for key in data0:
+        if data1.get(key) != data0.get(key):
+            return False
+
+    return True
+
+def get_result_or_none(file: db.File, id: int, data: dict, **kwargs):
+
+    result = db.Result.query.filter(
+        db.Result.id==id, db.Result.file_id==file.id).one_or_none()
+
+    if result is not None:
+        return result
+
+    for other_results in file.results.all():
+        if _check_data(data, other_results.data):
+            # import pdb; pdb.set_trace()
+            return other_results
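
Usage note: the positional arguments of export are declared in the order project_id, indent, output, so a full export reads "flask result export all 2 results.json"; restore consumes such a file again, e.g. "flask result restore results.json --dry-run".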

+ 25 - 1
pycs/util/FileOperations.py

@@ -7,13 +7,16 @@ from pathlib import Path
 import cv2
 
 from PIL import Image
+from tqdm import tqdm
 
+from pycs import app
 from pycs.database.File import File
 
 DEFAULT_JPEG_QUALITY = 80
 
 
 BoundingBox = namedtuple("BoundingBox", "x y w h")
+Size = namedtuple("Size", "max_width max_height")
 
 
 def file_info(data_folder: str, file_name: str, file_ext: str):
@@ -184,6 +187,7 @@ def resize_image(file_path: str, target_path: str, max_width: int, max_height: i
 
     # abort if file is smaller than desired
     if img_width < max_width and img_height < max_height:
+        image.close()
         return False
 
     # calculate target size
@@ -199,6 +203,9 @@ def resize_image(file_path: str, target_path: str, max_width: int, max_height: i
 
     # save to file
     resized_image.save(target_path, quality=DEFAULT_JPEG_QUALITY)
+
+    # close opened files.
+    image.close()
     return True
 
 
@@ -244,7 +251,7 @@ def find_images(folder,
     """ walk recursively the folder and find images """
 
     suffixes = suffixes if suffixes is not None else [".jpg", ".jpeg", ".png"]
-    images: T.List[Path] = list()
+    images: T.List[Path] = []
     for root, _, files in os.walk(folder):
         for file in files:
             fpath = Path(root, file)
@@ -254,3 +261,20 @@ def find_images(folder,
             images.append(fpath)
 
     return images
+
+
+def generate_thumbnails(project: "Project", sizes = None):
+    """ generates thumbnails for all image files in the given  """
+
+    if sizes is None:
+        sizes = [Size(200, 200), Size(2000, 1200)]
+
+    app.logger.info(f"Generating thumbnails for project \"{project.name}\"")
+
+    files = list(project.files)
+    for file in tqdm(files):
+        for size in sizes:
+            resize_file(file,
+                project.root_folder,
+                size.max_width,
+                size.max_height)

+ 22 - 0
pycs/util/JSONEncoder.py

@@ -0,0 +1,22 @@
+import datetime
+import typing as T
+
+from flask import json
+
+class JSONEncoder(json.JSONEncoder):
+    """
+    prepares job and DB objects to be json encoded
+    """
+
+    def default(self, o: T.Any) -> T.Any:
+
+        if hasattr(o, "serialize") and callable(o.serialize):
+            return o.serialize()
+
+        if isinstance(o, datetime.datetime):
+            return str(o)
+
+        if isinstance(o, dict):
+            return o
+
+        return o.__dict__.copy()
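
A small sketch of what this encoder produces (assumes the pycs package is importable; Dummy is a hypothetical stand-in for a database or job object exposing serialize()):

    import datetime

    from pycs.util.JSONEncoder import JSONEncoder

    class Dummy:
        # stand-in for a DB or job object that knows how to serialize itself
        def serialize(self):
            return {"id": 1}

    enc = JSONEncoder()
    print(enc.default(Dummy()))                        # {'id': 1}
    print(enc.default(datetime.datetime(2022, 1, 1)))  # 2022-01-01 00:00:00

NotificationManager obtains an instance of this class via app.json_encoder() (see the diff above); the registration of the encoder on the app itself presumably happens during app setup (e.g. in pycs/__init__.py, whose diff is not shown in this section).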

+ 1 - 1
pycs/util/PipelineUtil.py

@@ -13,7 +13,7 @@ def load_from_root_folder(root_folder: str) -> Pipeline:
     """
     # load configuration.json
     configuration_path = path.join(root_folder, 'configuration.json')
-    with open(configuration_path, 'r') as configuration_file:
+    with open(configuration_path, 'r', encoding='utf8') as configuration_file:
         configuration = load(configuration_file)
 
     # load code

+ 2 - 1
pycs/util/ProgressFileWriter.py

@@ -7,7 +7,8 @@ class ProgressFileWriter(BufferedWriter):
     """
 
     def __init__(self, path, mode, callback=None):
-        self.file_handler = open(path, mode)
+        # pylint: disable=consider-using-with
+        self.file_handler = open(path, mode, encoding='utf8')
 
         self.progress = 0
         self.callback = callback

+ 4 - 0
requirements.txt

@@ -8,9 +8,13 @@ flask-socketio
 flask-sqlalchemy
 sqlalchemy_serializer
 flask-migrate
+flask-htpasswd
+itsdangerous~=2.0.1
 python-socketio
 munch
 scikit-image
+pandas
+tqdm
 
 chainer~=7.8
 chainer-addons~=0.10

+ 1 - 1
settings.json

@@ -1,7 +1,7 @@
 {
   "host": "",
   "port": 5000,
-  "allowedOrigins": ["https://ammod.inf-cv.uni-jena.de", "https://deimos.inf-cv.uni-jena.de"],
+  "allowedOrigins": ["https://ammod.inf-cv.uni-jena.de", "https://deimos.inf-cv.uni-jena.de", "http://localhost:5000"],
   "projects_folder": "projects",
   "database": "db/data.sqlite3",
   "pipeline_cache_time": 120,

+ 17 - 2
tests/base.py

@@ -5,6 +5,9 @@ import shutil
 import typing as T
 import unittest
 
+import base64
+from flask_htpasswd import HtPasswdAuth
+
 from pathlib import Path
 from unittest import mock
 
@@ -44,9 +47,17 @@ class BaseTestCase(unittest.TestCase):
         app.config["DEBUG"] = False
         app.config["SQLALCHEMY_DATABASE_URI"] = f"sqlite:///{cls.DB_FILE}"
 
+        # Set dummy password protection.
+        # This allows authentication with the credentials user:password.
+        app.config['FLASK_HTPASSWD_PATH'] = '.test-htpasswd'
+        if not os.path.isfile(app.config['FLASK_HTPASSWD_PATH']):
+            with open(app.config['FLASK_HTPASSWD_PATH'], 'w') as f:
+                f.write('user:$apr1$fmi16nrq$3C4MfxW3ChrUNjSLLTB3x.')
+        htpasswd = HtPasswdAuth(app)
+
         if server is None:
             settings["pipeline_cache_time"] = 2
-            server = WebServer(app, settings, discovery)
+            server = WebServer(app, htpasswd, settings, discovery)
 
         if cls.server is None:
             cls.server = server
@@ -101,6 +112,9 @@ class BaseTestCase(unittest.TestCase):
 
         db.create_all()
 
+        credentials = base64.b64encode("user:password".encode()).decode()
+        self.headers = { 'Authorization' : 'Basic %s' %  credentials }
+
         self.client = app.test_client()
         self.context = app.test_request_context()
         self.context.push()
@@ -145,6 +159,7 @@ class BaseTestCase(unittest.TestCase):
             status_code=status_code,
             json=json,
             data=data,
+            headers=self.headers,
             **kwargs
         )
 
@@ -161,5 +176,5 @@ class BaseTestCase(unittest.TestCase):
             status_code=status_code,
             json=json,
             data=data,
+            headers=self.headers
         )
-

+ 1 - 1
tests/client/__init__.py

@@ -32,7 +32,7 @@ class FolderInformationTest(BaseTestCase):
             for i in range(10):
                 self._check(url, folder, dict(exists=True, count=i))
 
-                tempfile.NamedTemporaryFile(dir=folder, delete=False, suffix=".jpg")
+                tempfile.NamedTemporaryFile(dir=folder, delete=False, suffix=".jpg").close()
 
 
 class ListModelsAndLabelProviders(BaseTestCase):

+ 67 - 14
tests/client/file_tests.py

@@ -25,12 +25,27 @@ class _BaseFileTests(_BaseLabelTests):
         for folder in [data_root, root / "temp"]:
             folder.mkdir(exist_ok=True, parents=True)
 
+    def _get_dummy_image_bytes(self, size=(4000, 6000, 3)):
+        byteImgIO = io.BytesIO()
+        byteImg = Image.fromarray(np.zeros(size).astype(np.uint8))
+        byteImg.save(byteImgIO, "JPEG")
+        byteImgIO.seek(0)
+        file_content = byteImgIO.read()
+
+        return file_content
+
+    def _create_dummy_image(self, file_name, size=(4000, 6000, 3)):
+        absolute_path = os.path.join(self.project.data_folder, file_name)
+        file_content = self._get_dummy_image_bytes(size=size)
+        with open(absolute_path, "wb") as f:
+            f.write(file_content)
+
+        return absolute_path, file_content
 
 class FileCreationTests(_BaseFileTests):
 
     @patch_tpool_execute
     def test_file_upload_project_with_external_data(self, mocked_execute=None):
-
         file_content = b"some content+1"
         url = url_for("upload_file", project_id=self.project.id)
 
@@ -53,7 +68,8 @@ class FileCreationTests(_BaseFileTests):
         url = url_for("upload_file", project_id=4242)
         self.post(url, data=dict(), status_code=404)
 
-        file_content = b"some content+1"
+        # Creating a dummy image with proper dummy content.
+        file_content = self._get_dummy_image_bytes()
         url = url_for("upload_file", project_id=self.project.id)
 
         self.assertEqual(0, File.query.count())
@@ -78,6 +94,8 @@ class FileDeletionTests(_BaseFileTests):
 
     def test_file_removal(self):
 
+        self._create_dummy_image("image.jpg")
+
         file_uuid = str(uuid.uuid1())
         file, is_new = self.project.add_file(
             uuid=file_uuid,
@@ -92,9 +110,6 @@ class FileDeletionTests(_BaseFileTests):
 
         self.assertEqual(1, self.project.files.count())
 
-        with open(file.absolute_path, "w"):
-            pass
-
         self.assertTrue(os.path.exists(file.absolute_path))
 
         url = url_for("remove_file", file_id=file.id)
@@ -109,6 +124,8 @@ class FileDeletionTests(_BaseFileTests):
 
     def test_file_removal_from_project_with_external_data(self):
 
+        self._create_dummy_image("image.jpg")
+
         file_uuid = str(uuid.uuid1())
         file, is_new = self.project.add_file(
             uuid=file_uuid,
@@ -121,9 +138,6 @@ class FileDeletionTests(_BaseFileTests):
 
         self.assertTrue(is_new)
 
-        with open(file.absolute_path, "w"):
-            pass
-
         self.project.external_data = True
         self.assertTrue(os.path.exists(file.absolute_path))
         url = url_for("remove_file", file_id=file.id)
@@ -135,7 +149,6 @@ class FileDeletionTests(_BaseFileTests):
 
 class FileGettingTests(_BaseFileTests):
 
-
     def test_get_file_getting(self):
 
         file_uuid = str(uuid.uuid1())
@@ -156,15 +169,15 @@ class FileGettingTests(_BaseFileTests):
         # without an actual file, this GET request returns 404
         self.get(url, status_code=404)
 
-        content = b"some text"
-        with open(file.absolute_path, "wb") as f:
-            f.write(content)
+        _, content = self._create_dummy_image("image.jpg")
 
         response = self.get(url)
 
         self.assertFalse(response.is_json)
         self.assertEqual(content, response.data)
 
+        response.close()
+
     def test_get_prev_next_file(self):
 
         for i in range(1, 6):
@@ -204,6 +217,7 @@ class FileGettingTests(_BaseFileTests):
             self.assertTrue(response.is_json)
 
             content_should = dict(
+                current=file.serialize(),
                 next=n_file,
                 nextInCollection=n_file,
                 previous=p_file,
@@ -221,6 +235,7 @@ class FileGettingTests(_BaseFileTests):
         self.assertTrue(response.is_json)
 
         content_should = dict(
+            current=file.serialize(),
             next=n_file.serialize(),
             nextInCollection=n_file.serialize(),
             previous=p_file.serialize(),
@@ -238,6 +253,7 @@ class FileGettingTests(_BaseFileTests):
         self.assertTrue(response.is_json)
 
         content_should = dict(
+            current=file.serialize(),
             next=n_file.serialize(),
             nextInCollection=n_file.serialize(),
             previous=p_file.serialize(),
@@ -267,7 +283,7 @@ class FileResizingTests(_BaseFileTests):
     @patch_tpool_execute
     def test_resize_image(self, mocked_execute):
 
-        self.get(url_for("get_resized_file", file_id=4242, resolution=300), status_code=404)
+        self.get(url_for("get_resized_file", file_id=4242, resolution=300), status_code=404).close()
 
         file_uuid = str(uuid.uuid1())
         file, is_new = self.project.add_file(
@@ -290,6 +306,7 @@ class FileResizingTests(_BaseFileTests):
             self.assertFalse(response.is_json)
 
             returned_im = _im_from_bytes(response.data)
+            response.close()
 
             self.assertEqual(image.shape, returned_im.shape)
             self._compare_images(image, returned_im)
@@ -305,6 +322,7 @@ class FileResizingTests(_BaseFileTests):
             self.assertFalse(response.is_json)
 
             returned_im = _im_from_bytes(response.data)
+            response.close()
 
             self.assertEqual(sm_image.shape, returned_im.shape)
             self._compare_images(sm_image, returned_im)
@@ -360,11 +378,11 @@ class FileResizingTests(_BaseFileTests):
         url = url_for("get_cropped_file", file_id=file.id,
             resolution=300, crop_box="0x0x1x1")
         response = self.get(url, status_code=404)
+        response.close()
 
         file.path = save
         file.commit()
 
-
     @patch_tpool_execute
     def test_crop_image(self, mocked_execute):
 
@@ -390,11 +408,46 @@ class FileResizingTests(_BaseFileTests):
             self.assertFalse(response.is_json)
 
             returned_im = _im_from_bytes(response.data)
+            response.close()
 
             crop = _crop(image, BoundingBox(*box))
             self.assertEqual(crop.shape, returned_im.shape)
             self._compare_images(crop, returned_im)
 
+    def test_automatic_thumbnail_generation(self):
+
+        img_size = (4000, 6000, 3)
+        self._create_dummy_image("image.jpg", size=img_size)
+
+        file_uuid = str(uuid.uuid1())
+        file, is_new = self.project.add_file(
+            uuid=file_uuid,
+            file_type="image",
+            name=f"name",
+            filename=f"image",
+            extension=".jpg",
+            size=32*1024,
+        )
+
+        self.assertTrue(is_new)
+
+        self.assertEqual(1, self.project.files.count())
+
+        self.assertTrue(os.path.exists(file.absolute_path))
+
+        temp_folder = os.path.join(self.project.root_folder, "temp")
+        for max_width, max_height in [(200, 200), (2000, 800)]:
+            img_path = os.path.join(temp_folder, f"{file_uuid}_{max_width}_{max_height}.jpg")
+
+            self.assertTrue(os.path.exists(img_path))
+
+            with Image.open(img_path) as img:
+                width, height = img.size
+
+            self.assertTrue(width == max_width or height == max_height)
+            self.assertLessEqual(width, max_width)
+            self.assertLessEqual(height, max_height)
+            self.assertLessEqual(abs(img_size[1] / img_size[0] - width / height), 0.1)
 
 def _im_from_bytes(data: bytes) -> np.ndarray:
     return np.asarray(Image.open(io.BytesIO(data)))

+ 26 - 3
tests/client/pipeline_tests.py

@@ -179,10 +179,33 @@ class LabelProviderPipelineTests:
     def test_label_loading_multiple(self):
 
         for i in range(3):
-            self.post(self.url, json=dict(execute=True))
-            self.wait_for_bg_jobs()
+            self.test_label_loading()
+
+    def test_multiple_loading_does_not_delete_existing_labels(self):
+        self.test_label_loading()
+
+        file = self.project.files.first()
+
+        def _check():
+            for res in file.results.all():
+                self.assertIsNotNone(res.label_id)
+
+        for label in self.project.labels:
+            file.create_result(
+                origin="user",
+                result_type="bounding-box",
+                label=label,
+                data=dict(x=0, y=0, w=0.2, h=0.3),
+            )
+
+        file.commit()
+
+        _check()
+
+        for i in range(3):
+            self.test_label_loading()
+            _check()
 
-            self.assertEqual(self.n_labels, self.project.labels.count())
 
 class SimpleLabelProviderPipelineTests(LabelProviderPipelineTests, _BasePipelineTests):
 

+ 1 - 1
tests/client/project_tests.py

@@ -426,6 +426,7 @@ class ProjectListTests(_BaseProjectTests):
 
             file.create_result(
                 origin="user",
+                origin_user="dummy_username",
                 result_type="bounding-box",
                 label=None,
                 data=dict(x=0, y=0, w=1, h=1)
@@ -516,4 +517,3 @@ class ProjectEditTests(_BaseProjectTests):
         self.post(url, json=dict(), status_code=400)
 
         self.assertEqual("Project for a test case", self.project.description)
-

+ 1 - 1
tests/client/result_tests.py

@@ -121,7 +121,7 @@ class ResultGettingTests(_BaseResultTests):
 
         for i in range(n):
             box = dict(x=0, y=0, w=0.9, h=1.0)
-            another_file.create_result("user", "bounding-box", data=box)
+            another_file.create_result("user", "bounding-box", origin_user="dummy_username", data=box)
 
         self.assertEqual(10, Result.query.count())
 

+ 84 - 81
webui/package-lock.json

@@ -1715,6 +1715,16 @@
           "integrity": "sha1-/q7SVZc9LndVW4PbwIhRpsY1IPo=",
           "dev": true
         },
+        "ansi-styles": {
+          "version": "4.3.0",
+          "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+          "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+          "dev": true,
+          "optional": true,
+          "requires": {
+            "color-convert": "^2.0.1"
+          }
+        },
         "cacache": {
           "version": "13.0.1",
           "resolved": "https://registry.npm.taobao.org/cacache/download/cacache-13.0.1.tgz?cache=0&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Fcacache%2Fdownload%2Fcacache-13.0.1.tgz",
@@ -1741,6 +1751,53 @@
             "unique-filename": "^1.1.1"
           }
         },
+        "chalk": {
+          "version": "4.1.2",
+          "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
+          "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
+          "dev": true,
+          "optional": true,
+          "requires": {
+            "ansi-styles": "^4.1.0",
+            "supports-color": "^7.1.0"
+          }
+        },
+        "color-convert": {
+          "version": "2.0.1",
+          "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+          "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+          "dev": true,
+          "optional": true,
+          "requires": {
+            "color-name": "~1.1.4"
+          }
+        },
+        "color-name": {
+          "version": "1.1.4",
+          "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+          "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+          "dev": true,
+          "optional": true
+        },
+        "has-flag": {
+          "version": "4.0.0",
+          "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+          "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+          "dev": true,
+          "optional": true
+        },
+        "loader-utils": {
+          "version": "2.0.2",
+          "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.2.tgz",
+          "integrity": "sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A==",
+          "dev": true,
+          "optional": true,
+          "requires": {
+            "big.js": "^5.2.2",
+            "emojis-list": "^3.0.0",
+            "json5": "^2.1.2"
+          }
+        },
         "source-map": {
           "version": "0.6.1",
           "resolved": "https://registry.npm.taobao.org/source-map/download/source-map-0.6.1.tgz",
@@ -1757,6 +1814,16 @@
             "minipass": "^3.1.1"
           }
         },
+        "supports-color": {
+          "version": "7.2.0",
+          "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+          "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
+          "dev": true,
+          "optional": true,
+          "requires": {
+            "has-flag": "^4.0.0"
+          }
+        },
         "terser-webpack-plugin": {
           "version": "2.3.8",
           "resolved": "https://registry.npm.taobao.org/terser-webpack-plugin/download/terser-webpack-plugin-2.3.8.tgz?cache=0&sync_timestamp=1610194258495&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Fterser-webpack-plugin%2Fdownload%2Fterser-webpack-plugin-2.3.8.tgz",
@@ -1773,6 +1840,18 @@
             "terser": "^4.6.12",
             "webpack-sources": "^1.4.3"
           }
+        },
+        "vue-loader-v16": {
+          "version": "npm:vue-loader@16.8.3",
+          "resolved": "https://registry.npmjs.org/vue-loader/-/vue-loader-16.8.3.tgz",
+          "integrity": "sha512-7vKN45IxsKxe5GcVCbc2qFU5aWzyiLrYJyUuMz4BQLKctCj/fmCa0w6fGiiQ2cLFetNcek1ppGJQDCup0c1hpA==",
+          "dev": true,
+          "optional": true,
+          "requires": {
+            "chalk": "^4.1.0",
+            "hash-sum": "^2.0.0",
+            "loader-utils": "^2.0.0"
+          }
         }
       }
     },
@@ -10974,6 +11053,11 @@
       "resolved": "https://registry.npm.taobao.org/vue/download/vue-2.6.12.tgz?cache=0&sync_timestamp=1609359675074&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Fvue%2Fdownload%2Fvue-2.6.12.tgz",
       "integrity": "sha1-9evU+mvShpQD4pqJau1JBEVskSM="
     },
+    "vue-debounce": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/vue-debounce/-/vue-debounce-3.0.2.tgz",
+      "integrity": "sha512-+shuc9Ry+AFqJbN7BMfagazB81/bTiPWvUZ4KBjambgrd3B5EQBojxeGzeNZ21xRflnwB098BG1d0HtWv8WyzA=="
+    },
     "vue-eslint-parser": {
       "version": "7.3.0",
       "resolved": "https://registry.npm.taobao.org/vue-eslint-parser/download/vue-eslint-parser-7.3.0.tgz?cache=0&sync_timestamp=1608031066427&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Fvue-eslint-parser%2Fdownload%2Fvue-eslint-parser-7.3.0.tgz",
@@ -11027,87 +11111,6 @@
         }
       }
     },
-    "vue-loader-v16": {
-      "version": "npm:vue-loader@16.1.2",
-      "resolved": "https://registry.npm.taobao.org/vue-loader/download/vue-loader-16.1.2.tgz?cache=0&sync_timestamp=1608188078235&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Fvue-loader%2Fdownload%2Fvue-loader-16.1.2.tgz",
-      "integrity": "sha1-XAO2xQ0qX5g8fOuhXFDXjKKymPQ=",
-      "dev": true,
-      "optional": true,
-      "requires": {
-        "chalk": "^4.1.0",
-        "hash-sum": "^2.0.0",
-        "loader-utils": "^2.0.0"
-      },
-      "dependencies": {
-        "ansi-styles": {
-          "version": "4.3.0",
-          "resolved": "https://registry.npm.taobao.org/ansi-styles/download/ansi-styles-4.3.0.tgz?cache=0&sync_timestamp=1606792436886&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Fansi-styles%2Fdownload%2Fansi-styles-4.3.0.tgz",
-          "integrity": "sha1-7dgDYornHATIWuegkG7a00tkiTc=",
-          "dev": true,
-          "optional": true,
-          "requires": {
-            "color-convert": "^2.0.1"
-          }
-        },
-        "chalk": {
-          "version": "4.1.0",
-          "resolved": "https://registry.npm.taobao.org/chalk/download/chalk-4.1.0.tgz?cache=0&sync_timestamp=1591687018980&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Fchalk%2Fdownload%2Fchalk-4.1.0.tgz",
-          "integrity": "sha1-ThSHCmGNni7dl92DRf2dncMVZGo=",
-          "dev": true,
-          "optional": true,
-          "requires": {
-            "ansi-styles": "^4.1.0",
-            "supports-color": "^7.1.0"
-          }
-        },
-        "color-convert": {
-          "version": "2.0.1",
-          "resolved": "https://registry.npm.taobao.org/color-convert/download/color-convert-2.0.1.tgz",
-          "integrity": "sha1-ctOmjVmMm9s68q0ehPIdiWq9TeM=",
-          "dev": true,
-          "optional": true,
-          "requires": {
-            "color-name": "~1.1.4"
-          }
-        },
-        "color-name": {
-          "version": "1.1.4",
-          "resolved": "https://registry.npm.taobao.org/color-name/download/color-name-1.1.4.tgz",
-          "integrity": "sha1-wqCah6y95pVD3m9j+jmVyCbFNqI=",
-          "dev": true,
-          "optional": true
-        },
-        "has-flag": {
-          "version": "4.0.0",
-          "resolved": "https://registry.npm.taobao.org/has-flag/download/has-flag-4.0.0.tgz",
-          "integrity": "sha1-lEdx/ZyByBJlxNaUGGDaBrtZR5s=",
-          "dev": true,
-          "optional": true
-        },
-        "loader-utils": {
-          "version": "2.0.0",
-          "resolved": "https://registry.npm.taobao.org/loader-utils/download/loader-utils-2.0.0.tgz?cache=0&sync_timestamp=1598867216219&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Floader-utils%2Fdownload%2Floader-utils-2.0.0.tgz",
-          "integrity": "sha1-5MrOW4FtQloWa18JfhDNErNgZLA=",
-          "dev": true,
-          "optional": true,
-          "requires": {
-            "big.js": "^5.2.2",
-            "emojis-list": "^3.0.0",
-            "json5": "^2.1.2"
-          }
-        },
-        "supports-color": {
-          "version": "7.2.0",
-          "resolved": "https://registry.npm.taobao.org/supports-color/download/supports-color-7.2.0.tgz?cache=0&sync_timestamp=1608035619713&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Fsupports-color%2Fdownload%2Fsupports-color-7.2.0.tgz",
-          "integrity": "sha1-G33NyzK4E4gBs+R4umpRyqiWSNo=",
-          "dev": true,
-          "optional": true,
-          "requires": {
-            "has-flag": "^4.0.0"
-          }
-        }
-      }
-    },
     "vue-style-loader": {
       "version": "4.1.2",
       "resolved": "https://registry.npm.taobao.org/vue-style-loader/download/vue-style-loader-4.1.2.tgz",

+ 2 - 1
webui/package.json

@@ -10,7 +10,8 @@
   "dependencies": {
     "core-js": "^3.6.5",
     "socket.io-client": "^3.0.5",
-    "vue": "^2.6.11"
+    "vue": "^2.6.11",
+    "vue-debounce": "^3.0.2"
   },
   "devDependencies": {
     "@vue/cli-plugin-babel": "~4.5.0",

+ 81 - 6
webui/src/App.vue

@@ -5,22 +5,54 @@
 
     <!-- top navigation bar -->
     <top-navigation-bar :window="window"
-                        v-on:side="window.menu = !window.menu"
+                        @side="window.menu = !window.menu"
                         @close="closeProject"></top-navigation-bar>
 
+    <!-- login -->
+    <div class="login" v-if="!$root.socket.authenticated">
+      <div class="login-form">
+        <h1>Login</h1>
+        <div>
+          <input v-model="userName"
+                 type="text"
+                 placeholder="Username"
+                 required>
+        </div>
+        <div>
+          <input v-model="passwordLogin"
+                 type="password"
+                 placeholder="Password"
+                 required>
+        </div>
+        <div>
+          <button type="button"
+                  @click="login()"
+                  :class="{disabled: loginButtonDisabled}"
+                  :disabled="loginButtonDisabled">
+            Login
+          </button>
+        </div>
+        <div>
+          <span class="login-errors">
+            {{ $root.socket.latestErrorTxt }}
+          </span>
+        </div>
+      </div>
+    </div>
+
     <!-- bottom content -->
-    <div class="bottom">
+    <div class="bottom" v-else>
       <!-- side navigation bar -->
       <side-navigation-bar :window="window"
-                           v-on:close="window.menu = false"/>
+                           @close="window.menu = false"/>
 
       <!-- actual content -->
       <div class="content">
         <project-open-window v-if="currentPage === 'projects'"
-                             v-on:open="openProject"/>
+                             @open="openProject"/>
 
         <project-settings-window v-if="currentPage === 'settings'"
-                                 v-on:close="closeProject"/>
+                                 @close="closeProject"/>
 
         <project-data-add-window v-if="currentPage === 'add_data'"/>
 
@@ -64,7 +96,10 @@ export default {
         wide: true,
         menu: false,
         content: 'settings'
-      }
+      },
+      loginButtonDisabled: false,
+      userName: "",
+      passwordLogin: "",
     }
   },
   created: function () {
@@ -86,6 +121,11 @@ export default {
     }
   },
   methods: {
+    login() {
+      this.loginButtonDisabled = true;
+      this.$root.authenticate(this.userName.toLowerCase(), this.passwordLogin);
+      this.loginButtonDisabled = false;
+    },
     resize: function () {
       this.window.wide = (document.body.offsetWidth > 1024);
     },
@@ -127,6 +167,41 @@ export default {
   position: relative;
 }
 
+.login {
+  align-items: center;
+  justify-content: center;
+  display: flex;
+  flex-wrap: wrap;
+  flex-grow: 1;
+  flex-direction: row;
+  width: 100%;
+  overflow: hidden;
+  position: relative;
+}
+
+.login-form {
+  text-align: center;
+}
+
+.login-form input {
+  text-align: right;
+  margin: 0.2rem;
+  padding: 0.4rem;
+  border: 1px solid gray;
+  border-radius: 0.5rem;
+}
+
+.login-form button {
+  margin: 0.2rem;
+  padding: 0.4rem 2rem;
+  border: 1px solid gray;
+  border-radius: 0.5rem;
+}
+
+.login-form .login-errors {
+  color: red
+}
+
 .content {
   flex-grow: 1;
 }

+ 69 - 0
webui/src/assets/icons/check-all.svg

@@ -0,0 +1,69 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   viewBox="0 0 16 16"
+   width="16"
+   height="16"
+   version="1.1"
+   id="svg4"
+   sodipodi:docname="check_all.svg"
+   inkscape:version="0.92.3 (2405546, 2018-03-11)">
+  <metadata
+     id="metadata10">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <defs
+     id="defs8" />
+  <sodipodi:namedview
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1"
+     objecttolerance="10"
+     gridtolerance="10"
+     guidetolerance="10"
+     inkscape:pageopacity="0"
+     inkscape:pageshadow="2"
+     inkscape:window-width="1920"
+     inkscape:window-height="1135"
+     id="namedview6"
+     showgrid="false"
+     inkscape:zoom="14.75"
+     inkscape:cx="8"
+     inkscape:cy="8"
+     inkscape:window-x="1200"
+     inkscape:window-y="536"
+     inkscape:window-maximized="1"
+     inkscape:current-layer="svg4" />
+  <g
+     id="g875"
+     transform="translate(0.00792471,-0.35699187)">
+    <path
+       style="fill-rule:evenodd"
+       inkscape:connector-curvature="0"
+       id="path2"
+       d="m 13.78,4.2019913 a 0.75,0.75 0 0 1 0,1.06 L 6.53,12.511991 a 0.75,0.75 0 0 1 -1.06,0 L 2.22,9.2619913 a 0.75,0.75 0 0 1 1.06,-1.06 L 6,10.921991 12.72,4.2019913 a 0.75,0.75 0 0 1 1.06,0 z" />
+    <path
+       id="path2-6"
+       d="m 13.746104,6.9318645 a 0.75,0.75 0 0 1 0,1.06 L 6.4961017,15.241865 a 0.75,0.75 0 0 1 -1.0600001,0 l -3.25,-3.25 a 0.75,0.75 0 0 1 1.0599999,-1.06 l 2.7200001,2.72 6.7200024,-6.7200005 a 0.75,0.75 0 0 1 1.06,0 z"
+       style="fill-rule:evenodd"
+       inkscape:connector-curvature="0" />
+    <path
+       id="path2-6-7"
+       d="m 13.77098,1.4721187 a 0.75,0.75 0 0 1 0,1.0599999 L 6.5209771,9.7821182 a 0.75,0.75 0 0 1 -1.06,0 L 2.2109772,6.532119 a 0.75,0.75 0 0 1 1.06,-1.0600006 L 5.9909771,8.1921182 12.71098,1.4721187 a 0.75,0.75 0 0 1 1.06,0 z"
+       style="fill-rule:evenodd"
+       inkscape:connector-curvature="0" />
+  </g>
+</svg>

+ 64 - 0
webui/src/assets/icons/double-chevron-left.svg

@@ -0,0 +1,64 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   viewBox="0 0 16 16"
+   width="16"
+   height="16"
+   version="1.1"
+   id="svg4"
+   sodipodi:docname="double-chevron-left.svg"
+   inkscape:version="0.92.3 (2405546, 2018-03-11)">
+  <metadata
+     id="metadata10">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <defs
+     id="defs8" />
+  <sodipodi:namedview
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1"
+     objecttolerance="10"
+     gridtolerance="10"
+     guidetolerance="10"
+     inkscape:pageopacity="0"
+     inkscape:pageshadow="2"
+     inkscape:window-width="1920"
+     inkscape:window-height="1135"
+     id="namedview6"
+     showgrid="false"
+     inkscape:zoom="14.75"
+     inkscape:cx="-6.6779661"
+     inkscape:cy="8"
+     inkscape:window-x="1200"
+     inkscape:window-y="536"
+     inkscape:window-maximized="1"
+     inkscape:current-layer="svg4" />
+  <g
+     id="g834"
+     transform="matrix(-1,0,0,1,18.776483,0.00902426)">
+    <path
+       id="path2"
+       d="m 6.22,3.22 a 0.75,0.75 0 0 1 1.06,0 l 4.25,4.25 a 0.75,0.75 0 0 1 0,1.06 L 7.28,12.78 A 0.75,0.75 0 0 1 6.22,11.72 L 9.94,8 6.22,4.28 a 0.75,0.75 0 0 1 0,-1.06 z"
+       inkscape:connector-curvature="0"
+       style="fill-rule:evenodd" />
+    <path
+       id="path2-3"
+       d="m 10.022966,3.2199998 a 0.75,0.75 0 0 1 1.06,0 l 4.25,4.25 a 0.75,0.75 0 0 1 0,1.06 l -4.25,4.2500002 a 0.75,0.75 0 0 1 -1.06,-1.06 l 3.72,-3.7200002 -3.72,-3.72 a 0.75,0.75 0 0 1 0,-1.06 z"
+       style="fill-rule:evenodd"
+       inkscape:connector-curvature="0" />
+  </g>
+</svg>

+ 64 - 0
webui/src/assets/icons/double-chevron-right.svg

@@ -0,0 +1,64 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   viewBox="0 0 16 16"
+   width="16"
+   height="16"
+   version="1.1"
+   id="svg4"
+   sodipodi:docname="double-chevron-right.svg"
+   inkscape:version="0.92.3 (2405546, 2018-03-11)">
+  <metadata
+     id="metadata10">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <defs
+     id="defs8" />
+  <sodipodi:namedview
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1"
+     objecttolerance="10"
+     gridtolerance="10"
+     guidetolerance="10"
+     inkscape:pageopacity="0"
+     inkscape:pageshadow="2"
+     inkscape:window-width="1920"
+     inkscape:window-height="1135"
+     id="namedview6"
+     showgrid="false"
+     inkscape:zoom="14.75"
+     inkscape:cx="8"
+     inkscape:cy="8"
+     inkscape:window-x="1200"
+     inkscape:window-y="536"
+     inkscape:window-maximized="1"
+     inkscape:current-layer="svg4" />
+  <g
+     id="g834"
+     transform="translate(-2.776483,0.00902426)">
+    <path
+       id="path2"
+       d="m 6.22,3.22 a 0.75,0.75 0 0 1 1.06,0 l 4.25,4.25 a 0.75,0.75 0 0 1 0,1.06 L 7.28,12.78 A 0.75,0.75 0 0 1 6.22,11.72 L 9.94,8 6.22,4.28 a 0.75,0.75 0 0 1 0,-1.06 z"
+       inkscape:connector-curvature="0"
+       style="fill-rule:evenodd" />
+    <path
+       id="path2-3"
+       d="m 10.022966,3.2199998 a 0.75,0.75 0 0 1 1.06,0 l 4.25,4.25 a 0.75,0.75 0 0 1 0,1.06 l -4.25,4.2500002 a 0.75,0.75 0 0 1 -1.06,-1.06 l 3.72,-3.7200002 -3.72,-3.72 a 0.75,0.75 0 0 1 0,-1.06 z"
+       style="fill-rule:evenodd"
+       inkscape:connector-curvature="0" />
+  </g>
+</svg>

+ 60 - 0
webui/src/assets/icons/untag.svg

@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   viewBox="0 0 16 16"
+   width="16"
+   height="16"
+   version="1.1"
+   id="svg4"
+   sodipodi:docname="untag.svg"
+   inkscape:version="0.92.3 (2405546, 2018-03-11)">
+  <metadata
+     id="metadata10">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <defs
+     id="defs8" />
+  <sodipodi:namedview
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1"
+     objecttolerance="10"
+     gridtolerance="10"
+     guidetolerance="10"
+     inkscape:pageopacity="0"
+     inkscape:pageshadow="2"
+     inkscape:window-width="1920"
+     inkscape:window-height="1135"
+     id="namedview6"
+     showgrid="false"
+     inkscape:zoom="14.75"
+     inkscape:cx="8"
+     inkscape:cy="8"
+     inkscape:window-x="1200"
+     inkscape:window-y="536"
+     inkscape:window-maximized="1"
+     inkscape:current-layer="svg4" />
+  <path
+     fill-rule="evenodd"
+     d="M2.5 7.775V2.75a.25.25 0 01.25-.25h5.025a.25.25 0 01.177.073l6.25 6.25a.25.25 0 010 .354l-5.025 5.025a.25.25 0 01-.354 0l-6.25-6.25a.25.25 0 01-.073-.177zm-1.5 0V2.75C1 1.784 1.784 1 2.75 1h5.025c.464 0 .91.184 1.238.513l6.25 6.25a1.75 1.75 0 010 2.474l-5.026 5.026a1.75 1.75 0 01-2.474 0l-6.25-6.25A1.75 1.75 0 011 7.775zM6 5a1 1 0 100 2 1 1 0 000-2z"
+     id="path2" />
+  <path
+     inkscape:connector-curvature="0"
+     d="m 13.675781,1.0898446 c -0.188805,-0.00885 -0.387816,0.061085 -0.564453,0.2441406 L 7.7519531,6.691407 6.6914062,7.7519539 1.3320312,13.111328 c -0.73155592,0.707236 0.3540009,1.792769 1.0605469,1.060547 L 7.7519531,8.8125008 8.8125,7.7519539 14.171875,2.394532 c 0.539682,-0.5395655 0.07032,-1.2781454 -0.496094,-1.3046874 z"
+     id="path2-3"
+     sodipodi:nodetypes="cccccccccc" />
+</svg>

+ 237 - 86
webui/src/components/media/annotated-image.vue

@@ -6,23 +6,28 @@
                    @interaction="interaction = $event"
                    :filter="filter"
                    @filter="filter = $event"
-                   :label="label"
-                   @label="label = $event"
-                   :labels="labels"
-                   :zoomBox="zoomBox"
-                   :infoBox="infoBox !== false"
-                   @infoBox="infoBox = $event; interaction=false"
-                   @unzoom="zoomBox=false; interaction=false"
-                   @prevzoom="$refs.overlay.prevZoom()"
-                   @nextzoom="$refs.overlay.nextZoom()"/>
+                   :labelsEnabled="labelsEnabled"
+                   @labelSelector="openLabelSelector()"
+                   @predict="predictImage"
+                   @confirmAll="confirmAll"/>
+
+      <label-selector v-if="labelSelector"
+                      :labels="labels"
+                      @close="closeLabelSelector()"
+                      @label="labelSelected($event);"
+                      />
 
       <div class="media">
+        <h3>{{current.path}}</h3>
+        <div class="mode-tooltip">{{modeTooltip}}</div>
+
         <!-- image -->
         <img v-if="current.type === 'image'"
              ref="media" :src="src" alt="media"
-             :style="cropPosition"
-             v-on:load="change" v-on:loadedmetadata="change" v-on:loadeddata="change"
-             v-on:transitionend="resize">
+             @load="change"
+             @loadedmetadata="change"
+             @loadeddata="change"
+             @transitionend="resize">
 
         <!-- video -->
         <template v-if="current.type === 'video'">
@@ -44,22 +49,36 @@
                             :position="overlayPosition"
                             :size="image"
                             :interaction="interaction"
+                            @interaction="interaction = $event"
                             :filter="filter"
                             :label="label"
                             :video="video"
                             :results="results"
                             :labels="labels"
                             :crop="infoBox"
-                            @crop="infoBox = $event"
-                            :zoom="zoomBox"
-                            @zoom="zoomBox = $event"/>
+                            @labelSelector="openLabelSelector($event)"
+                            @labelBox="labelBox"
+                            @newBox="newBox"
+                            @labelImage="labelImage"
+                            @remove="remove"
+                            @confirm="confirm"
+                            @updateBox="updateBox"
+                            @estimateBox="estimateBox"
+                            @crop="infoBox = $event"/>
       </div>
 
       <cropped-image v-if="infoBox !== false"
                      :labels="labels"
                      :file="current"
                      :box="infoBox"
-                     @close="infoBox=false"/>
+                     @predictBox="predictBox"
+                     @removeBox="remove"
+                     @removeLabel="labelBox($event, null)"
+                     @setLabel="openLabelSelector($event)"
+                     @close="
+                      interaction=defaultInteraction;
+                      infoBox=false"
+                    />
     </template>
   </div>
 </template>
@@ -69,15 +88,83 @@ import AnnotationOverlay from "@/components/media/annotation-overlay";
 import VideoControl from "@/components/media/video-control";
 import OptionsBar from "@/components/media/options-bar";
 import CroppedImage from "@/components/media/cropped-image";
+import LabelSelector from "@/components/media/label-selector";
 
 export default {
   name: "annotated-image",
-  components: {OptionsBar, VideoControl, AnnotationOverlay, CroppedImage},
-  props: ['current'],
+  components: {
+    OptionsBar,
+    VideoControl,
+    AnnotationOverlay,
+    CroppedImage,
+    LabelSelector
+  },
+  props: [
+    'current'
+  ],
+
+  data: function () {
+
+    return {
+      interval: false,
+      resize_interval: 200,
+      container: {
+        top: 0,
+        left: 0,
+        width: 0,
+        height: 0,
+      },
+      image: {
+        top: 0,
+        left: 0,
+        width: 0,
+        height: 0
+      },
+      video: {
+        frame: 0,
+        play: false
+      },
+      infoBox: false,
+      supported: {
+        labeledImages: false,
+        boundingBoxes: false,
+        labeledBoundingBoxes: false,
+      },
+      interaction: 'draw-box',
+      defaultInteraction: 'draw-box',
+      filter: ['user', 'pipeline'],
+      label: false,
+      labelSelector: false,
+      boxTolabel: null,
+      model: null,
+      results: [],
+      labels: []
+    }
+  },
+  watch: {
+    current: {
+      immediate: true,
+      handler: function (newVal) {
+        this.infoBox = false;
+
+        this.video.play = false;
+        this.video.frame = 0;
+
+        this.$root.socket.get(`/data/${newVal.identifier}/results`)
+          .then(response => response.json())
+          .then(results => {
+            this.results = results;
+          });
+      }
+    },
+    infoBox: function () {
+      setTimeout(this.resize, 1);
+    }
+  },
   mounted: function () {
     // add resize listener
     window.addEventListener('resize', this.resize);
-    this.interval = setInterval(this.resize, 1000);
+    this.interval = setInterval(this.resize, this.resize_interval);
     this.resize();
 
     // add result listener
@@ -96,6 +183,7 @@ export default {
     this.$root.socket.get(`/projects/${this.$root.project.identifier}/model`)
       .then(response => response.json())
       .then(model => {
+        this.model = model;
         this.supported.labeledImages = model.supports.includes('labeled-images');
         this.supported.labeledBoundingBoxes = model.supports.includes('labeled-bounding-boxes');
         this.supported.boundingBoxes = this.supported.labeledBoundingBoxes
@@ -115,39 +203,6 @@ export default {
     this.$root.socket.off('remove-label', this.removeLabelFromList);
     this.$root.socket.off('edit-label', this.editLabelInList);
   },
-  data: function () {
-    return {
-      interval: false,
-      container: {
-        top: 0,
-        left: 0,
-        width: 0,
-        height: 0,
-      },
-      image: {
-        top: 0,
-        left: 0,
-        width: 0,
-        height: 0
-      },
-      video: {
-        frame: 0,
-        play: false
-      },
-      infoBox: false,
-      zoomBox: false,
-      supported: {
-        labeledImages: false,
-        boundingBoxes: false,
-        labeledBoundingBoxes: false,
-      },
-      interaction: 'draw-box',
-      filter: ['user', 'pipeline'],
-      label: false,
-      results: [],
-      labels: []
-    }
-  },
   computed: {
     src: function () {
       if (!this.container.width || !this.container.height)
@@ -168,23 +223,124 @@ export default {
         height: this.image.height + 'px'
       }
     },
-    cropPosition: function () {
-      if (!this.zoomBox)
-        return {
-          transform: ``,
-        };
 
-      const posX = 0.5 - (this.zoomBox.x + this.zoomBox.w / 2);
-      const posY = 0.5 - (this.zoomBox.y + this.zoomBox.h / 2);
-      const factor = 0.75 / Math.max(this.zoomBox.w, this.zoomBox.h);
+    labelsEnabled: function() {
 
-      // use a transition to use the transitionend event to recalculate box positions
-      return {
-        transform: `scale(${factor}) translateX(${posX * 100}%) translateY(${posY * 100}%)`
-      };
-    }
+      return this.model
+          && (this.supported.labeledImages || this.supported.labeledBoundingBoxes);
+    },
+
+    modeTooltip: function(){
+      let tip = "Current mode";
+      switch (this.interaction) {
+        case "draw-box":
+          return `${tip}: draw bounding-box`;
+
+        case "estimate-box":
+          return `${tip}: estimate bounding-box`;
+
+        case "extreme-clicking":
+          return `${tip}: extreme clicking`;
+
+        case "move-box":
+          return `${tip}: move or resize`;
+
+        case "label-box":
+          if (this.label.identifier === null)
+            return `${tip}: remove tag`;
+          else
+            return `${tip}: tag as "${this.label.name}"`;
+
+        case "confirm-box":
+          return `${tip}: confirm`;
+
+        case "remove-box":
+          return `${tip}: remove`;
+
+        case "info-box":
+          return `${tip}: bounding-box info`;
+
+        default:
+          return "";
+      }
+    },
   },
   methods: {
+
+    openLabelSelector: function(box) {
+      if (box === null || box === undefined)
+        this.boxTolabel = null
+      else
+        this.boxTolabel = box;
+
+      this.labelSelector = true;
+    },
+
+    closeLabelSelector: function() {
+      this.boxTolabel = null;
+      this.labelSelector = false;
+    },
+
+    labelSelected: function(label) {
+      this.interaction = 'label-box';
+      this.label = label;
+
+      if (this.boxTolabel !== null)
+        this.labelBox(this.boxTolabel.identifier, label.identifier);
+    },
+
+    labelBox: function(box_id, label_id) {
+      this.$root.socket.post(`/results/${box_id}/label`,
+        {label: label_id});
+    },
+
+    newBox: function(file_id, data) {
+      this.$root.socket.post(`/data/${file_id}/results`,
+        {type: 'bounding-box', data: data});
+    },
+
+    labelImage: function(file_id, label_id) {
+      this.$root.socket.post(`/data/${file_id}/results`,
+        {type: 'labeled-image', label: label_id});
+    },
+
+    updateBox: function(box_id, data) {
+      this.$root.socket.post(`/results/${box_id}/data`,
+        {data: data});
+    },
+
+    estimateBox: function(file_id, coordinates) {
+      this.$root.socket.post(`/data/${file_id}/estimate`,
+        coordinates);
+    },
+
+    predictImage: function(file_id) {
+      this.$root.socket.post(`/data/${file_id}/predict`,
+        {predict: true});
+    },
+
+    predictBox: function(file_id, box_id) {
+      this.$root.socket.post(`/data/${file_id}/${box_id}/predict_bounding_box`,
+        {predict: true});
+    },
+
+    // confirm all bboxes of the image
+    confirmAll: function(file_id) {
+      this.$root.socket.post(`/data/${file_id}/confirm_all`,
+        {confirm: true});
+    },
+
+    // confirm either a bbox or image result
+    confirm: function(res_id) {
+      this.$root.socket.post(`/results/${res_id}/confirm`,
+        {confirm: true});
+    },
+    // remove either a bbox or image result
+    remove: function(res_id) {
+      this.$root.socket.post(`/results/${res_id}/remove`,
+        {remove: true});
+    },
+
     resize: function () {
       const rect = this.$refs.root.getBoundingClientRect();
 
@@ -297,26 +453,6 @@ export default {
       }
     }
   },
-  watch: {
-    current: {
-      immediate: true,
-      handler: function (newVal) {
-        this.video.play = false;
-        this.video.frame = 0;
-
-        this.zoomBox = false;
-
-        this.$root.socket.get(`/data/${newVal.identifier}/results`)
-          .then(response => response.json())
-          .then(results => {
-            this.results = results;
-          });
-      }
-    },
-    infoBox: function () {
-      setTimeout(this.resize, 1);
-    }
-  }
 }
 </script>
 
@@ -331,6 +467,7 @@ export default {
   align-items: center;
 
   overflow: hidden;
+
 }
 
 .options-bar {
@@ -363,7 +500,21 @@ export default {
 
 img, video {
   max-width: 100%;
-  max-height: 100%;
+  max-height: 90%;
   transition: transform 0.01s;
 }
+
+h3 {
+  max-width: 100%;
+  max-height: 5%;
+  margin: 0.5em;
+}
+
+.mode-tooltip {
+  max-width: 100%;
+  max-height: 5%;
+  margin: 0.5em;
+}
+
+
 </style>
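The component methods above all delegate to the backend's REST endpoints. As a hedged reference, the same calls can be made directly; the host/port, ids, and payload shapes below are assumptions taken from the component code, not from the server implementation.

```python
# Sketch only: exercises the endpoints used by annotated-image.vue above.
# Base URL, ids and payloads are assumptions for illustration.
import requests

BASE = "http://localhost:5000"

with requests.Session() as session:
    # confirm a single result (confirm())
    session.post(f"{BASE}/results/42/confirm", json={"confirm": True})
    # confirm every result of a file (confirmAll())
    session.post(f"{BASE}/data/7/confirm_all", json={"confirm": True})
    # create a new bounding box on a file (newBox())
    session.post(f"{BASE}/data/7/results",
                 json={"type": "bounding-box",
                       "data": {"x": 0.1, "y": 0.1, "w": 0.3, "h": 0.2}})
    # attach a label to an existing box (labelBox())
    session.post(f"{BASE}/results/42/label", json={"label": 3})
```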

Some files were not shown because too many files changed in this diff