From 549911bbc2c4dc9cd3421fa6b61a2536dff8d2f3 Mon Sep 17 00:00:00 2001
From: Maria Khrustaleva
Date: Fri, 24 Mar 2023 00:27:21 +0100
Subject: [PATCH] Revert on_success/on_failure usage because the callbacks do not work correctly

---
 cvat-core/src/server-proxy.ts        | 24 ++++++++---
 cvat/apps/dataset_manager/project.py |  6 ++-
 cvat/apps/dataset_manager/task.py    |  8 ++--
 cvat/apps/dataset_manager/util.py    | 14 +++++++
 cvat/apps/engine/backup.py           | 24 ++++++++---
 cvat/apps/engine/mixins.py           | 23 +++++++++--
 cvat/apps/engine/utils.py            |  5 ---
 cvat/apps/engine/views.py            | 59 +++++++++++++++-------------
 cvat/settings/base.py                |  2 +
 9 files changed, 112 insertions(+), 53 deletions(-)

diff --git a/cvat-core/src/server-proxy.ts b/cvat-core/src/server-proxy.ts
index f75316210aa..65f15aed273 100644
--- a/cvat-core/src/server-proxy.ts
+++ b/cvat-core/src/server-proxy.ts
@@ -768,15 +768,17 @@ async function importDataset(
     };
 
     const url = `${backendAPI}/projects/${id}/dataset`;
+    let rqId: string;
 
     async function wait() {
         return new Promise((resolve, reject) => {
             async function requestStatus() {
                 try {
                     const response = await Axios.get(url, {
-                        params: { ...params, action: 'import_status' },
+                        params: { ...params, action: 'import_status', rq_id: rqId },
                     });
                     if (response.status === 202) {
+                        rqId = response.data.rq_id;
                         if (response.data.message) {
                             options.updateStatusCallback(response.data.message, response.data.progress || 0);
                         }
@@ -797,10 +799,11 @@ async function importDataset(
 
     if (isCloudStorage) {
         try {
-            await Axios.post(url,
+            const response = await Axios.post(url,
                 new FormData(), {
                     params,
                 });
+            rqId = response.data.rq_id;
         } catch (errorData) {
             throw generateError(errorData);
         }
@@ -822,11 +825,12 @@ async function importDataset(
                 headers: { 'Upload-Start': true },
             });
             await chunkUpload(file, uploadConfig);
-            await Axios.post(url,
+            const response = await Axios.post(url,
                 new FormData(), {
                     params,
                     headers: { 'Upload-Finish': true },
                 });
+            rqId = response.data.rq_id;
         } catch (errorData) {
             throw generateError(errorData);
         }
@@ -1602,20 +1606,26 @@ async function uploadAnnotations(
         filename: typeof file === 'string' ? file : file.name,
         conv_mask_to_poly: options.convMaskToPoly,
     };
+    let rqId: string;
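+    // rqId identifies the import job on the server; it arrives with the first
+    // 202 response and is sent back with every follow-up status request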
 
     const url = `${backendAPI}/${session}s/${id}/annotations`;
     async function wait() {
         return new Promise((resolve, reject) => {
             async function requestStatus() {
                 try {
+                    const data = new FormData();
+                    if (rqId) {
+                        data.set('rq_id', rqId);
+                    }
                     const response = await Axios.put(
                         url,
-                        new FormData(),
+                        data,
                         {
                             params,
                         },
                     );
                     if (response.status === 202) {
+                        rqId = response.data.rq_id;
                         setTimeout(requestStatus, 3000);
                     } else {
                         resolve();
@@ -1631,10 +1641,11 @@ async function uploadAnnotations(
 
     if (isCloudStorage) {
         try {
-            await Axios.post(url,
+            const response = await Axios.post(url,
                 new FormData(), {
                     params,
                 });
+            rqId = response.data.rq_id;
         } catch (errorData) {
             throw generateError(errorData);
         }
@@ -1652,11 +1663,12 @@ async function uploadAnnotations(
                 headers: { 'Upload-Start': true },
             });
             await chunkUpload(file, uploadConfig);
-            await Axios.post(url,
+            const response = await Axios.post(url,
                 new FormData(), {
                     params,
                     headers: { 'Upload-Finish': true },
                 });
+            rqId = response.data.rq_id;
         } catch (errorData) {
             throw generateError(errorData);
         }
diff --git a/cvat/apps/dataset_manager/project.py b/cvat/apps/dataset_manager/project.py
index e52fb2ebab8..3cde9e2b3cd 100644
--- a/cvat/apps/dataset_manager/project.py
+++ b/cvat/apps/dataset_manager/project.py
@@ -18,6 +18,7 @@
 from .annotation import AnnotationIR
 from .bindings import ProjectData, load_dataset_data
 from .formats.registry import make_exporter, make_importer
+from .util import remove_resources
 
 
 def export_project(project_id, dst_file, format_name, server_url=None, save_images=False):
@@ -160,7 +161,8 @@ def data(self) -> dict:
         raise NotImplementedError()
 
 @transaction.atomic
-def import_dataset_as_project(project_id, dataset_file, format_name, conv_mask_to_poly):
+@remove_resources
+def import_dataset_as_project(src_file, project_id, format_name, conv_mask_to_poly):
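+    # src_file is deliberately the first argument: the remove_resources
+    # wrapper deletes it once the import finishes or fails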
     rq_job = rq.get_current_job()
     rq_job.meta['status'] = 'Dataset import has been started...'
     rq_job.meta['progress'] = 0.
@@ -170,5 +172,5 @@ def import_dataset_as_project(project_id, dataset_file, format_name, conv_mask_t
     project.init_from_db()
 
     importer = make_importer(format_name)
-    with open(dataset_file, 'rb') as f:
+    with open(src_file, 'rb') as f:
         project.import_dataset(f, importer, conv_mask_to_poly=conv_mask_to_poly)
diff --git a/cvat/apps/dataset_manager/task.py b/cvat/apps/dataset_manager/task.py
index f85005ff4cd..72b66874907 100644
--- a/cvat/apps/dataset_manager/task.py
+++ b/cvat/apps/dataset_manager/task.py
@@ -19,7 +19,7 @@
 from .annotation import AnnotationIR, AnnotationManager
 from .bindings import TaskData, JobData
 from .formats.registry import make_exporter, make_importer
-from .util import bulk_create
+from .util import bulk_create, remove_resources
 
 
 class dotdict(OrderedDict):
@@ -789,7 +789,8 @@ def export_task(task_id, dst_file, format_name, server_url=None, save_images=Fal
         task.export(f, exporter, host=server_url, save_images=save_images)
 
 @transaction.atomic
-def import_task_annotations(task_id, src_file, format_name, conv_mask_to_poly):
+@remove_resources
+def import_task_annotations(src_file, task_id, format_name, conv_mask_to_poly):
     task = TaskAnnotation(task_id)
     task.init_from_db()
 
@@ -798,7 +799,8 @@ def import_task_annotations(task_id, src_file, format_name, conv_mask_to_poly):
         task.import_annotations(f, importer, conv_mask_to_poly=conv_mask_to_poly)
 
 @transaction.atomic
-def import_job_annotations(job_id, src_file, format_name, conv_mask_to_poly):
+@remove_resources
+def import_job_annotations(src_file, job_id, format_name, conv_mask_to_poly):
     job = JobAnnotation(job_id)
     job.init_from_db()
diff --git a/cvat/apps/dataset_manager/util.py b/cvat/apps/dataset_manager/util.py
index 8c4a4c7ed8e..739ab135079 100644
--- a/cvat/apps/dataset_manager/util.py
+++ b/cvat/apps/dataset_manager/util.py
@@ -6,6 +6,8 @@
 import inspect
 import os, os.path as osp
 import zipfile
+import functools
+from contextlib import suppress
 
 from django.conf import settings
 
@@ -35,3 +37,15 @@ def bulk_create(db_model, objects, flt_param):
             return db_model.objects.bulk_create(objects)
 
     return []
+
+
+def remove_resources(func):
+    @functools.wraps(func)
+    def wrapper(src_file, *args, **kwargs):
+        try:
+            func(src_file, *args, **kwargs)
+        finally:
+            with suppress(FileNotFoundError):
+                os.remove(src_file)
+        return None
+    return wrapper
diff --git a/cvat/apps/engine/backup.py b/cvat/apps/engine/backup.py
index 8448a2ec660..ace3cc7ba30 100644
--- a/cvat/apps/engine/backup.py
+++ b/cvat/apps/engine/backup.py
@@ -9,11 +9,13 @@
 import re
 import shutil
 import tempfile
+import functools
 from typing import Any, Dict, Iterable
 import uuid
 from zipfile import ZipFile
 from datetime import datetime
 from tempfile import NamedTemporaryFile
+from contextlib import suppress
 
 import django_rq
 from django.conf import settings
@@ -36,7 +38,7 @@
     LabeledDataSerializer, SegmentSerializer, SimpleJobSerializer, TaskReadSerializer,
     ProjectReadSerializer, ProjectFileSerializer, TaskFileSerializer)
 from cvat.apps.engine.utils import (
-    av_scan_paths, process_failed_job, configure_dependent_job, get_rq_job_meta, handle_finished_or_failed_job
+    av_scan_paths, process_failed_job, configure_dependent_job, get_rq_job_meta
 )
 from cvat.apps.engine.models import (
     StorageChoice, StorageMethodChoice, DataChoice, Task, Project, Location,
@@ -55,6 +57,17 @@ class Version(Enum):
 IMPORT_CACHE_FAILED_TTL = timedelta(hours=10).total_seconds()
 IMPORT_CACHE_SUCCESS_TTL = timedelta(hours=1).total_seconds()
 
+def remove_resources(func):
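+    # same cleanup as dataset_manager.util.remove_resources, but this variant
+    # keeps the wrapped function's return value, which _import() later reads
+    # via rq_job.return_value()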
+    @functools.wraps(func)
+    def wrapper(filename, *args, **kwargs):
+        try:
+            result = func(filename, *args, **kwargs)
+        finally:
+            with suppress(FileNotFoundError):
+                os.remove(filename)
+        return result
+    return wrapper
+
 def _get_label_mapping(db_labels):
     label_mapping = {db_label.id: db_label.name for db_label in db_labels}
     for db_label in db_labels:
@@ -633,6 +646,7 @@ def import_task(self):
         return self._db_task
 
 @transaction.atomic
+@remove_resources
 def _import_task(filename, user, org_id):
     av_scan_paths(filename)
     task_importer = TaskImporter(filename, user, org_id)
@@ -752,6 +766,7 @@ def import_project(self):
         return self._db_project
 
 @transaction.atomic
+@remove_resources
 def _import_project(filename, user, org_id):
     av_scan_paths(filename)
     project_importer = ProjectImporter(filename, user, org_id)
@@ -947,15 +962,12 @@ def _import(importer, request, queue, rq_id, Serializer, file_field_name, locati
                 **get_rq_job_meta(request=request, db_obj=None)
            },
            depends_on=dependent_job,
-            on_success=handle_finished_or_failed_job,
-            on_failure=handle_finished_or_failed_job,
            result_ttl=IMPORT_CACHE_SUCCESS_TTL,
            failure_ttl=IMPORT_CACHE_FAILED_TTL
        )
     else:
         if rq_job.is_finished:
             project_id = rq_job.return_value()
-            handle_finished_or_failed_job(rq_job)
             rq_job.delete()
             return Response({'id': project_id}, status=status.HTTP_201_CREATED)
         elif rq_job.is_failed or \
@@ -981,7 +993,7 @@ def import_project(request, queue_name, filename=None):
     if 'rq_id' in request.data:
         rq_id = request.data['rq_id']
     else:
-        rq_id = f"import:project.{uuid.uuid4()}-by-{request.user}"
+        rq_id = settings.COMMON_IMPORT_RQ_ID_TEMPLATE.format('project', uuid.uuid4(), 'backup', request.user)
 
     Serializer = ProjectFileSerializer
     file_field_name = 'project_file'
@@ -1007,7 +1019,7 @@ def import_task(request, queue_name, filename=None):
     if 'rq_id' in request.data:
         rq_id = request.data['rq_id']
     else:
-        rq_id = f"import:task.{uuid.uuid4()}-by-{request.user}"
+        rq_id = settings.COMMON_IMPORT_RQ_ID_TEMPLATE.format('task', uuid.uuid4(), 'backup', request.user)
 
     Serializer = TaskFileSerializer
     file_field_name = 'task_file'
diff --git a/cvat/apps/engine/mixins.py b/cvat/apps/engine/mixins.py
index 35834323eb3..247f4ad4f9a 100644
--- a/cvat/apps/engine/mixins.py
+++ b/cvat/apps/engine/mixins.py
@@ -7,6 +7,7 @@
 import base64
 from unittest import mock
 import uuid
+import django_rq
 
 from django.conf import settings
 from django.core.cache import cache
@@ -170,12 +171,26 @@ def init_tus_upload(self, request):
             if message_id:
                 metadata["message_id"] = base64.b64decode(message_id)
 
-            file_exists = os.path.lexists(os.path.join(self.get_upload_dir(), filename))
+            file_path = os.path.join(self.get_upload_dir(), filename)
+            file_exists = os.path.lexists(file_path)
+
+            if file_exists:
+                # check whether the rq_job is in progress or has finished/failed
+                object_class_name = self._object.__class__.__name__.lower()
+                import_type = request.path.strip('/').split('/')[-1]
+                if import_type != 'backup':
+                    template = settings.COMMON_IMPORT_RQ_ID_TEMPLATE.format(object_class_name, self._object.pk, import_type, request.user)
+                    queue = django_rq.get_queue(settings.CVAT_QUEUES.IMPORT_DATA.value)
+                    finished_job_ids = queue.finished_job_registry.get_job_ids()
+                    failed_job_ids = queue.failed_job_registry.get_job_ids()
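+                    # a finished or failed import job means the leftover upload
+                    # is stale, so it is safe to remove it and accept a new file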
+                    if template in finished_job_ids or template in failed_job_ids:
+                        os.remove(file_path)
+                        file_exists = False
+
             if file_exists:
                 return self._tus_response(status=status.HTTP_409_CONFLICT,
                     data="File with same name already exists")
 
-            # TODO: check if rq job exists and is active
             file_size = int(request.META.get("HTTP_UPLOAD_LENGTH", "0"))
             if file_size > int(self._tus_max_file_size):
                 return self._tus_response(status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
@@ -283,7 +298,7 @@ def export_annotations(self, request, db_obj, export_func, callback, get_data=No
         if serializer.is_valid(raise_exception=True):
             return Response(serializer.data)
 
-    def import_annotations(self, request, db_obj, import_func, rq_func, rq_id):
+    def import_annotations(self, request, db_obj, import_func, rq_func, rq_id_template):
         is_tus_request = request.headers.get('Upload-Length', None) is not None or \
             request.method == 'OPTIONS'
         if is_tus_request:
@@ -305,7 +320,7 @@ def import_annotations(self, request, db_obj, import_func, rq_func, rq_id):
 
         return import_func(
             request=request,
-            rq_id=rq_id,
+            rq_id_template=rq_id_template,
             rq_func=rq_func,
             db_obj=self._object,
             format_name=format_name,
diff --git a/cvat/apps/engine/utils.py b/cvat/apps/engine/utils.py
index 63743c6c03d..dfab982bbae 100644
--- a/cvat/apps/engine/utils.py
+++ b/cvat/apps/engine/utils.py
@@ -131,7 +131,6 @@ def parse_exception_message(msg):
     return parsed_msg
 
 def process_failed_job(rq_job: Job):
-    handle_finished_or_failed_job(rq_job)
     exc_info = str(rq_job.exc_info or rq_job.dependency.exc_info)
     if rq_job.dependency:
         rq_job.dependency.delete()
@@ -142,9 +141,6 @@ def process_failed_job(rq_job: Job):
     log.error(msg)
     return msg
 
-def handle_finished_or_failed_job(rq_job: Job, *args, **kwargs):
-    if os.path.exists(rq_job.meta['tmp_file']):
-        os.remove(rq_job.meta['tmp_file'])
 
 def configure_dependent_job(
     queue: DjangoRQ,
@@ -166,7 +162,6 @@ def configure_dependent_job(
         args=(db_storage, filename, key),
         job_id=rq_job_id_download_file,
         meta=get_rq_job_meta(request=request, db_obj=db_storage),
-        on_failure=handle_finished_or_failed_job,
         result_ttl=result_ttl,
         failure_ttl=failure_ttl
     )
diff --git a/cvat/apps/engine/views.py b/cvat/apps/engine/views.py
index c2a0aa5ffa9..0ddf1e639ef 100644
--- a/cvat/apps/engine/views.py
+++ b/cvat/apps/engine/views.py
@@ -65,7 +65,7 @@
 from utils.dataset_manifest import ImageManifestManager
 from cvat.apps.engine.utils import (
-    av_scan_paths, process_failed_job, configure_dependent_job, parse_exception_message, get_rq_job_meta, handle_finished_or_failed_job
+    av_scan_paths, process_failed_job, configure_dependent_job, parse_exception_message, get_rq_job_meta
 )
 from cvat.apps.engine import backup
 from cvat.apps.engine.mixins import PartialUpdateModelMixin, UploadMixin, AnnotationMixin, SerializeMixin, DestroyModelMixin, CreateModelMixin
@@ -235,6 +235,7 @@ class ProjectViewSet(viewsets.GenericViewSet, mixins.ListModelMixin,
     ordering = "-id"
     lookup_fields = {'owner': 'owner__username', 'assignee': 'assignee__username'}
     iam_organization_field = 'organization'
+    IMPORT_RQ_ID_TEMPLATE = settings.COMMON_IMPORT_RQ_ID_TEMPLATE.format('project', {}, 'dataset', {})
 
     def get_serializer_class(self):
         if self.request.method in SAFE_METHODS:
@@ -310,7 +311,6 @@ def perform_create(self, serializer, **kwargs):
         url_path=r'dataset/?$')
     def dataset(self, request, pk):
         self._object = self.get_object() # force call of check_object_permissions()
-        rq_id = f"import:dataset-for-project.id{pk}-by-{request.user}"
 
         if request.method in {'POST', 'OPTIONS'}:
             return self.import_annotations(
@@ -318,17 +318,19 @@ def dataset(self, request, pk):
                 db_obj=self._object,
                 import_func=_import_project_dataset,
                 rq_func=dm.project.import_dataset_as_project,
-                rq_id=rq_id,
+                rq_id_template=self.IMPORT_RQ_ID_TEMPLATE
             )
         else:
             action = request.query_params.get("action", "").lower()
             if action in ("import_status",):
                 queue = django_rq.get_queue(settings.CVAT_QUEUES.IMPORT_DATA.value)
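+                # status polling must carry the rq_id that the client received
+                # from the initial POST request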
+                rq_id = request.query_params.get('rq_id')
+                if not rq_id:
+                    return Response('The rq_id param should be specified in the query parameters', status=status.HTTP_400_BAD_REQUEST)
                 rq_job = queue.fetch_job(rq_id)
                 if rq_job is None:
                     return Response(status=status.HTTP_404_NOT_FOUND)
                 elif rq_job.is_finished:
-                    handle_finished_or_failed_job(rq_job)
                     if rq_job.dependency:
                         rq_job.dependency.delete()
                     rq_job.delete()
@@ -391,7 +393,7 @@ def upload_finished(self, request):
             return _import_project_dataset(
                 request=request,
                 filename=uploaded_file,
-                rq_id=f"import:dataset-for-project.id{self._object.pk}-by-{request.user}",
+                rq_id_template=self.IMPORT_RQ_ID_TEMPLATE,
                 rq_func=dm.project.import_dataset_as_project,
                 db_obj=self._object,
                 format_name=format_name,
@@ -700,6 +702,7 @@ class TaskViewSet(viewsets.GenericViewSet, mixins.ListModelMixin,
     ordering_fields = list(filter_fields)
     ordering = "-id"
     iam_organization_field = 'organization'
+    IMPORT_RQ_ID_TEMPLATE = settings.COMMON_IMPORT_RQ_ID_TEMPLATE.format('task', {}, 'annotations', {})
 
     def get_serializer_class(self):
         if self.request.method in SAFE_METHODS:
@@ -819,8 +822,7 @@ def upload_finished(self, request):
             return _import_annotations(
                 request=request,
                 filename=annotation_file,
-                rq_id=(f"import:annotations-for-task.id{self._object.pk}-"
-                    f"in-{format_name.replace(' ', '_')}-by-{request.user}"),
+                rq_id_template=self.IMPORT_RQ_ID_TEMPLATE,
                 rq_func=dm.task.import_task_annotations,
                 db_obj=self._object,
                 format_name=format_name,
@@ -1054,7 +1056,7 @@ def annotations(self, request, pk):
                 db_obj=self._object,
                 import_func=_import_annotations,
                 rq_func=dm.task.import_task_annotations,
-                rq_id = f"import:annotations-for-task.id{pk}-in-{format_name.replace(' ', '_')}-by-{request.user}"
+                rq_id_template=self.IMPORT_RQ_ID_TEMPLATE
             )
         elif request.method == 'PUT':
             format_name = request.query_params.get('format', '')
@@ -1067,7 +1069,7 @@ def annotations(self, request, pk):
                 )
                 return _import_annotations(
                     request=request,
-                    rq_id = f"import:annotations-for-task.id{pk}-in-{format_name.replace(' ', '_')}-by-{request.user}",
+                    rq_id_template=self.IMPORT_RQ_ID_TEMPLATE,
                     rq_func=dm.task.import_task_annotations,
                     db_obj=self._object,
                     format_name=format_name,
@@ -1300,6 +1302,7 @@ class JobViewSet(viewsets.GenericViewSet, mixins.ListModelMixin,
         'project_name': 'segment__task__project__name',
         'assignee': 'assignee__username'
     }
+    IMPORT_RQ_ID_TEMPLATE = settings.COMMON_IMPORT_RQ_ID_TEMPLATE.format('job', {}, 'annotations', {})
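+    # passing an empty dict to str.format renders literally as '{}', so the pk
+    # and user placeholders survive to be filled in later via
+    # rq_id_template.format(db_obj.pk, request.user)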
 
     def get_queryset(self):
         queryset = super().get_queryset()
@@ -1332,8 +1335,7 @@ def upload_finished(self, request):
             return _import_annotations(
                 request=request,
                 filename=annotation_file,
-                rq_id=(f"import:annotations-for-job.id{self._object.pk}-"
-                    f"in-{format_name.replace(' ', '_')}-by-{request.user}"),
+                rq_id_template=self.IMPORT_RQ_ID_TEMPLATE,
                 rq_func=dm.task.import_job_annotations,
                 db_obj=self._object,
                 format_name=format_name,
@@ -1440,13 +1442,13 @@ def annotations(self, request, pk):
         elif request.method == 'POST' or request.method == 'OPTIONS':
             format_name = request.query_params.get('format', '')
+            # TODO: fix
             return self.import_annotations(
                 request=request,
                 db_obj=self._object.segment.task,
                 import_func=_import_annotations,
                 rq_func=dm.task.import_job_annotations,
-                rq_id=(f"import:annotations-for-job.id{self._object.pk}-"
-                    f"in-{format_name.replace(' ', '_')}-by-{request.user}"),
+                rq_id_template=self.IMPORT_RQ_ID_TEMPLATE
             )
         elif request.method == 'PUT':
@@ -1460,8 +1462,7 @@ def annotations(self, request, pk):
                 )
                 return _import_annotations(
                     request=request,
-                    rq_id=(f"import:annotations-for-job.id{pk}-"
-                        f"in-{format_name.replace(' ', '_')}-by-{request.user}"),
+                    rq_id_template=self.IMPORT_RQ_ID_TEMPLATE,
                     rq_func=dm.task.import_job_annotations,
                     db_obj=self._object,
                     format_name=format_name,
@@ -2243,7 +2244,7 @@ def _download_file_from_bucket(db_storage, filename, key):
     with open(filename, 'wb+') as f:
         f.write(data.getbuffer())
 
-def _import_annotations(request, rq_id, rq_func, db_obj, format_name,
+def _import_annotations(request, rq_id_template, rq_func, db_obj, format_name,
                         filename=None, location_conf=None, conv_mask_to_poly=True):
     format_desc = {f.DISPLAY_NAME: f
         for f in dm.views.get_import_formats()}.get(format_name)
@@ -2253,6 +2254,8 @@ def _import_annotations(request, rq_id, rq_func, db_obj, format_name,
     elif not format_desc.ENABLED:
         return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
 
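+    # reuse the rq_id the client sends back while polling; the first request
+    # falls back to a deterministic id built from the object pk and the user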
+    rq_id = request.data.get('rq_id', rq_id_template.format(db_obj.pk, request.user))
+
     queue = django_rq.get_queue(settings.CVAT_QUEUES.IMPORT_DATA.value)
     rq_job = queue.fetch_job(rq_id)
 
@@ -2301,18 +2304,15 @@ def _import_annotations(request, rq_id, rq_func, db_obj, format_name,
         }
         rq_job = queue.enqueue_call(
             func=rq_func,
-            args=(db_obj.pk, filename, format_name, conv_mask_to_poly),
+            args=(filename, db_obj.pk, format_name, conv_mask_to_poly),
             job_id=rq_id,
             depends_on=dependent_job,
             meta={**meta, **get_rq_job_meta(request=request, db_obj=db_obj)},
-            on_success=handle_finished_or_failed_job,
-            on_failure=handle_finished_or_failed_job,
             result_ttl=IMPORT_CACHE_SUCCESS_TTL,
             failure_ttl=IMPORT_CACHE_FAILED_TTL
         )
     else:
         if rq_job.is_finished:
-            handle_finished_or_failed_job(rq_job)
             rq_job.delete()
             return Response(status=status.HTTP_201_CREATED)
         elif rq_job.is_failed or \
@@ -2331,7 +2331,7 @@ def _import_annotations(request, rq_id, rq_func, db_obj, format_name,
             return Response(data=exc_info,
                 status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
-    return Response(status=status.HTTP_202_ACCEPTED)
+    return Response({'rq_id': rq_id}, status=status.HTTP_202_ACCEPTED)
 
 def _export_annotations(db_instance, rq_id, request, format_name, action, callback, filename, location_conf):
@@ -2432,7 +2432,7 @@ def _export_annotations(db_instance, rq_id, request, format_name, action, callba
             result_ttl=ttl, failure_ttl=ttl)
     return Response(status=status.HTTP_202_ACCEPTED)
 
-def _import_project_dataset(request, rq_id, rq_func, db_obj, format_name, filename=None, conv_mask_to_poly=True, location_conf=None):
+def _import_project_dataset(request, rq_id_template, rq_func, db_obj, format_name, filename=None, conv_mask_to_poly=True, location_conf=None):
     format_desc = {f.DISPLAY_NAME: f
         for f in dm.views.get_import_formats()}.get(format_name)
     if format_desc is None:
@@ -2441,10 +2441,17 @@ def _import_project_dataset(request, rq_id, rq_func, db_obj, format_name, filena
     elif not format_desc.ENABLED:
         return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
 
+    rq_id = rq_id_template.format(db_obj.pk, request.user)
+
     queue = django_rq.get_queue(settings.CVAT_QUEUES.IMPORT_DATA.value)
     rq_job = queue.fetch_job(rq_id)
 
-    if not rq_job:
+    if not rq_job or rq_job.is_finished or rq_job.is_failed:
+        if rq_job and (rq_job.is_finished or rq_job.is_failed):
+            # for some reason the previous job was not deleted (e.g. the user
+            # closed the browser tab after the job was created and never made
+            # a status check request)
+            rq_job.delete()
         dependent_job = None
         location = location_conf.get('location') if location_conf else None
         if not filename and location != Location.CLOUD_STORAGE:
@@ -2480,19 +2487,17 @@ def _import_project_dataset(request, rq_id, rq_func, db_obj, format_name, filena
 
         rq_job = queue.enqueue_call(
             func=rq_func,
-            args=(db_obj.pk, filename, format_name, conv_mask_to_poly),
+            args=(filename, db_obj.pk, format_name, conv_mask_to_poly),
             job_id=rq_id,
             meta={
                 'tmp_file': filename,
                 **get_rq_job_meta(request=request, db_obj=db_obj),
             },
             depends_on=dependent_job,
-            on_success=handle_finished_or_failed_job,
-            on_failure=handle_finished_or_failed_job,
             result_ttl=IMPORT_CACHE_SUCCESS_TTL,
             failure_ttl=IMPORT_CACHE_FAILED_TTL
         )
     else:
         return Response(status=status.HTTP_409_CONFLICT, data='Import job already exists')
 
-    return Response(status=status.HTTP_202_ACCEPTED)
+    return Response({'rq_id': rq_id}, status=status.HTTP_202_ACCEPTED)
diff --git a/cvat/settings/base.py b/cvat/settings/base.py
index 4d1d5611305..558dd043178 100644
--- a/cvat/settings/base.py
+++ b/cvat/settings/base.py
@@ -338,6 +338,8 @@ class CVAT_QUEUES(Enum):
     'cvat.apps.events.handlers.handle_rq_exception',
 ]
 
+# import:<resource_type>-<pk or uuid>-<subresource>-by-<user>
+COMMON_IMPORT_RQ_ID_TEMPLATE = "import:{}-{}-{}-by-{}"
 
 # JavaScript and CSS compression
 # https://django-compressor.readthedocs.io
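-- 

Note: below is a minimal, self-contained sketch of the cleanup pattern this
patch switches to. Instead of rq's on_success/on_failure callbacks, the
uploaded file is deleted in a try/finally inside the job function itself, so
cleanup runs in the same worker on both success and failure. The names
import_annotations_job and /tmp/upload.zip are illustrative only, not CVAT
APIs.

    import functools
    import os
    from contextlib import suppress

    def remove_resources(func):
        @functools.wraps(func)
        def wrapper(src_file, *args, **kwargs):
            try:
                # run the real import; the return value is preserved for
                # callers that read rq_job.return_value()
                return func(src_file, *args, **kwargs)
            finally:
                # executed on success and on failure alike
                with suppress(FileNotFoundError):
                    os.remove(src_file)
        return wrapper

    @remove_resources
    def import_annotations_job(src_file, obj_id):
        print(f'importing {src_file} for object {obj_id}')

    import_annotations_job('/tmp/upload.zip', 42)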