Skip to content

Commit

Permalink
Duplicate uploads and invert the editor zoom buttons
Browse files Browse the repository at this point in the history
  • Loading branch information
jcougnaud committed Mar 6, 2024
1 parent cb751e9 commit 425fbf7
Show file tree
Hide file tree
Showing 25 changed files with 2,675 additions and 1,412 deletions.
8 changes: 4 additions & 4 deletions frontend/components/ToolbarZoom.vue
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,9 @@
<button
class="btn btn-default"
type="button"
@click="$emit('onZoomIn')"
@click="$emit('onZoomOut')"
>
<span class="fas fa-magnifying-glass-plus" />
<span class="fas fa-magnifying-glass-minus" />
</button>
<div
class="btn-group dropup"
Expand Down Expand Up @@ -66,9 +66,9 @@
<button
class="btn btn-default"
type="button"
@click="$emit('onZoomOut')"
@click="$emit('onZoomIn')"
>
<span class="fas fa-magnifying-glass-minus" />
<span class="fas fa-magnifying-glass-plus" />
</button>
</div>
</template>
Expand Down
11 changes: 10 additions & 1 deletion osis_document/api/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def get_schema(self, *args, **kwargs):
schema = super().get_schema(*args, **kwargs)
schema["openapi"] = "3.0.0"
schema["info"]["title"] = "OSIS Document Service"
schema["info"]["version"] = "1.0.5"
schema["info"]["version"] = "1.0.6"
schema["info"]["description"] = "A set of API endpoints that allow you to get information about uploads"
schema["servers"] = [
{
Expand Down Expand Up @@ -87,6 +87,15 @@ def get_schema(self, *args, **kwargs):
},
},
}
schema["components"]["schemas"]["Upload"] = {
"type": "object",
"properties": {
"upload_id": {
"type": "string",
"format": "uuid",
},
}
}
for path, path_content in schema['paths'].items():
for method, method_content in path_content.items():
method_content['responses'].update(
Expand Down
23 changes: 23 additions & 0 deletions osis_document/api/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -248,3 +248,26 @@ class DeclareFilesAsDeletedSerializer(serializers.Serializer):
help_text="A list of files UUID",
required=True,
)


class UploadDuplicationSerializer(serializers.Serializer):
    """Input serializer for the upload duplication endpoint.

    Validates the list of uuids of the documents to duplicate, whether the
    modified versions of the uploads must be duplicated too, and the optional
    target paths of the duplicated files.
    """

    uuids = serializers.ListField(
        help_text="The list of the uuids of the documents to duplicate.",
        required=True,
    )
    with_modified_upload = serializers.BooleanField(
        help_text=(
            "Boolean that defines if the duplication is also necessary for the modified version of the files. Note "
            "that the uuids of the modified uploads must not be passed and the duplicated ones are not returned."
        ),
        required=False,
        default=False,
    )
    upload_path_by_uuid = serializers.DictField(
        required=False,
        help_text=(
            "A dictionary associating for each uuid, where the duplicated file should be saved. If the path is not "
            "specified for one file, the duplicated file will be saved in the same location as the original file."
        ),
        default=dict,
    )
106 changes: 83 additions & 23 deletions osis_document/api/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
# see http://www.gnu.org/licenses/.
#
# ##############################################################################
from typing import Union, List, Dict, Iterable
from typing import Union, List, Dict, Iterable, Optional
from urllib.parse import urlparse
from uuid import UUID

Expand All @@ -36,11 +36,10 @@
from rest_framework.views import APIView




def get_remote_metadata(token: str) -> Union[dict, None]:
"""Given a token, return the remote metadata."""
import requests

url = "{}metadata/{}".format(settings.OSIS_DOCUMENT_BASE_URL, token)
try:
response = requests.get(url)
Expand All @@ -54,6 +53,7 @@ def get_remote_metadata(token: str) -> Union[dict, None]:
def get_several_remote_metadata(tokens: List[str]) -> Dict[str, dict]:
"""Given a list of tokens, return a dictionary associating each token to upload metadata."""
import requests

url = "{}metadata".format(settings.OSIS_DOCUMENT_BASE_URL)
try:
response = requests.post(
Expand Down Expand Up @@ -82,14 +82,21 @@ def get_raw_content_remotely(token: str):
return None


def get_remote_token(uuid: Union[str, UUID], write_token: bool = False, wanted_post_process: str = None, custom_ttl=None, for_modified_upload: bool = False):
def get_remote_token(
uuid: Union[str, UUID],
write_token: bool = False,
wanted_post_process: str = None,
custom_ttl=None,
for_modified_upload: bool = False,
):
"""
Given an uuid, return a writing or reading remote token.
The custom_ttl parameter is used to define the validity period of the token
The wanted_post_process parameter is used to specify which post-processing action you want the output files for
(example : PostProcessingWanted.CONVERT.name)
"""
import requests

is_valid_uuid = stringify_uuid_and_check_uuid_validity(uuid_input=uuid)
if not is_valid_uuid.get('uuid_valid'):
return None
Expand All @@ -115,8 +122,8 @@ def get_remote_token(uuid: Union[str, UUID], write_token: bool = False, wanted_p
return UploadInvalidException.__class__.__name__
json = response.json()
if (
response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR
and json.get('detail', '') == FileInfectedException.default_detail
response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR
and json.get('detail', '') == FileInfectedException.default_detail
):
return FileInfectedException.__class__.__name__
return json.get('token') or json
Expand Down Expand Up @@ -159,6 +166,61 @@ def get_remote_tokens(uuids: List[str], wanted_post_process=None, custom_ttl=Non
return {}


def documents_remote_duplicate(
    uuids: List[str],
    with_modified_upload: bool = False,
    upload_path_by_uuid: Optional[Dict[str, str]] = None,
) -> Dict[str, str]:
    """
    Duplicate a list of documents.

    :param uuids: List of uuids of the documents to duplicate.
    :param with_modified_upload: boolean to know if the duplication is also necessary for the modified uploads. Note
        that the uuids of the modified uploads must not be passed to the API and the duplicated ones are not returned
        (only the original uuids must be used).
    :param upload_path_by_uuid: dict {uuid: path} specifying, for each uuid, where the duplicated file should be
        saved. If the path is not specified for one file, the duplicated file will be saved in the same location as
        the original file.
    :return: dict {uuid: uuid} A dictionary associating each document uuid with the uuid of the duplicated document.
        If an error occurs for one specific document, the uuid of this document is not returned. An empty dict is
        returned if the whole request fails.
    :raises TypeError: if at least one of the input uuids is not a valid uuid.
    """
    import requests

    # Normalize and validate every input uuid before contacting the API.
    validated_uuids = []
    invalid_uuids = []
    for document_uuid in uuids:
        check = stringify_uuid_and_check_uuid_validity(uuid_input=document_uuid)
        if check.get('uuid_valid'):
            validated_uuids.append(check.get('uuid_stringify'))
        else:
            invalid_uuids.append(document_uuid)

    if invalid_uuids:
        # Keep the historical exception type but explain which values were rejected.
        raise TypeError("Invalid document uuids: {}".format(invalid_uuids))

    url = "{base_url}duplicate".format(base_url=settings.OSIS_DOCUMENT_BASE_URL)

    try:
        response = requests.post(
            url,
            json={
                'uuids': validated_uuids,
                'with_modified_upload': with_modified_upload,
                # Send an empty dict rather than JSON "null" when no custom path is given,
                # as the server-side serializer expects a dict (DictField, default=dict).
                'upload_path_by_uuid': upload_path_by_uuid or {},
            },
            headers={'X-Api-Key': settings.OSIS_DOCUMENT_API_SHARED_SECRET},
        )

        if response.status_code == status.HTTP_201_CREATED:
            # Keep only the entries for which the duplication succeeded.
            return {
                original_uuid: item['upload_id']
                for original_uuid, item in response.json().items()
                if 'upload_id' in item
            }

    except HTTPError:
        # Best-effort call: a transport error yields an empty result rather than a crash.
        pass

    return {}


def confirm_remote_upload(
token,
upload_to=None,
Expand Down Expand Up @@ -197,19 +259,20 @@ def confirm_remote_upload(


def launch_post_processing(
uuid_list: List,
async_post_processing: bool,
post_processing_types: List[str],
post_process_params: Dict[str, Dict[str, str]]
uuid_list: List,
async_post_processing: bool,
post_processing_types: List[str],
post_process_params: Dict[str, Dict[str, str]],
):
import requests

url = "{}post-processing".format(settings.OSIS_DOCUMENT_BASE_URL)
data = {'async_post_processing': async_post_processing,
'post_process_types': post_processing_types,
'files_uuid': uuid_list,
'post_process_params': post_process_params
}
data = {
'async_post_processing': async_post_processing,
'post_process_types': post_processing_types,
'files_uuid': uuid_list,
'post_process_params': post_process_params,
}
response = requests.post(
url,
json=data,
Expand All @@ -218,9 +281,7 @@ def launch_post_processing(
return response.json() if not async_post_processing else response


def declare_remote_files_as_deleted(
uuid_list: Iterable[UUID]
):
def declare_remote_files_as_deleted(uuid_list: Iterable[UUID]):
import requests

url = "{}declare-files-as-deleted".format(settings.OSIS_DOCUMENT_BASE_URL)
Expand All @@ -239,15 +300,14 @@ def declare_remote_files_as_deleted(

def get_progress_async_post_processing(uuid: str, wanted_post_process: str = None):
"""Given an uuid and a type of post-processing,
returns an int corresponding to the post-processing progress percentage
The wanted_post_process parameter is used to specify the post-processing action you want to get progress to.
(example : PostProcessingType.CONVERT.name)
returns an int corresponding to the post-processing progress percentage
The wanted_post_process parameter is used to specify the post-processing action you want to get progress to.
(example : PostProcessingType.CONVERT.name)
"""
import requests

url = "{base_url}get-progress-async-post-processing/{uuid}".format(
base_url=settings.OSIS_DOCUMENT_BASE_URL,
uuid=uuid
base_url=settings.OSIS_DOCUMENT_BASE_URL, uuid=uuid
)
response = requests.post(
url,
Expand Down
2 changes: 2 additions & 0 deletions osis_document/api/views/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
from .security import DeclareFileAsInfectedView
from .token import GetTokenView, GetTokenListView
from .upload import ConfirmUploadView, RequestUploadView, DeclareFilesAsDeletedView
from .duplicate import UploadDuplicationView

__all__ = [
"RawFileView",
Expand All @@ -47,4 +48,5 @@
'PostProcessingView',
"SaveEditorView",
"GetProgressAsyncPostProcessingView",
"UploadDuplicationView",
]
Loading

0 comments on commit 425fbf7

Please sign in to comment.