Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix timeout problem on client for large chunked uploads. #92

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions LICENSE
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
BSD 3-Clause License

Copyright (c) 2018, Jeremy Cohen, Imperial College London
2023, Equinor ASA
All rights reserved.

Redistribution and use in source and binary forms, with or without
Expand Down
36 changes: 29 additions & 7 deletions django_drf_filepond/uploaders.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import logging
import os
import time

from django.core.files.uploadedfile import UploadedFile, InMemoryUploadedFile
from rest_framework import status
Expand Down Expand Up @@ -248,8 +249,18 @@ def _handle_chunk_upload(self, request, chunk_id):
try:
tuc = TemporaryUploadChunked.objects.get(upload_id=chunk_id)
except TemporaryUploadChunked.DoesNotExist:
return Response('Invalid chunk upload request data',
status=status.HTTP_400_BAD_REQUEST)
# If this was a rather big file with many chunks, putting the
# chunks together might take longer than the client's timeout
# allows, and the client will then resend the last chunk. Hence we
# check here whether we already have a completed temporary upload
# with the given id and return success in that case.
temp_objects = TemporaryUpload.objects.filter(upload_id=chunk_id)
if len(temp_objects):
return Response(chunk_id, status=status.HTTP_200_OK,
content_type='text/plain')
else:
return Response('Invalid chunk upload request data',
status=status.HTTP_400_BAD_REQUEST)

# Get the required header information to handle the new data
uoffset = request.META.get('HTTP_UPLOAD_OFFSET', None)
Expand Down Expand Up @@ -281,11 +292,22 @@ def _handle_chunk_upload(self, request, chunk_id):
# Check that our recorded offset matches the offset provided by the
# client...if not, there's an error.
if not (int(uoffset) == tuc.offset):
LOG.error('Offset provided by client <%s> doesn\'t match the '
'stored offset <%s> for chunked upload id <%s>'
% (uoffset, tuc.offset, chunk_id))
return Response('ERROR: Chunked upload metadata is invalid.',
status=status.HTTP_400_BAD_REQUEST)
# We might get here because the server is still busy putting
# the chunks together and the client is retrying the upload
# after a timeout.
if tuc.upload_complete:
LOG.debug('Client is trying to reupload last chunk because '
' we are busy creating the resulting file from chunks.')
time.sleep(100)
return Response('ERROR: Still busy creating resulting file from chunks.'
' Please retry later.',
status=status.HTTP_408_REQUEST_TIMEOUT)
else:
LOG.error('Offset provided by client <%s> doesn\'t match the '
'stored offset <%s> for chunked upload id <%s>'
% (uoffset, tuc.offset, chunk_id))
return Response('ERROR: Chunked upload metadata is invalid.',
status=status.HTTP_400_BAD_REQUEST)

file_data_len = len(file_data)
LOG.debug('Got data from request with length %s bytes'
Expand Down