Python 我如何配置Django视图类以将文件上载到某个文件夹?
我正在结合两个Django应用程序 chunked_upload 和 filer，这样我就可以有一个文件系统，通过分块（chunking）上传大文件。filer 基于 ajax_upload，它规定了文件大小限制。
我即将完成，但我仍然需要将我的 chunked_upload 文件定向到我当前所在的文件夹中（目前，无论我所在的文件夹 URL 如何，所有文件都上载到 unsorted uploads）。
由于 chunked_upload 的上传按钮是基于 ajax/javascript 的，因此我必须先引用我的 Django URL，然后才能访问视图。有没有办法将 filer 中旧的基于 ajax 的上传按钮转换为 chunked_upload 中新的基于 django 的上传按钮，以便引用文件夹 id？（或者基本上是将 filer-ajax_upload 转换为 api_chunked_upload，如我的 URL 中所示）
chunked_upload/views.py（新上传视图所在位置）:
class ChunkedUploadView(ChunkedUploadBaseView):
"""
以多个块上载大型文件。此外,还可以恢复
如果上传被中断。
"""
字段名称='文件'
content\u range\u头='HTTP\u content\u range'
内容\范围\模式=重新编译(
r'^bytes(?P\d+)-(?P\d+)/(?P\d+)$'
)
max_bytes=max_bytes#可以上载的最大数据量
#如果“fail\u If\u no\u header”为True,则如果
#找不到内容范围标头。默认值为False以匹配Jquery文件
#上载行为(如果文件小于块,则不发送头)
如果\u无\u头=False,则失败
def获取额外属性(自我、请求):
"""
要传递给新ChunkedUpload实例的额外属性值。
应返回类似于字典的对象。
"""
返回{}
def get_max_字节(自身、请求):
"""
用于限制可上载的最大数据量。`None`表示
没有限制。
您可以覆盖此选项以自定义“最大字节数”,例如基于
登录用户。
"""
返回self.max_字节
def create_chunked_upload(self,save=False,**attrs):
"""
创建新的分块上载实例。如果未指定“upload\u id”,则调用该实例
在POST数据中找到。
"""
分块上传=self.model(**attrs)
#文件开始为空
chunked_upload.file.save(name=MyChunkedUpload.filename,content=ContentFile(“”),save=save)
返回分块上传
def是否有效?分块上传(自我,分块上传):
"""
检查分块上载是否已过期或已完成。
"""
如果chunked_upload.expired:
提升ChunkedUploadError(status=http\u status.http\u 410\u),
详细信息=“上载已过期”)
错误消息='上载已标记为“%s”'
如果chunked_upload.status==完成:
引发ChunkedUploadError(status=http\u status.http\u 400\u BAD\u请求,
详细信息=错误消息%“完成”)
def get_response_数据(自我、分块上传、请求):
"""
响应的数据。应返回类似字典的对象。
"""
返回{
“upload\u id”:分块的\u upload.upload\u id,
}
def_post(self、request、*args、**kwargs):
chunk=request.FILES.get(self.field\u name)
如果chunk为None:
引发ChunkedUploadError(status=http\u status.http\u 400\u BAD\u请求,
detail='没有提交区块文件')
自我验证(请求)
upload\u id=request.POST.get('upload\u id')
如果要上传您的id:
分块上传=获取对象或404(self.get\u queryset(请求),
upload\u id=upload\u id)
self.u有效吗?分块上传(分块上传)
其他:
attrs={'filename':chunk.name}
如果hasattr(请求,'user')已通过身份验证(请求.用户):
attrs['user']=request.user
属性更新(自我获取额外属性(请求))
chunked_upload=self.create_chunked_upload(save=False,**attrs)
content\u range=request.META.get(self.content\u range\u头“”)
match=self.content\u range\u pattern.match(content\u range)
如果匹配:
start=int(match.group('start'))
end=int(match.group('end'))
total=int(match.group('total'))
如果没有标题,则elif self.fail\u:
引发ChunkedUploadError(status=http\u status.http\u 400\u BAD\u请求,
detail='请求头中有错误')
其他:
#如果未提供HTTP\u内容\u范围,请使用整个大小
开始=0
end=chunk.size-1
总计=chunk.size
区块大小=结束-开始+1
max_bytes=self.get_max_bytes(请求)
如果“最大字节数”不是“无”,且总计>最大字节数:
提升块式装载机(
status=http\u status.http\u 400\u错误请求,
detail='file的大小超过了限制(%s字节)'%max\u字节
)
如果分块_upload.offset!=开始:
引发ChunkedUploadError(status=http\u status.http\u 400\u BAD\u请求,
详细信息=“偏移量不匹配”,
偏移量=分块(上传。偏移量)
如果chunk.size!=块大小:
引发ChunkedUploadError(status=http\u status.http\u 400\u BAD\u请求,
detail=“文件大小与标题不匹配”)
分块上传。追加分块(分块,分块大小=分块大小,保存=False)
自我保存(分块上传)
返回响应(自获取响应数据(分块上传、请求),
状态=http_
class ChunkedUploadView(ChunkedUploadBaseView):
    """
    Uploads large files in multiple chunks. Also, has the ability to resume
    if the upload is interrupted.
    """

    # Name of the multipart form field that carries each chunk.
    field_name = 'file'
    # META key under which Django exposes the HTTP Content-Range header.
    content_range_header = 'HTTP_CONTENT_RANGE'
    # Matches "bytes <start>-<end>/<total>" as sent by jQuery File Upload.
    content_range_pattern = re.compile(
        r'^bytes (?P<start>\d+)-(?P<end>\d+)/(?P<total>\d+)$'
    )
    max_bytes = MAX_BYTES  # Max amount of data that can be uploaded
    # If `fail_if_no_header` is True, an exception will be raised if the
    # content-range header is not found. Default is False to match Jquery File
    # Upload behavior (doesn't send header if the file is smaller than chunk)
    fail_if_no_header = False

    def get_extra_attrs(self, request):
        """
        Extra attribute values to be passed to the new ChunkedUpload instance.
        Should return a dictionary-like object.
        """
        return {}

    def get_max_bytes(self, request):
        """
        Used to limit the max amount of data that can be uploaded. `None` means
        no limit.
        You can override this to have a custom `max_bytes`, e.g. based on
        logged user.
        """
        return self.max_bytes

    def create_chunked_upload(self, save=False, **attrs):
        """
        Creates new chunked upload instance. Called if no 'upload_id' is
        found in the POST data.
        """
        chunked_upload = self.model(**attrs)
        # File starts empty; name it after this upload's own filename.
        # BUGFIX: the original passed `MyChunkedUpload.filename`, i.e. the
        # model *class* attribute (a field descriptor), not a string -- the
        # instance's `filename` (set from `attrs`) is what was intended.
        chunked_upload.file.save(name=chunked_upload.filename,
                                 content=ContentFile(''), save=save)
        return chunked_upload

    def is_valid_chunked_upload(self, chunked_upload):
        """
        Check if chunked upload has already expired or is already complete.

        Raises ChunkedUploadError (410 or 400) on failure; returns None
        when the upload may continue.
        """
        if chunked_upload.expired:
            raise ChunkedUploadError(status=http_status.HTTP_410_GONE,
                                     detail='Upload has expired')
        error_msg = 'Upload has already been marked as "%s"'
        if chunked_upload.status == COMPLETE:
            raise ChunkedUploadError(status=http_status.HTTP_400_BAD_REQUEST,
                                     detail=error_msg % 'complete')

    def get_response_data(self, chunked_upload, request):
        """
        Data for the response. Should return a dictionary-like object.
        """
        return {
            'upload_id': chunked_upload.upload_id,
        }

    def _post(self, request, *args, **kwargs):
        """
        Handle one chunk: validate it, locate (resume) or create the
        ChunkedUpload, check Content-Range consistency, append the chunk.
        """
        chunk = request.FILES.get(self.field_name)
        if chunk is None:
            raise ChunkedUploadError(status=http_status.HTTP_400_BAD_REQUEST,
                                     detail='No chunk file was submitted')
        self.validate(request)
        upload_id = request.POST.get('upload_id')
        if upload_id:
            # Resuming: fetch the existing upload and make sure it is still
            # open (not expired / not complete).
            chunked_upload = get_object_or_404(self.get_queryset(request),
                                               upload_id=upload_id)
            self.is_valid_chunked_upload(chunked_upload)
        else:
            # First chunk: build a new, not-yet-saved upload record.
            attrs = {'filename': chunk.name}
            if hasattr(request, 'user') and is_authenticated(request.user):
                attrs['user'] = request.user
            attrs.update(self.get_extra_attrs(request))
            chunked_upload = self.create_chunked_upload(save=False, **attrs)
        content_range = request.META.get(self.content_range_header, '')
        match = self.content_range_pattern.match(content_range)
        if match:
            start = int(match.group('start'))
            end = int(match.group('end'))
            total = int(match.group('total'))
        elif self.fail_if_no_header:
            raise ChunkedUploadError(status=http_status.HTTP_400_BAD_REQUEST,
                                     detail='Error in request headers')
        else:
            # Use the whole size when HTTP_CONTENT_RANGE is not provided
            start = 0
            end = chunk.size - 1
            total = chunk.size
        chunk_size = end - start + 1
        max_bytes = self.get_max_bytes(request)
        if max_bytes is not None and total > max_bytes:
            raise ChunkedUploadError(
                status=http_status.HTTP_400_BAD_REQUEST,
                detail='Size of file exceeds the limit (%s bytes)' % max_bytes
            )
        if chunked_upload.offset != start:
            raise ChunkedUploadError(status=http_status.HTTP_400_BAD_REQUEST,
                                     detail='Offsets do not match',
                                     offset=chunked_upload.offset)
        if chunk.size != chunk_size:
            raise ChunkedUploadError(status=http_status.HTTP_400_BAD_REQUEST,
                                     detail="File size doesn't match headers")
        chunked_upload.append_chunk(chunk, chunk_size=chunk_size, save=False)
        self._save(chunked_upload)
        return Response(self.get_response_data(chunked_upload, request),
                        status=http_status.HTTP_200_OK)
def get_urls(self):
    """Prepend the filer ajax-upload routes to the admin's own URL list.

    Both routes share the name ``filer-ajax_upload``; Django's ``reverse``
    chooses between them by whether a ``folder_id`` argument is supplied.
    """
    upload_patterns = [
        url(r'^operations/upload/(?P<folder_id>[0-9]+)/$',
            ajax_upload,
            name='filer-ajax_upload'),
        url(r'^operations/upload/no_folder/$',
            ajax_upload,
            name='filer-ajax_upload'),
    ]
    return upload_patterns + super(ClipboardAdmin, self).get_urls()
@csrf_exempt
def ajax_upload(request, folder_id=None):
    """
    Receives an upload from the uploader. Receives only one file at a time.

    If `folder_id` is given, the file is stored in that folder after a
    permission check; otherwise `folder` stays None and the file ends up
    unfiled ("unsorted uploads"). Always answers JSON: file metadata on
    success, `{'error': ...}` (status 500 for upload/form failures) otherwise.
    """
    folder = None
    if folder_id:
        try:
            # Get folder
            folder = MyChunkedFolder.objects.get(pk=folder_id)
        except MyChunkedFolder.DoesNotExist:
            return JsonResponse({'error': NO_FOLDER_ERROR})
    # check permissions
    if folder and not folder.has_add_children_permission(request):
        return JsonResponse({'error': NO_PERMISSIONS_FOR_FOLDER})
    try:
        if len(request.FILES) == 1:
            # dont check if request is ajax or not, just grab the file
            upload, filename, is_raw = handle_request_files_upload(request)
        else:
            # else process the request as usual
            upload, filename, is_raw = handle_upload(request)
        # TODO: Deprecated/refactor
        # Get clipboad
        # clipboard = Clipboard.objects.get_or_create(user=request.user)[0]
        # Find the file type: first registered model that claims this file.
        FileForm = None
        for filer_class in filer_settings.FILER_FILE_MODELS:
            FileSubClass = load_model(filer_class)
            # TODO: What if there are more than one that qualify?
            if FileSubClass.matches_file_type(filename, upload, request):
                FileForm = modelform_factory(
                    model=FileSubClass,
                    fields=('original_filename', 'owner', 'file')
                )
                break
        if FileForm is None:
            # BUGFIX: without this guard, a file matching no configured model
            # left `FileForm` unbound and crashed with a NameError instead of
            # returning a clean JSON error response.
            raise UploadException(
                "AJAX request not valid: no file model matches '%s'" % filename)
        uploadform = FileForm({'original_filename': filename,
                               'owner': request.user.pk},
                              {'file': upload})
        if uploadform.is_valid():
            file_obj = uploadform.save(commit=False)
            # Enforce the FILER_IS_PUBLIC_DEFAULT
            file_obj.is_public = filer_settings.FILER_IS_PUBLIC_DEFAULT
            file_obj.folder = folder
            file_obj.save()
            # TODO: Deprecated/refactor
            # clipboard_item = ClipboardItem(
            #     clipboard=clipboard, file=file_obj)
            # clipboard_item.save()
            # Try to generate thumbnails.
            if not file_obj.icons:
                # There is no point to continue, as we can't generate
                # thumbnails for this file. Usual reasons: bad format or
                # filename.
                file_obj.delete()
                # This would be logged in BaseImage._generate_thumbnails()
                # if FILER_ENABLE_LOGGING is on.
                return JsonResponse(
                    {'error': 'failed to generate icons for file'},
                    status=500,
                )
            thumbnail = None
            # Backwards compatibility: try to get specific icon size (32px)
            # first. Then try medium icon size (they are already sorted),
            # fallback to the first (smallest) configured icon.
            for size in (['32']
                         + filer_settings.FILER_ADMIN_ICON_SIZES[1::-1]):
                try:
                    thumbnail = file_obj.icons[size]
                    break
                except KeyError:
                    continue
            data = {
                'thumbnail': thumbnail,
                'alt_text': '',
                'label': str(file_obj),
                'file_id': file_obj.pk,
            }
            # prepare preview thumbnail
            # NOTE(review): exact-type check deliberately excludes Image
            # subclasses?  Confirm before switching to isinstance().
            if type(file_obj) == Image:
                thumbnail_180_options = {
                    'size': (180, 180),
                    'crop': True,
                    'upscale': True,
                }
                thumbnail_180 = file_obj.file.get_thumbnail(
                    thumbnail_180_options)
                data['thumbnail_180'] = thumbnail_180.url
                data['original_image'] = file_obj.url
            return JsonResponse(data)
        else:
            form_errors = '; '.join(['%s: %s' % (
                field,
                ', '.join(errors)) for field, errors in list(
                    uploadform.errors.items())
            ])
            raise UploadException(
                "AJAX request not valid: form invalid '%s'" % (
                    form_errors,))
    except UploadException as e:
        return JsonResponse({'error': str(e)}, status=500)
{% block object-tools-items %}
{# Folder toolbar: "New Folder" button plus a chunked-upload widget.      #}
{# The widget appears twice: once for a real folder (URL carries its id), #}
{# once for the "unsorted uploads" pseudo-folder (URL without folder_id). #}
<div class="navigator-button-wrapper">
    {% if folder.can_have_subfolders and can_make_folder %}
    <a href="{% url 'admin:filer-directory_listing-make_root_folder' %}?parent_id={{ folder.id }}{% if is_popup %}&_popup=1{% endif %}"
       title="{% trans 'Adds a new Folder' %}"
       class="navigator-button"
       onclick="return showAddAnotherPopup(this);">
        {% trans "New Folder" %}
    </a>
    {% endif %}
    {% if permissions.has_add_children_permission and not folder.is_root %}
    {% csrf_token %}
    <input id="chunked_upload" type="file" name="the_file">
    <p id="progress"></p>
    <div id="messages"></div>
    <script type="text/javascript">
        var md5 = "",
            csrf = $("input[name='csrfmiddlewaretoken']")[0].value,
            form_data = [{"name": "csrfmiddlewaretoken", "value": csrf}];
        // Incrementally MD5 the file with SparkMD5 so the server can verify
        // the reassembled upload on completion.
        function calculate_md5(file, chunk_size) {
            var slice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
                chunks = chunks = Math.ceil(file.size / chunk_size),
                current_chunk = 0,
                spark = new SparkMD5.ArrayBuffer();
            function onload(e) {
                spark.append(e.target.result); // append chunk
                current_chunk++;
                if (current_chunk < chunks) {
                    read_next_chunk();
                } else {
                    md5 = spark.end();
                }
            };
            function read_next_chunk() {
                var reader = new FileReader();
                reader.onload = onload;
                var start = current_chunk * chunk_size,
                    end = Math.min(start + chunk_size, file.size);
                reader.readAsArrayBuffer(slice.call(file, start, end));
            };
            read_next_chunk();
        }
        $("#chunked_upload").fileupload({
            // Post chunks to the folder-specific endpoint.
            url: "{% url 'api_chunked_upload' folder_id=folder.id %}",
            dataType: "json",
            maxChunkSize: 100000, // Chunks of 100 kB
            formData: form_data,
            add: function(e, data) { // Called before starting upload
                $("#messages").empty();
                // If this is the second file you're uploading we need to remove the
                // old upload_id and just keep the csrftoken (which is always first).
                form_data.splice(1);
                calculate_md5(data.files[0], 100000); // Again, chunks of 100 kB
                data.submit();
            },
            chunkdone: function (e, data) { // Called after uploading each chunk
                if (form_data.length < 2) {
                    form_data.push(
                        {"name": "upload_id", "value": data.result.upload_id}
                    );
                }
                $("#messages").append($('<p>').text(JSON.stringify(data.result)));
                var progress = parseInt(data.loaded / data.total * 100.0, 10);
                $("#progress").text(Array(progress).join("=") + "> " + progress + "%");
            },
            done: function (e, data) { // Called when the file has completely uploaded
                $.ajax({
                    type: "POST",
                    url: "{% url 'api_chunked_upload_complete' %}",
                    data: {
                        csrfmiddlewaretoken: csrf,
                        upload_id: data.result.upload_id,
                        md5: md5
                    },
                    dataType: "json",
                    success: function(data) {
                        $("#messages").append($('<p>').text(JSON.stringify(data)));
                    }
                });
            },
        });
    </script>
    {% elif folder.is_unsorted_uploads %}
    {% csrf_token %}
    <input id="chunked_upload" type="file" name="the_file">
    <p id="progress"></p>
    <div id="messages"></div>
    <script type="text/javascript">
        var md5 = "",
            csrf = $("input[name='csrfmiddlewaretoken']")[0].value,
            form_data = [{ "name": "csrfmiddlewaretoken", "value": csrf }];
        // Same MD5 helper as the folder branch above.
        function calculate_md5(file, chunk_size) {
            var slice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
                chunks = chunks = Math.ceil(file.size / chunk_size),
                current_chunk = 0,
                spark = new SparkMD5.ArrayBuffer();
            function onload(e) {
                spark.append(e.target.result); // append chunk
                current_chunk++;
                if (current_chunk < chunks) {
                    read_next_chunk();
                } else {
                    md5 = spark.end();
                }
            };
            function read_next_chunk() {
                var reader = new FileReader();
                reader.onload = onload;
                var start = current_chunk * chunk_size,
                    end = Math.min(start + chunk_size, file.size);
                reader.readAsArrayBuffer(slice.call(file, start, end));
            };
            read_next_chunk();
        }
        $("#chunked_upload").fileupload({
            // No folder_id here: uploads land in "unsorted uploads".
            url: "{% url 'api_chunked_upload' %}",
            dataType: "json",
            maxChunkSize: 100000, // Chunks of 100 kB
            formData: form_data,
            add: function (e, data) { // Called before starting upload
                $("#messages").empty();
                // If this is the second file you're uploading we need to remove the
                // old upload_id and just keep the csrftoken (which is always first).
                form_data.splice(1);
                calculate_md5(data.files[0], 100000); // Again, chunks of 100 kB
                data.submit();
            },
            chunkdone: function (e, data) { // Called after uploading each chunk
                if (form_data.length < 2) {
                    form_data.push(
                        { "name": "upload_id", "value": data.result.upload_id }
                    );
                }
                $("#messages").append($('<p>').text(JSON.stringify(data.result)));
                var progress = parseInt(data.loaded / data.total * 100.0, 10);
                // BUGFIX: was join("") which rendered an empty progress bar;
                // use "=" to match the folder branch above ("====> 45%").
                $("#progress").text(Array(progress).join("=") + "> " + progress + "%");
            },
            done: function (e, data) { // Called when the file has completely uploaded
                $.ajax({
                    type: "POST",
                    url: "{% url 'api_chunked_upload_complete' %}",
                    data: {
                        csrfmiddlewaretoken: csrf,
                        upload_id: data.result.upload_id,
                        md5: md5
                    },
                    dataType: "json",
                    success: function (data) {
                        $("#messages").append($('<p>').text(JSON.stringify(data)));
                    }
                });
            },
        });
    </script>
    {% endif %}
# Routes for the chunked-upload API.  Both patterns share the name
# 'api_chunked_upload': `{% url %}` reversing selects between them by
# whether a `folder_id` argument is supplied.
urlpatterns = [
    url(r'^operations/upload/(?P<folder_id>[0-9]+)/$',
        MyChunkedUploadView.as_view(),
        name='api_chunked_upload'),
    url(r'^operations/upload/no_folder/$',
        MyChunkedUploadView.as_view(),
        name='api_chunked_upload'),
]