I am having trouble deleting a file from S3 using Fine Uploader and Django/boto. I can successfully upload files to S3 with Fine Uploader, and retrieve and display the image URL, but deleting hasn't worked.
Looking at the boto debug logs, it seems boto is not sending a token as part of the request to S3, and I think that may be my problem.
First I have the boto debug output, since I suspect someone more familiar with it can spot the issue just by looking at it; my full setup follows, which sticks as closely as possible to the example at https://github.com/Widen/fine-uploader-server/blob/master/python/django-fine-uploader-s3/
Terminal output on delete:
bucket_name: XXXXXXXX
key_name: b45069b8-dc44-45fe-8b67-b25fc088bdea.jpg
aws_bucket: <Bucket: XXXXXXXXX>
aws_key: <Key: XXXXXXXX,b45069b8-dc44-45fe-8b67-b25fc088bdea.jpg>
2014-04-17 15:01:56,576 boto [DEBUG]:path=/b45069b8-dc44-45fe-8b67-b25fc088bdea.jpg
2014-04-17 15:01:56,577 boto [DEBUG]:auth_path=/thisorthis/b45069b8-dc44-45fe-8b67-b25fc088bdea.jpg
2014-04-17 15:01:56,577 boto [DEBUG]:Method: DELETE
2014-04-17 15:01:56,577 boto [DEBUG]:Path: /b45069b8-dc44-45fe-8b67-b25fc088bdea.jpg
2014-04-17 15:01:56,577 boto [DEBUG]:Data:
2014-04-17 15:01:56,577 boto [DEBUG]:Headers: {}
2014-04-17 15:01:56,577 boto [DEBUG]:Host: XXXXXXX.s3.amazonaws.com
2014-04-17 15:01:56,578 boto [DEBUG]:Port: 443
2014-04-17 15:01:56,578 boto [DEBUG]:Params: {}
2014-04-17 15:01:56,578 boto [DEBUG]:establishing HTTPS connection: host=thisorthis.s3.amazonaws.com, kwargs={'port': 443, 'timeout': 70}
2014-04-17 15:01:56,578 boto [DEBUG]:Token: None
2014-04-17 15:01:56,578 boto [DEBUG]:StringToSign:
DELETE
Thu, 17 Apr 2014 15:01:56 GMT
/XXXXXXXX/b45069b8-dc44-45fe-8b67-b25fc088bdea.jpg
2014-04-17 15:01:56,579 boto [DEBUG]:Signature:
AWS XXXXXXXXXXXXXXXXXXXX:dVKlBpulsY9LrOtHOa+xQmurIEM=
[17/Apr/2014 15:01:57] "DELETE /s3/delete/b45069b8-dc44-45fe-8b67-b25fc088bdea?key=b45069b8-dc44-45fe-8b67-b25fc088bdea.jpg&bucket=XXXXXXXX HTTP/1.1" 500 15975
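One way to narrow this down is to try the same delete with boto alone, outside of Django and Fine Uploader, so that any credential or permission problem shows up directly. A minimal sketch, assuming the same environment variables used in settings.py below and a placeholder bucket/key name:

import os
import boto
from boto.exception import S3ResponseError
from boto.s3.connection import Key, S3Connection

boto.set_stream_logger('boto')

conn = S3Connection(os.getenv("AWS_SERVER_PUBLIC_KEY"), os.getenv("AWS_SERVER_SECRET_KEY"))
bucket = conn.get_bucket('mybucketname', validate=False)
key = Key(bucket, 'b45069b8-dc44-45fe-8b67-b25fc088bdea.jpg')

try:
    key.delete()
    print "delete succeeded"
except S3ResponseError, e:
    # A 403 here points at missing DeleteObject permission rather than
    # anything in the Django view or the Fine Uploader configuration.
    print "delete failed:", e.status, e.reason

If this script fails with the same kind of error, the Django view is not the problem.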
settings.py:
AWS_CLIENT_SECRET_KEY = os.getenv("AWS_CLIENT_SECRET_KEY")
AWS_SERVER_PUBLIC_KEY = os.getenv("AWS_SERVER_PUBLIC_KEY")
AWS_SERVER_SECRET_KEY = os.getenv("AWS_SERVER_SECRET_KEY")
AWS_EXPECTED_BUCKET = 'mybucketname'
AWS_MAX_SIZE = 15000000
Obviously I have my actual bucket name there; as I said, uploading works, so I don't think the issue is in the settings.
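Side note: since these values come from os.getenv(), a missing variable silently becomes None and boto only fails later in a less obvious way. A small sketch of a fail-fast check in settings.py (using Django's ImproperlyConfigured; not part of the original example):

from django.core.exceptions import ImproperlyConfigured

# Fail fast if any AWS credential is missing from the environment, instead of
# letting boto fail later with a harder-to-read error.
for _name in ("AWS_CLIENT_SECRET_KEY", "AWS_SERVER_PUBLIC_KEY", "AWS_SERVER_SECRET_KEY"):
    if not os.getenv(_name):
        raise ImproperlyConfigured("Missing environment variable: %s" % _name)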
Fine Uploader instance
$("#fine-uploader).fineUploaderS3({
debug: true,
request: {
endpoint: 'XXXXX',
accessKey: 'XXXXXXXX'
},
template: "simple-previews-template",
signature: {
endpoint: '/s3/signature/'
},
uploadSuccess: {
endpoint: '/s3/success/'
},
iframeSupport: {
localBlankPagePath: '/success.html'
},
deleteFile: {
enabled: true,
endpoint: '/s3/delete/'
},
classes: {
dropActive: "cssClassToAddToDropZoneOnEnter"
},
})
urls.py
url(r'^s3/signature/', views.handle_s3, name="s3_signee"),
url(r'^s3/delete/', views.handle_s3, name='s3_delete'),
url(r'^s3/success/', views.success_redirect_endpoint, name="s3_succes_endpoint")
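Note that Fine Uploader appends the file's UUID to the delete endpoint path (as in the DELETE line of the terminal output above) and passes the full key and bucket as query-string parameters; the prefix pattern above still matches because it isn't anchored with $. If the pattern ever needs to be anchored, a hypothetical variant like this keeps it matching (handle_s3 would then have to accept the extra argument):

# Hypothetical anchored pattern: capture the UUID that Fine Uploader appends.
# handle_s3 would need an extra 'uuid' parameter it can ignore; the full key
# is still read from the query string via request.REQUEST.get('key').
url(r'^s3/delete/(?P<uuid>[0-9a-f-]+)$', views.handle_s3, name='s3_delete'),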
views.py
# Imports needed by the view functions below (not shown in the original paste).
import base64, hashlib, hmac, json
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
# 'development' is the settings module that defines the AWS_* values shown
# above; adjust this import to wherever that module lives in your project.
from myproject import development

try:
    import boto
    from boto.s3.connection import Key, S3Connection
    boto.set_stream_logger('boto')
    S3 = S3Connection(development.AWS_SERVER_PUBLIC_KEY, development.AWS_SERVER_SECRET_KEY)
except ImportError, e:
    boto = None  # handle_DELETE checks this before trying to talk to S3
    print("Could not import boto, the Amazon SDK for Python.")
    print("Deleting files will not work.")
    print("Install boto with")
    print("$ pip install boto")
@csrf_exempt
def success_redirect_endpoint(request):
    """ This is where the uploader will send a POST request after the
    file has been stored in S3.
    """
    key = request.POST.get('key')
    response = {}
    response['url'] = key
    return HttpResponse(json.dumps(response), content_type="application/json")
@csrf_exempt
def handle_s3(request):
    """ View which handles all POST and DELETE requests sent by Fine Uploader
    S3. You will need to adjust these paths/conditions based on your setup.
    """
    if request.method == "POST":
        return handle_POST(request)
    elif request.method == "DELETE":
        return handle_DELETE(request)
    else:
        return HttpResponse(status=405)
def handle_POST(request):
    """ Handle S3 uploader POST requests here. For files <=5MiB this is a simple
    request to sign the policy document. For files >5MiB this is a request
    to sign the headers to start a multipart encoded request.
    """
    if request.POST.get('success', None):
        return make_response(200)
    else:
        request_payload = json.loads(request.body)
        headers = request_payload.get('headers', None)
        if headers:
            print "headers"
            # The presence of the 'headers' property in the request payload
            # means this is a request to sign a REST/multipart request
            # and NOT a policy document
            response_data = sign_headers(headers)
        else:
            print "no headers"
            if not is_valid_policy(request_payload):
                print "is not valid"
                return make_response(400, json.dumps({'invalid': True}))
            response_data = sign_policy_document(request_payload)
        response_payload = json.dumps(response_data)
        return make_response(200, response_payload)
def handle_DELETE(request):
    """ Handle file deletion requests. For this, we use the Amazon Python SDK,
    boto.
    """
    print "handle delete"
    if boto:
        bucket_name = request.REQUEST.get('bucket')
        print "bucket_name: ", bucket_name
        key_name = request.REQUEST.get('key')
        print "key_name:", key_name
        aws_bucket = S3.get_bucket(bucket_name, validate=False)
        print "aws_bucket: ", aws_bucket
        aws_key = Key(aws_bucket, key_name)
        print "aws_key: ", aws_key
        aws_key.delete()
        print "after aws_key.delete()"
        return make_response(200)
    else:
        return make_response(500)
def make_response(status=200, content=None):
    """ Construct an HTTP response. Fine Uploader expects 'application/json'.
    """
    response = HttpResponse()
    response.status_code = status
    response['Content-Type'] = "application/json"
    response.content = content
    return response
def is_valid_policy(policy_document):
    """ Verify the policy document has not been tampered with client-side
    before sending it off.
    """
    bucket = development.AWS_EXPECTED_BUCKET
    parsed_max_size = development.AWS_MAX_SIZE
    print "check validity"
    # bucket = ''
    # parsed_max_size = 0
    for condition in policy_document['conditions']:
        if isinstance(condition, list) and condition[0] == 'content-length-range':
            parsed_max_size = condition[2]
        else:
            if condition.get('bucket', None):
                bucket = condition['bucket']
    return bucket == development.AWS_EXPECTED_BUCKET and parsed_max_size == development.AWS_MAX_SIZE
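# For reference, the policy document that Fine Uploader posts for a simple
# (non-chunked) upload - the object the loop above walks over - looks roughly
# like this (an illustrative sketch, not captured from a real request):
#
# {
#     "expiration": "2014-04-17T16:00:00.000Z",
#     "conditions": [
#         {"acl": "private"},
#         {"bucket": "mybucketname"},
#         {"key": "b45069b8-dc44-45fe-8b67-b25fc088bdea.jpg"},
#         {"Content-Type": "image/jpeg"},
#         {"success_action_status": "200"}
#     ]
# }
#
# A list-style ["content-length-range", min, max] condition only appears when
# a size limit is configured on the uploader, which is what the isinstance()
# branch above checks for.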
def sign_policy_document(policy_document):
    """ Sign and return the policy document for a simple upload.
    http://aws.amazon.com/articles/1434/#signyours3postform
    """
    policy = base64.b64encode(json.dumps(policy_document))
    signature = base64.b64encode(hmac.new(development.AWS_CLIENT_SECRET_KEY, policy, hashlib.sha1).digest())
    return {
        'policy': policy,
        'signature': signature
    }
def sign_headers(headers):
    """ Sign and return the headers for a chunked upload. """
    print "sign headers"
    return {
        'signature': base64.b64encode(hmac.new(development.AWS_CLIENT_SECRET_KEY, headers, hashlib.sha1).digest())
    }
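The 500 in the terminal output above is handle_DELETE letting an exception from aws_key.delete() bubble up through Django: boto raises S3ResponseError whenever S3 refuses the request, so the actual S3 status and reason never make it into the logs. A hedged sketch of how the delete call inside handle_DELETE could surface that error instead of a bare 500 (same names as in the listing above; S3ResponseError would need to be imported from boto.exception alongside the other boto imports):

try:
    aws_key.delete()
    print "after aws_key.delete()"
    return make_response(200)
except S3ResponseError, e:
    # e.status / e.reason / e.body contain what S3 actually said
    # (for example 403 AccessDenied when the policy lacks s3:DeleteObject).
    print "S3 delete failed:", e.status, e.reason
    return make_response(500, json.dumps({'error': e.reason}))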
It turned out I had not configured my S3 bucket policy to also allow DELETE requests from my server, which is why the bucket logs were showing an error (204). I had allowed PUT and GET requests, so uploading and retrieval worked, but DELETE did not. I changed my bucket policy to be more like:
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": ["s3:ListAllMyBuckets"],
            "Resource": "arn:aws:s3:::*"
        },
        {
            "Effect": "Allow",
            "Action": [
                "s3:ListBucket",
                "s3:GetBucketLocation"
            ],
            "Resource": "arn:aws:s3:::xxxxx"
        },
        {
            "Effect": "Allow",
            "Action": [
                "s3:PutObject",
                "s3:GetObject",
                "s3:DeleteObject"
            ],
            "Resource": "arn:aws:s3:::xxxxx/*"
        }
    ]
}
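After updating the policy, re-running the standalone boto delete test from earlier (or the same thing from python manage.py shell) is a quick way to confirm the change took effect. S3 returns 204 when deleting a nonexistent key as long as DeleteObject is allowed, so a made-up key works as a permissions check; a sketch, with the key name purely illustrative:

import os
from boto.s3.connection import Key, S3Connection

conn = S3Connection(os.getenv("AWS_SERVER_PUBLIC_KEY"), os.getenv("AWS_SERVER_SECRET_KEY"))
bucket = conn.get_bucket('mybucketname', validate=False)
# Raises S3ResponseError (403 AccessDenied) only if the policy is still
# missing s3:DeleteObject; otherwise returns quietly.
Key(bucket, 'permission-check.jpg').delete()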