Skip to content

Commit

Permalink
Add tests for saving both File/ContentFile with string/bytes.
Browse files Browse the repository at this point in the history
Add these tests in a new test class that uses moto. Remove the old test for saving ContentFile.
Move the test for detecting content-type to this new class. Add some more tests around it.
Fix tests that fail because settings.AWS_STORAGE_BUCKET_NAME is now defined.
Fix tests that fail because content is always wrapped.
Fix test for gzipped file since that now only takes bytes.
  • Loading branch information
LincolnPuzey committed Sep 19, 2020
1 parent 7699236 commit ca9c466
Show file tree
Hide file tree
Showing 2 changed files with 111 additions and 42 deletions.
5 changes: 5 additions & 0 deletions tests/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,8 @@
SECRET_KEY = 'hailthesunshine'

USE_TZ = True

# the following test settings are required for moto to work.
AWS_STORAGE_BUCKET_NAME = "test_bucket"
AWS_ACCESS_KEY_ID = "testing_key_id"
AWS_SECRET_ACCESS_KEY = "testing_access_key"
148 changes: 106 additions & 42 deletions tests/test_s3boto3.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,19 @@
import pickle
import threading
from datetime import datetime
from io import BytesIO, StringIO
from textwrap import dedent
from unittest import mock, skipIf
from urllib.parse import urlparse

from boto3 import resource
from botocore.exceptions import ClientError
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.base import ContentFile
from django.core.files.base import ContentFile, File
from django.test import TestCase, override_settings
from django.utils.timezone import is_aware, utc
from moto import mock_s3

from storages.backends import s3boto3

Expand Down Expand Up @@ -97,23 +100,6 @@ def test_storage_url_slashes(self):
self.assertEqual(self.storage.url('path/1'), 'https://example.com/path/1')
self.assertEqual(self.storage.url('path/1/'), 'https://example.com/path/1/')

def test_storage_save(self):
    """Saving a text ContentFile uploads it with a guessed text/plain type."""
    filename = 'test_storage_save.txt'
    file_obj = ContentFile('new content')

    self.storage.save(filename, file_obj)

    # Exactly one S3 object is targeted, keyed by the saved name.
    self.storage.bucket.Object.assert_called_once_with(filename)

    s3_object = self.storage.bucket.Object.return_value
    # ContentType is guessed from the ".txt" extension.
    s3_object.upload_fileobj.assert_called_with(
        file_obj,
        ExtraArgs={'ContentType': 'text/plain'},
    )

def test_storage_save_with_default_acl(self):
"""
Test saving a file with user defined ACL.
Expand All @@ -126,7 +112,7 @@ def test_storage_save_with_default_acl(self):

obj = self.storage.bucket.Object.return_value
obj.upload_fileobj.assert_called_with(
content,
mock.ANY,
ExtraArgs={
'ContentType': 'text/plain',
'ACL': 'private',
Expand All @@ -146,31 +132,13 @@ def test_storage_object_parameters_not_overwritten_by_default(self):

obj = self.storage.bucket.Object.return_value
obj.upload_fileobj.assert_called_with(
content,
mock.ANY,
ExtraArgs={
'ContentType': 'text/plain',
'ACL': 'private',
}
)

def test_content_type(self):
    """With content_type unset (None), the type is guessed from the name."""
    filename = 'test_image.jpg'
    file_obj = ContentFile('data')
    file_obj.content_type = None

    self.storage.save(filename, file_obj)

    self.storage.bucket.Object.assert_called_once_with(filename)
    s3_object = self.storage.bucket.Object.return_value
    # The ".jpg" extension drives the guessed ContentType.
    s3_object.upload_fileobj.assert_called_with(
        file_obj,
        ExtraArgs={'ContentType': 'image/jpeg'},
    )

def test_storage_save_gzipped(self):
"""
Test saving a gzipped file
Expand All @@ -180,7 +148,7 @@ def test_storage_save_gzipped(self):
self.storage.save(name, content)
obj = self.storage.bucket.Object.return_value
obj.upload_fileobj.assert_called_with(
content,
mock.ANY,
ExtraArgs={
'ContentType': 'application/octet-stream',
'ContentEncoding': 'gzip',
Expand Down Expand Up @@ -240,7 +208,7 @@ def test_compress_content_len(self):
Test that file returned by _compress_content() is readable.
"""
self.storage.gzip = True
content = ContentFile("I should be gzip'd")
content = ContentFile(b"I should be gzip'd")
content = self.storage._compress_content(content)
self.assertTrue(len(content.read()) > 0)

Expand Down Expand Up @@ -459,7 +427,7 @@ def test_storage_listdir_base(self):
self.storage._connections.connection.meta.client.get_paginator.return_value = paginator

dirs, files = self.storage.listdir('')
paginator.paginate.assert_called_with(Bucket=None, Delimiter='/', Prefix='')
paginator.paginate.assert_called_with(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Delimiter='/', Prefix='')

self.assertEqual(dirs, ['some', 'other'])
self.assertEqual(files, ['2.txt', '4.txt'])
Expand All @@ -484,7 +452,7 @@ def test_storage_listdir_subdir(self):
self.storage._connections.connection.meta.client.get_paginator.return_value = paginator

dirs, files = self.storage.listdir('some/')
paginator.paginate.assert_called_with(Bucket=None, Delimiter='/', Prefix='some/')
paginator.paginate.assert_called_with(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Delimiter='/', Prefix='some/')

self.assertEqual(dirs, ['path'])
self.assertEqual(files, ['2.txt'])
Expand Down Expand Up @@ -670,3 +638,99 @@ def test_override_init_argument(self):
self.assertEqual(storage.location, 'foo1')
storage = s3boto3.S3Boto3Storage(location='foo2')
self.assertEqual(storage.location, 'foo2')


@mock_s3
class S3Boto3StorageTestsWithMoto(TestCase):
    """
    Integration-style tests that run the backend against moto's fake S3
    instead of unittest.mock.

    This exercises boto3's internal code paths as well; for example the
    failure in https://github.com/jschneier/django-storages/issues/708
    happened inside boto3 and could not be caught with unittest.mock.
    Note that mock_s3 used as a class decorator wraps plain test methods
    automatically, but NOT classmethods or staticmethods — hence the
    explicit decorator on setUpClass below.
    """

    @classmethod
    @mock_s3
    def setUpClass(cls):
        super().setUpClass()
        # The bucket named in settings must exist before any save() call.
        cls.bucket = resource("s3").Bucket(settings.AWS_STORAGE_BUCKET_NAME)
        cls.bucket.create()
        # A single backend instance is shared by every test in this class.
        cls.s3boto3_storage = s3boto3.S3Boto3Storage()

    def _fetched(self, key):
        # Fetch the stored object straight from the (mocked) bucket.
        return self.bucket.Object(key).get()

    def test_save_bytes_file(self):
        """A File wrapping a bytes stream round-trips unchanged."""
        self.s3boto3_storage.save("bytes_file.txt", File(BytesIO(b"foo1")))

        self.assertEqual(self._fetched("bytes_file.txt")['Body'].read(), b"foo1")

    def test_save_string_file(self):
        """A File wrapping a text stream is stored as its encoded bytes."""
        self.s3boto3_storage.save("string_file.txt", File(StringIO("foo2")))

        self.assertEqual(self._fetched("string_file.txt")['Body'].read(), b"foo2")

    def test_save_bytes_content_file(self):
        """A ContentFile holding bytes round-trips unchanged."""
        self.s3boto3_storage.save("bytes_content.txt", ContentFile(b"foo3"))

        self.assertEqual(self._fetched("bytes_content.txt")['Body'].read(), b"foo3")

    def test_save_string_content_file(self):
        """A ContentFile holding text is stored as its encoded bytes."""
        self.s3boto3_storage.save("string_content.txt", ContentFile("foo4"))

        self.assertEqual(self._fetched("string_content.txt")['Body'].read(), b"foo4")

    def test_content_type_guess(self):
        """
        Test saving a file where the ContentType is guessed from the filename.
        """
        key = 'test_image.jpg'
        upload = ContentFile(b'data')
        upload.content_type = None

        self.s3boto3_storage.save(key, upload)

        stored = self._fetched(key)
        self.assertEqual(stored['Body'].read(), b"data")
        # ".jpg" should be recognised as a JPEG image.
        self.assertEqual(stored["ContentType"], "image/jpeg")

    def test_content_type_attribute(self):
        """
        Test saving a file with a custom content type attribute.
        """
        upload = ContentFile(b'data')
        upload.content_type = "test/foo"

        self.s3boto3_storage.save("test_file", upload)

        stored = self._fetched("test_file")
        self.assertEqual(stored['Body'].read(), b"data")
        # An explicit content_type attribute wins over guessing.
        self.assertEqual(stored["ContentType"], "test/foo")

    def test_content_type_not_detectable(self):
        """
        Test saving a file with no detectable content type.
        """
        upload = ContentFile(b'data')
        upload.content_type = None

        self.s3boto3_storage.save("test_file", upload)

        stored = self._fetched("test_file")
        self.assertEqual(stored['Body'].read(), b"data")
        # No extension and no attribute: the backend's default applies.
        self.assertEqual(
            stored["ContentType"],
            s3boto3.S3Boto3Storage.default_content_type,
        )

0 comments on commit ca9c466

Please sign in to comment.