Merge pull request #297 from pebble/feature/file-model-refactor
Refactor S3 model logic into abstract base classes
Katharine authored Jul 5, 2016
2 parents e1c9622 + 2ae9d9d commit 80e6d9f
Showing 15 changed files with 189 additions and 167 deletions.
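
The new base classes come from ide/models/s3file.py and ide/models/textfile.py, neither of which is among the files shown below, so the sketch that follows is only a reconstruction of the interface the updated call sites depend on, not the real implementation. The apparent split is that S3File owns the local-versus-S3 storage plumbing previously duplicated in ResourceVariant and SourceFile (paths built from a per-model folder plus a zero-padded id, the upload size cap, reading and writing contents), while TextFile layers text helpers such as save_text on top. Every name, signature and constant in the sketch is inferred from the code this diff removes.

# Hedged reconstruction of the new base classes; the real modules are Django
# abstract models and are not part of this diff, so treat this as a sketch.
import os

FILE_STORAGE = '/tmp/cloudpebble/'   # stands in for settings.FILE_STORAGE
AWS_ENABLED = False                  # stands in for settings.AWS_ENABLED


class S3File(object):
    folder = None  # subclasses set this, e.g. 'sources' or 'resources/variants'

    def __init__(self, id):
        self.id = id  # the real models get their id from the Django ORM

    @property
    def padded_id(self):
        return '%05d' % self.id  # ResourceVariant overrides this

    @property
    def s3_id(self):
        return self.id

    @property
    def s3_path(self):
        return '%s/%s' % (self.folder, self.s3_id)

    @property
    def local_filename(self):
        padded_id = self.padded_id
        return '%s%s/%s/%s/%s' % (FILE_STORAGE, self.folder,
                                  padded_id[0], padded_id[1], padded_id)

    def save_file(self, stream, file_size=0):
        # The 5 MB cap is taken from the removed ResourceVariant.save_file.
        if file_size > 5 * 1024 * 1024:
            raise Exception("Uploaded file too big.")
        self._write(stream.read())

    def get_contents(self):
        if not AWS_ENABLED:
            return open(self.local_filename).read()
        raise NotImplementedError  # s3.read_file('source', self.s3_path) in the real code

    def _write(self, data):
        if not AWS_ENABLED:
            dirname = os.path.dirname(self.local_filename)
            if not os.path.exists(dirname):
                os.makedirs(dirname)
            with open(self.local_filename, 'wb') as out:
                out.write(data)
        else:
            raise NotImplementedError  # s3.save_file('source', self.s3_path, data) in the real code


class TextFile(S3File):
    def save_text(self, content):
        # Encodes exactly as the removed SourceFile.save_file did.
        self._write(content.encode('utf-8'))

With a shape like that, the subclass changes below reduce to declaring folder = 'sources' on SourceFile and overriding padded_id, s3_id and folder on ResourceVariant to keep the legacy storage layout working.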
4 changes: 2 additions & 2 deletions ide/api/project.py
@@ -184,10 +184,10 @@ def create_project(request):
template.copy_into_project(project)
elif project_type == 'simplyjs':
f = SourceFile.objects.create(project=project, file_name="app.js")
f.save_file(open('{}/src/html/demo.js'.format(settings.SIMPLYJS_ROOT)).read())
f.save_text(open('{}/src/html/demo.js'.format(settings.SIMPLYJS_ROOT)).read())
elif project_type == 'pebblejs':
f = SourceFile.objects.create(project=project, file_name="app.js")
f.save_file(open('{}/src/js/app.js'.format(settings.PEBBLEJS_ROOT)).read())
f.save_text(open('{}/src/js/app.js'.format(settings.PEBBLEJS_ROOT)).read())
if settings.NPM_MANIFEST_SUPPORT and sdk_version != '2':
project.app_keys = '[]'
project.save()
6 changes: 3 additions & 3 deletions ide/api/resource.py
@@ -53,7 +53,7 @@ def create_resource(request, project_id):
resources.append(ResourceIdentifier.objects.create(resource_file=rf, **resource_options))
if posted_file is not None:
variant = ResourceVariant.objects.create(resource_file=rf, tags=",".join(str(int(t)) for t in new_tags))
variant.save_file(posted_file, posted_file.size)
variant.save_file(posted_file, file_size=posted_file.size)

rf.save()
except IntegrityError as e:
@@ -186,13 +186,13 @@ def update_resource(request, project_id, resource_id):
variant.save()
if 'file' in request.FILES:
variant = resource.variants.create(tags=",".join(str(int(t)) for t in new_tags))
variant.save_file(request.FILES['file'], request.FILES['file'].size)
variant.save_file(request.FILES['file'], file_size=request.FILES['file'].size)

# We may get sent a list of pairs telling us which variant gets which replacement file
for tags, file_index in replacement_map:
variant = resource.variants.get(tags=tags)
replacement = replacement_files[int(file_index)]
variant.save_file(replacement, replacement.size)
variant.save_file(replacement, file_size=replacement.size)

if file_name and resource.file_name != file_name:
resource.file_name = file_name
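
The resource views change only in passing the upload size as an explicit keyword. Since the removed ResourceVariant.save_file already had the file_size=0 keyword, the base-class method presumably keeps that signature; the snippet below is a caller-side sketch under that assumption, where variant stands for a ResourceVariant and posted_file for a Django UploadedFile.

# Assumed call pattern after the refactor; save_file is expected to keep the
# (stream, file_size=0) signature and the 5 MB limit from the removed code.
def store_uploaded_variant(variant, posted_file):
    variant.save_file(posted_file, file_size=posted_file.size)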
6 changes: 3 additions & 3 deletions ide/api/source.py
@@ -24,7 +24,7 @@ def create_source_file(request, project_id):
f = SourceFile.objects.create(project=project,
file_name=request.POST['name'],
target=request.POST.get('target', 'app'))
f.save_file(request.POST.get('content', ''))
f.save_text(request.POST.get('content', ''))
except IntegrityError as e:
raise BadRequest(str(e))

@@ -125,7 +125,6 @@ def rename_source_file(request, project_id, file_id):
def save_source_file(request, project_id, file_id):
project = get_object_or_404(Project, pk=project_id, owner=request.user)
source_file = get_object_or_404(SourceFile, pk=file_id, project=project)

if source_file.was_modified_since(int(request.POST['modified'])):
send_td_event('cloudpebble_save_abort_unsafe', data={
'data': {
@@ -134,7 +133,8 @@ }
}
}, request=request, project=project)
raise Exception(_("Could not save: file has been modified since last save."))
source_file.save_file(request.POST['content'], folded_lines=request.POST['folded_lines'])
source_file.save_text(request.POST['content'])
source_file.save_lines(folded_lines=request.POST['folded_lines'])

send_td_event('cloudpebble_save_file', data={
'data': {
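
On the source side, the single save_file(content, folded_lines=...) call is split into save_text for the content and save_lines for the editor fold state. save_lines does not appear anywhere in this diff, so the sketch below is only a guess at its shape, based on the folded-lines handling in the removed SourceFile.save_file.

# Hypothetical shape of SourceFile.save_lines, inferred from the removed
# save_file implementation; the real method is not shown in this diff.
class FoldStateSketch(object):
    folded_lines = "[]"  # JSON-encoded list of folded editor lines

    def save(self):
        pass  # the real model's save() also bumps project.last_modified

    def save_lines(self, folded_lines=None):
        if folded_lines:
            self.folded_lines = folded_lines
        self.save()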
162 changes: 21 additions & 141 deletions ide/models/files.py
@@ -1,18 +1,15 @@
import os
import shutil
import datetime
import json
import logging

from django.conf import settings
from django.db import models
from django.db.models.signals import post_delete
from django.dispatch import receiver
from django.utils.timezone import now
from django.utils.translation import ugettext as _
from django.core.validators import RegexValidator, ValidationError
from django.utils.translation import ugettext_lazy as _

import utils.s3 as s3
from ide.models.s3file import S3File
from ide.models.textfile import TextFile
from ide.models.meta import IdeModel

__author__ = 'katharine'
@@ -91,7 +88,7 @@ class Meta(IdeModel.Meta):
unique_together = (('project', 'file_name'),)


class ResourceVariant(IdeModel):
class ResourceVariant(S3File):
resource_file = models.ForeignKey(ResourceFile, related_name='variants')

VARIANT_DEFAULT = 0
@@ -118,6 +115,19 @@ class ResourceVariant(IdeModel):
tags = models.CommaSeparatedIntegerField(max_length=50, blank=True)
is_legacy = models.BooleanField(default=False) # True for anything migrated out of ResourceFile

# The following three properties are overridden to support is_legacy
@property
def padded_id(self):
return '%05d' % self.resource_file.id if self.is_legacy else '%09d' % self.id

@property
def s3_id(self):
return self.resource_file.id if self.is_legacy else self.id

@property
def folder(self):
return 'resources' if self.is_legacy else 'resources/variants'

def get_tags(self):
return [int(tag) for tag in self.tags.split(",") if tag]

@@ -130,65 +140,6 @@ def get_tag_names(self):
def get_tags_string(self):
return "".join(self.get_tag_names())

def get_local_filename(self, create=False):
if self.is_legacy:
padded_id = '%05d' % self.resource_file.id
filename = '%sresources/%s/%s/%s' % (settings.FILE_STORAGE, padded_id[0], padded_id[1], padded_id)
else:
padded_id = '%09d' % self.id
filename = '%sresources/variants/%s/%s/%s' % (settings.FILE_STORAGE, padded_id[0], padded_id[1], padded_id)
if create:
if not os.path.exists(os.path.dirname(filename)):
os.makedirs(os.path.dirname(filename))
return filename

def get_s3_path(self):
if self.is_legacy:
return 'resources/%s' % self.resource_file.id
else:
return 'resources/variants/%s' % self.id

local_filename = property(get_local_filename)
s3_path = property(get_s3_path)

def save_file(self, stream, file_size=0):
if file_size > 5*1024*1024:
raise Exception(_("Uploaded file too big."))
if not settings.AWS_ENABLED:
if not os.path.exists(os.path.dirname(self.local_filename)):
os.makedirs(os.path.dirname(self.local_filename))
with open(self.local_filename, 'wb') as out:
out.write(stream.read())
else:
s3.save_file('source', self.s3_path, stream.read())

self.resource_file.project.last_modified = now()
self.resource_file.project.save()

def save_string(self, string):
if not settings.AWS_ENABLED:
if not os.path.exists(os.path.dirname(self.local_filename)):
os.makedirs(os.path.dirname(self.local_filename))
with open(self.local_filename, 'wb') as out:
out.write(string)
else:
s3.save_file('source', self.s3_path, string)

self.resource_file.project.last_modified = now()
self.resource_file.project.save()

def copy_to_path(self, path):
if not settings.AWS_ENABLED:
shutil.copy(self.local_filename, path)
else:
s3.read_file_to_filesystem('source', self.s3_path, path)

def get_contents(self):
if not settings.AWS_ENABLED:
return open(self.local_filename).read()
else:
return s3.read_file('source', self.s3_path)

def save(self, *args, **kwargs):
self.full_clean()
self.resource_file.save()
@@ -203,16 +154,15 @@ def get_root_path(self):
suffix = self.get_tags_string()
if not name_parts[0].endswith(suffix):
raise Exception(_("No root path found for resource variant %s") % self.path)
root_path = name_parts[0][:len(name_parts[0])-len(suffix)] + name_parts[1]
root_path = name_parts[0][:len(name_parts[0]) - len(suffix)] + name_parts[1]
if "~" in root_path:
raise ValueError(_("Filenames are not allowed to contain the tilde (~) character, except for specifying tags"))
return root_path

path = property(get_path)
root_path = property(get_root_path)


class Meta(IdeModel.Meta):
class Meta(S3File.Meta):
unique_together = (('resource_file', 'tags'),)


@@ -273,93 +223,23 @@ def save(self, *args, **kwargs):
super(ResourceIdentifier, self).save(*args, **kwargs)


class SourceFile(IdeModel):
class SourceFile(TextFile):
project = models.ForeignKey('Project', related_name='source_files')
file_name = models.CharField(max_length=100, validators=[RegexValidator(r"^[/a-zA-Z0-9_.-]+\.(c|h|js)$", message=_("Invalid filename."))])
last_modified = models.DateTimeField(blank=True, null=True, auto_now=True)
folded_lines = models.TextField(default="[]")
folder = 'sources'

TARGETS = (
('app', _('App')),
('worker', _('Worker')),
)
target = models.CharField(max_length=10, choices=TARGETS, default='app')

def get_local_filename(self):
padded_id = '%05d' % self.id
return '%ssources/%s/%s/%s' % (settings.FILE_STORAGE, padded_id[0], padded_id[1], padded_id)

def get_s3_path(self):
return 'sources/%d' % self.id

def get_contents(self):
if not settings.AWS_ENABLED:
try:
return open(self.local_filename).read()
except IOError:
return ''
else:
return s3.read_file('source', self.s3_path)

def was_modified_since(self, expected_modification_time):
if isinstance(expected_modification_time, int):
expected_modification_time = datetime.datetime.fromtimestamp(expected_modification_time)
assert isinstance(expected_modification_time, datetime.datetime)
return self.last_modified.replace(tzinfo=None, microsecond=0) > expected_modification_time

def save_file(self, content, folded_lines=None):
if not settings.AWS_ENABLED:
if not os.path.exists(os.path.dirname(self.local_filename)):
os.makedirs(os.path.dirname(self.local_filename))
open(self.local_filename, 'w').write(content.encode('utf-8'))
else:
s3.save_file('source', self.s3_path, content.encode('utf-8'))
if folded_lines:
self.folded_lines = folded_lines
self.save()

def copy_to_path(self, path):
if not settings.AWS_ENABLED:
try:
shutil.copy(self.local_filename, path)
except IOError as err:
if err.errno == 2:
open(path, 'w').close() # create the file if it's missing.
else:
raise
else:
s3.read_file_to_filesystem('source', self.s3_path, path)

def save(self, *args, **kwargs):
self.full_clean()
self.project.last_modified = now()
self.project.save()
super(SourceFile, self).save(*args, **kwargs)

@property
def project_path(self):
if self.target == 'app':
return 'src/%s' % self.file_name
else:
return 'worker_src/%s' % self.file_name

local_filename = property(get_local_filename)
s3_path = property(get_s3_path)

class Meta(IdeModel.Meta):
unique_together = (('project', 'file_name'))


@receiver(post_delete)
def delete_file(sender, instance, **kwargs):
if sender == SourceFile or sender == ResourceVariant:
if settings.AWS_ENABLED:
try:
s3.delete_file('source', instance.s3_path)
except:
logger.exception("Failed to delete S3 file")
else:
try:
os.unlink(instance.local_filename)
except OSError:
pass
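
The three properties ResourceVariant now overrides (padded_id, s3_id, folder) replace the legacy branching that the removed get_local_filename and get_s3_path did by hand: the base class is assumed to compose the storage root, the folder and the first two characters of padded_id into the on-disk path, and folder plus s3_id into the S3 key, so legacy variants keep resolving to their old resources/<resource_file.id> locations. The removed post_delete receiver is presumably re-homed next to the base class as well. A small runnable check of the assumed path scheme:

# Reconstruction of the path layout implied by the removed code; FILE_STORAGE
# stands in for settings.FILE_STORAGE and both helper names are invented here.
FILE_STORAGE = '/var/lib/cloudpebble/'

def local_filename(folder, padded_id):
    return '%s%s/%s/%s/%s' % (FILE_STORAGE, folder, padded_id[0], padded_id[1], padded_id)

def s3_path(folder, s3_id):
    return '%s/%s' % (folder, s3_id)

# Legacy variant: keyed by the parent ResourceFile id under resources/
print(local_filename('resources', '%05d' % 17))           # .../resources/0/0/00017
print(s3_path('resources', 17))                           # resources/17
# Current variant: keyed by its own id under resources/variants/
print(local_filename('resources/variants', '%09d' % 42))  # .../resources/variants/0/0/000000042
print(s3_path('resources/variants', 42))                  # resources/variants/42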
2 changes: 1 addition & 1 deletion ide/models/project.py
@@ -193,7 +193,7 @@ def copy_into_project(self, project):

for source_file in self.source_files.all():
new_file = SourceFile.objects.create(project=project, file_name=source_file.file_name)
new_file.save_file(source_file.get_contents().replace("__UUID_GOES_HERE__", uuid_string))
new_file.save_text(source_file.get_contents().replace("__UUID_GOES_HERE__", uuid_string))

# Copy over relevant project properties.
# NOTE: If new, relevant properties are added, they must be copied here.
(The remaining 10 changed files in this commit are not shown here.)
