Skip to content

Commit 82d8725

Browse files
author
Joseph Atkins-Turkish
committed
Remove support for AWS_ENABLED==False
1 parent 0bd8d7d commit 82d8725

File tree

11 files changed

+45
-156
lines changed

11 files changed

+45
-156
lines changed

app.json

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77
"AWS_ACCESS_KEY_ID": {
88
"required": true
99
},
10-
"AWS_ENABLED": "yes",
1110
"AWS_S3_BUILDS_BUCKET": "builds-staging.cloudpebble.net",
1211
"AWS_S3_EXPORT_BUCKET": "export-staging.cloudpebble.net",
1312
"AWS_S3_SOURCE_BUCKET": "source-staging.cloudpebble.net",

cloudpebble/settings.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -302,7 +302,6 @@
302302
MAILCHIMP_API_KEY = _environ.get('MAILCHIMP_API_KEY', None)
303303
MAILCHIMP_LIST_ID = _environ.get('MAILCHIMP_LIST_ID', None)
304304

305-
AWS_ENABLED = 'AWS_ENABLED' in _environ
306305
AWS_ACCESS_KEY_ID = _environ.get('AWS_ACCESS_KEY_ID', None)
307306
AWS_SECRET_ACCESS_KEY = _environ.get('AWS_SECRET_ACCESS_KEY', None)
308307

ide/api/project.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,4 @@
1-
import os
21
import re
3-
import tempfile
42
import time
53
import json
64
from django.conf import settings

ide/api/resource.py

Lines changed: 5 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import json
2-
from django.conf import settings
32
from django.contrib.auth.decorators import login_required
43
from django.db import transaction
54
from django.http import HttpResponse, HttpResponseRedirect
@@ -241,13 +240,8 @@ def show_resource(request, project_id, resource_id, variant):
241240
}
242241
content_disposition = "attachment; filename=\"%s\"" % resource.file_name
243242
content_type = content_types[resource.kind]
244-
if settings.AWS_ENABLED:
245-
headers = {
246-
'response-content-disposition': content_disposition,
247-
'Content-Type': content_type
248-
}
249-
return HttpResponseRedirect(s3.get_signed_url('source', variant.s3_path, headers=headers))
250-
else:
251-
response = HttpResponse(open(variant.local_filename), content_type=content_type)
252-
response['Content-Disposition'] = content_disposition
253-
return response
243+
headers = {
244+
'response-content-disposition': content_disposition,
245+
'Content-Type': content_type
246+
}
247+
return HttpResponseRedirect(s3.get_signed_url('source', variant.s3_path, headers=headers))

ide/models/build.py

Lines changed: 7 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,5 @@
11
import uuid
22
import json
3-
import shutil
4-
import os
5-
import os.path
63
from django.conf import settings
74
from django.db import models
85
from ide.models.project import Project
@@ -40,19 +37,10 @@ class BuildResult(IdeModel):
4037
finished = models.DateTimeField(blank=True, null=True)
4138

4239
def _get_dir(self):
43-
if settings.AWS_ENABLED:
44-
return '%s/' % self.uuid
45-
else:
46-
path = '%s%s/%s/%s/' % (settings.MEDIA_ROOT, self.uuid[0], self.uuid[1], self.uuid)
47-
if not os.path.exists(path):
48-
os.makedirs(path)
49-
return path
40+
return '%s/' % self.uuid
5041

5142
def get_url(self):
52-
if settings.AWS_ENABLED:
53-
return "%s%s/" % (settings.MEDIA_URL, self.uuid)
54-
else:
55-
return '%s%s/%s/%s/' % (settings.MEDIA_URL, self.uuid[0], self.uuid[1], self.uuid)
43+
return "%s%s/" % (settings.MEDIA_URL, self.uuid)
5644

5745
def get_pbw_filename(self):
5846
return '%swatchface.pbw' % self._get_dir()
@@ -79,39 +67,20 @@ def get_simplyjs_url(self):
7967
return '%ssimply.js' % self.get_url()
8068

8169
def save_build_log(self, text):
82-
if not settings.AWS_ENABLED:
83-
with open(self.build_log, 'w') as f:
84-
f.write(text)
85-
else:
86-
s3.save_file('builds', self.build_log, text, public=True, content_type='text/plain')
70+
s3.save_file('builds', self.build_log, text, public=True, content_type='text/plain')
8771

8872
def read_build_log(self):
89-
if not settings.AWS_ENABLED:
90-
with open(self.build_log, 'r') as f:
91-
return f.read()
92-
else:
93-
return s3.read_file('builds', self.build_log)
73+
return s3.read_file('builds', self.build_log)
9474

9575
def save_debug_info(self, json_info, platform, kind):
9676
text = json.dumps(json_info)
97-
if not settings.AWS_ENABLED:
98-
with open(self.get_debug_info_filename(platform, kind), 'w') as f:
99-
f.write(text)
100-
else:
101-
s3.save_file('builds', self.get_debug_info_filename(platform, kind), text, public=True, content_type='application/json')
77+
s3.save_file('builds', self.get_debug_info_filename(platform, kind), text, public=True, content_type='application/json')
10278

10379
def save_pbw(self, pbw_path):
104-
if not settings.AWS_ENABLED:
105-
shutil.move(pbw_path, self.pbw)
106-
else:
107-
s3.upload_file('builds', self.pbw, pbw_path, public=True, download_filename='%s.pbw' % self.project.app_short_name.replace('/','-'))
80+
s3.upload_file('builds', self.pbw, pbw_path, public=True, download_filename='%s.pbw' % self.project.app_short_name.replace('/','-'))
10881

10982
def save_simplyjs(self, javascript):
110-
if not settings.AWS_ENABLED:
111-
with open(self.simplyjs, 'w') as f:
112-
f.write(javascript)
113-
else:
114-
s3.save_file('builds', self.simplyjs, javascript, public=True, content_type='text/javascript')
83+
s3.save_file('builds', self.simplyjs, javascript, public=True, content_type='text/javascript')
11584

11685
pbw = property(get_pbw_filename)
11786
build_log = property(get_build_log)

ide/models/files.py

Lines changed: 4 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,6 @@
11
import os
2-
import shutil
32
import traceback
4-
import datetime
53
import json
6-
from django.conf import settings
7-
from django.core.validators import RegexValidator
84
from django.db import models
95
from django.db.models.signals import post_delete
106
from django.dispatch import receiver
@@ -251,13 +247,7 @@ class Meta(IdeModel.Meta):
251247
@receiver(post_delete)
252248
def delete_file(sender, instance, **kwargs):
253249
if issubclass(sender, S3File):
254-
if settings.AWS_ENABLED:
255-
try:
256-
s3.delete_file('source', instance.s3_path)
257-
except:
258-
traceback.print_exc()
259-
else:
260-
try:
261-
os.unlink(instance.local_filename)
262-
except OSError:
263-
pass
250+
try:
251+
s3.delete_file('source', instance.s3_path)
252+
except:
253+
traceback.print_exc()

ide/models/meta.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
from django.db import models
22

3+
34
class IdeModel(models.Model):
45
class Meta:
56
abstract = True

ide/models/s3file.py

Lines changed: 3 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,3 @@
1-
import shutil
2-
import os
3-
41
from django.utils.translation import ugettext as _
52
from django.conf import settings
63
from django.utils.timezone import now
@@ -13,7 +10,6 @@ class S3File(IdeModel):
1310
bucket_name = 'source'
1411
folder = None
1512
project = None
16-
_create_local_if_not_exists = False
1713

1814
@property
1915
def padded_id(self):
@@ -32,41 +28,11 @@ def s3_id(self):
3228
def s3_path(self):
3329
return '%s/%s' % (self.folder, self.s3_id)
3430

35-
def _get_contents_local(self):
36-
try:
37-
return open(self.local_filename).read()
38-
except IOError:
39-
if self._create_local_if_not_exists:
40-
return ''
41-
else:
42-
raise
43-
44-
def _save_string_local(self, string):
45-
if not os.path.exists(os.path.dirname(self.local_filename)):
46-
os.makedirs(os.path.dirname(self.local_filename))
47-
with open(self.local_filename, 'wb') as out:
48-
out.write(string)
49-
50-
def _copy_to_path_local(self, path):
51-
try:
52-
shutil.copy(self.local_filename, path)
53-
except IOError as err:
54-
if err.errno == 2 and self._crete_local_if_not_exists:
55-
open(path, 'w').close() # create the file if it's missing.
56-
else:
57-
raise
58-
5931
def get_contents(self):
60-
if not settings.AWS_ENABLED:
61-
return self._get_contents_local()
62-
else:
63-
return s3.read_file(self.bucket_name, self.s3_path)
32+
return s3.read_file(self.bucket_name, self.s3_path)
6433

6534
def save_string(self, string):
66-
if not settings.AWS_ENABLED:
67-
self._save_string_local(string)
68-
else:
69-
s3.save_file(self.bucket_name, self.s3_path, string)
35+
s3.save_file(self.bucket_name, self.s3_path, string)
7036
if self.project:
7137
self.project.last_modified = now()
7238
self.project.save()
@@ -80,10 +46,7 @@ def save_text(self, content):
8046
self.save_string(content.encode('utf-8'))
8147

8248
def copy_to_path(self, path):
83-
if not settings.AWS_ENABLED:
84-
self._copy_to_path_local(path)
85-
else:
86-
s3.read_file_to_filesystem(self.bucket_name, self.s3_path, path)
49+
s3.read_file_to_filesystem(self.bucket_name, self.s3_path, path)
8750

8851
class Meta(IdeModel.Meta):
8952
abstract = True

ide/models/scriptfile.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@ class ScriptFile(S3File):
1010
""" ScriptFiles add support to TextFiles for last-modified timestamps and code folding """
1111
last_modified = models.DateTimeField(blank=True, null=True, auto_now=True)
1212
folded_lines = models.TextField(default="[]")
13-
_create_local_if_not_exists = True
1413

1514
def was_modified_since(self, expected_modification_time):
1615
if isinstance(expected_modification_time, int):

ide/tasks/archive.py

Lines changed: 6 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -63,16 +63,9 @@ def create_archive(project_id):
6363

6464
send_td_event('cloudpebble_export_project', project=project)
6565

66-
if not settings.AWS_ENABLED:
67-
outfile = '%s%s/%s.zip' % (settings.EXPORT_DIRECTORY, u, prefix)
68-
os.makedirs(os.path.dirname(outfile), 0755)
69-
shutil.copy(filename, outfile)
70-
os.chmod(outfile, 0644)
71-
return '%s%s/%s.zip' % (settings.EXPORT_ROOT, u, prefix)
72-
else:
73-
outfile = '%s/%s.zip' % (u, prefix)
74-
s3.upload_file('export', outfile, filename, public=True, content_type='application/zip')
75-
return '%s%s' % (settings.EXPORT_ROOT, outfile)
66+
outfile = '%s/%s.zip' % (u, prefix)
67+
s3.upload_file('export', outfile, filename, public=True, content_type='application/zip')
68+
return '%s%s' % (settings.EXPORT_ROOT, outfile)
7669

7770

7871
@task(acks_late=True)
@@ -105,7 +98,7 @@ def get_filename_variant(file_name, resource_suffix_map):
10598
split = file_name_parts[0].split("~")
10699
tags = split[1:]
107100
try:
108-
ids = [resource_suffix_map['~'+tag] for tag in tags]
101+
ids = [resource_suffix_map['~' + tag] for tag in tags]
109102
except KeyError as key:
110103
raise ValueError('Unrecognised tag %s' % key)
111104
root_file_name = split[0] + file_name_parts[1]
@@ -146,7 +139,6 @@ def do_import_archive(project_id, archive, delete_project=False):
146139
raise Exception("Too many files in zip file.")
147140
file_list = [x.filename for x in contents]
148141

149-
150142
base_dir = find_project_root(file_list)
151143
dir_end = len(base_dir)
152144

@@ -223,9 +215,9 @@ def make_valid_filename(zip_entry):
223215
filename = make_valid_filename(zipitem)
224216
if filename is False or not filename.startswith(RES_PATH):
225217
continue
226-
filename = filename[len(RES_PATH)+1:]
218+
filename = filename[len(RES_PATH) + 1:]
227219
try:
228-
extracted = z.open("%s%s/%s"%(base_dir, RES_PATH, filename))
220+
extracted = z.open("%s%s/%s" % (base_dir, RES_PATH, filename))
229221
except KeyError:
230222
print "Failed to open %s" % filename
231223
continue

utils/s3.py

Lines changed: 19 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
from django.conf import settings
55
import urllib
66

7+
78
def _ensure_bucket_exists(s3, bucket):
89
try:
910
s3.create_bucket(bucket)
@@ -12,57 +13,43 @@ def _ensure_bucket_exists(s3, bucket):
1213
else:
1314
print "Created bucket %s" % bucket
1415

15-
if settings.AWS_ENABLED:
16-
if settings.AWS_S3_FAKE_S3 is None:
17-
_s3 = boto.connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
18-
else:
19-
host, port = (settings.AWS_S3_FAKE_S3.split(':', 2) + [80])[:2]
20-
port = int(port)
21-
_s3 = boto.connect_s3("key_id", "secret_key", is_secure=False, port=port,
22-
host=host, calling_format=OrdinaryCallingFormat())
23-
_ensure_bucket_exists(_s3, settings.AWS_S3_SOURCE_BUCKET)
24-
_ensure_bucket_exists(_s3, settings.AWS_S3_EXPORT_BUCKET)
25-
_ensure_bucket_exists(_s3, settings.AWS_S3_BUILDS_BUCKET)
26-
27-
_buckets = {
28-
'source': _s3.get_bucket(settings.AWS_S3_SOURCE_BUCKET),
29-
'export': _s3.get_bucket(settings.AWS_S3_EXPORT_BUCKET),
30-
'builds': _s3.get_bucket(settings.AWS_S3_BUILDS_BUCKET),
31-
}
32-
else:
33-
_s3 = None
34-
_buckets = None
3516

17+
if settings.AWS_S3_FAKE_S3 is None:
18+
_s3 = boto.connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
19+
else:
20+
host, port = (settings.AWS_S3_FAKE_S3.split(':', 2) + [80])[:2]
21+
port = int(port)
22+
_s3 = boto.connect_s3("key_id", "secret_key", is_secure=False, port=port,
23+
host=host, calling_format=OrdinaryCallingFormat())
24+
_ensure_bucket_exists(_s3, settings.AWS_S3_SOURCE_BUCKET)
25+
_ensure_bucket_exists(_s3, settings.AWS_S3_EXPORT_BUCKET)
26+
_ensure_bucket_exists(_s3, settings.AWS_S3_BUILDS_BUCKET)
3627

37-
def _requires_aws(fn):
38-
if settings.AWS_ENABLED:
39-
return fn
40-
else:
41-
def complain(*args, **kwargs):
42-
raise Exception("AWS_ENABLED must be True to call %s" % fn.__name__)
43-
return complain
28+
_buckets = {
29+
'source': _s3.get_bucket(settings.AWS_S3_SOURCE_BUCKET),
30+
'export': _s3.get_bucket(settings.AWS_S3_EXPORT_BUCKET),
31+
'builds': _s3.get_bucket(settings.AWS_S3_BUILDS_BUCKET),
32+
}
4433

4534

46-
@_requires_aws
4735
def read_file(bucket_name, path):
4836
bucket = _buckets[bucket_name]
4937
key = bucket.get_key(path)
5038
return key.get_contents_as_string()
5139

5240

53-
@_requires_aws
5441
def read_file_to_filesystem(bucket_name, path, destination):
5542
bucket = _buckets[bucket_name]
5643
key = bucket.get_key(path)
5744
key.get_contents_to_filename(destination)
5845

59-
@_requires_aws
46+
6047
def delete_file(bucket_name, path):
6148
bucket = _buckets[bucket_name]
6249
key = bucket.get_key(path)
6350
key.delete()
6451

65-
@_requires_aws
52+
6653
def save_file(bucket_name, path, value, public=False, content_type='application/octet-stream'):
6754
bucket = _buckets[bucket_name]
6855
key = Key(bucket)
@@ -76,7 +63,6 @@ def save_file(bucket_name, path, value, public=False, content_type='application/
7663
key.set_contents_from_string(value, policy=policy, headers={'Content-Type': content_type})
7764

7865

79-
@_requires_aws
8066
def upload_file(bucket_name, dest_path, src_path, public=False, content_type='application/octet-stream', download_filename=None):
8167
bucket = _buckets[bucket_name]
8268
key = Key(bucket)
@@ -92,12 +78,11 @@ def upload_file(bucket_name, dest_path, src_path, public=False, content_type='ap
9278
}
9379

9480
if download_filename is not None:
95-
headers['Content-Disposition'] = 'attachment;filename="%s"' % download_filename.replace(' ','_')
81+
headers['Content-Disposition'] = 'attachment;filename="%s"' % download_filename.replace(' ', '_')
9682

9783
key.set_contents_from_filename(src_path, policy=policy, headers=headers)
9884

9985

100-
@_requires_aws
10186
def get_signed_url(bucket_name, path, headers=None):
10287
bucket = _buckets[bucket_name]
10388
key = bucket.get_key(path)

0 commit comments

Comments (0)