Here's a class I created to integrate my site with Amazon's storage service (S3).

I like this because in the admin interface it looks pretty much like a regular file field: you just click to upload. In theory you could integrate it into any oldforms-based system.

You can set a default bucket for the whole system in your settings module, or override it on specific fields. You can specify a key manually or let the class generate one for you.
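For example, a settings module and a model using the field might look something like this (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY and DEFAULT_BUCKET are the settings the class below reads; the Document model, bucket names and import path are just placeholders):

{{{
#!python
# settings.py
AWS_ACCESS_KEY_ID = 'your-access-key-id'
AWS_SECRET_ACCESS_KEY = 'your-secret-access-key'
DEFAULT_BUCKET = 'my-site-media'  # system-wide default bucket

# models.py (assumes the class below was saved as myproject/s3field.py)
from django.db import models
from myproject.s3field import S3FileField

class Document(models.Model):
    title = models.CharField(maxlength=100)
    # Uses DEFAULT_BUCKET and generates a key automatically on upload.
    attachment = S3FileField(blank=True)
    # Per-field bucket override; is_image=True adds width/height columns.
    photo = S3FileField(bucket='my-site-photos', is_image=True, blank=True)
}}}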

This hasn't been tested extensively, so I invite you to contribute any fixes right here on the page. Maybe the Django gods will see fit to include something like this in Django itself.

Call get_WHATEVER_url() on the object to get the public URL for viewing/downloading the stored file.
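For instance, with the hypothetical Document model above:

{{{
#!python
doc = Document.objects.get(pk=1)
print doc.get_attachment_url()  # bare public URL built from the stored bucket and key
print doc.get_photo_url()
}}}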

Currently, if you change the content type on the object, the change doesn't take effect on S3 until the next time you upload a file.

{{{
#!python
from amazon import S3
from django.conf import settings
from django.db import models
from mimetypes import guess_type
from django.core import validators
from django import oldforms
from django.dispatch import dispatcher
from django.utils.functional import curry
from django.utils.translation import gettext_lazy
from django.db.models import signals
from PIL import Image
from StringIO import StringIO

# Shared S3 connection for uploads/deletes, plus a generator for building public URLs.
conn = S3.AWSAuthConnection(settings.AWS_ACCESS_KEY_ID,
                            settings.AWS_SECRET_ACCESS_KEY)
generator = S3.QueryStringAuthGenerator(settings.AWS_ACCESS_KEY_ID,
                                        settings.AWS_SECRET_ACCESS_KEY)

class S3FileField(models.FileField):
    def __init__(self, verbose_name=None, name=None, bucket='', is_image=False, **kwargs):
        models.FileField.__init__(self, verbose_name, name, upload_to="s3", **kwargs)
        self.bucket = bucket
        self.is_image = is_image

    def get_manipulator_fields(self, opts, manipulator, change, name_prefix='', rel=False, follow=True):
        field_list = models.Field.get_manipulator_fields(self, opts, manipulator, change, name_prefix, rel, follow)
        if not self.blank:
            if rel:
                # This validator makes sure FileFields work in a related context.
                class RequiredFileField(object):
                    def __init__(self, other_field_names, other_file_field_name):
                        self.other_field_names = other_field_names
                        self.other_file_field_name = other_file_field_name
                        self.always_test = True
                    def __call__(self, field_data, all_data):
                        if not all_data.get(self.other_file_field_name, False):
                            c = validators.RequiredIfOtherFieldsGiven(self.other_field_names, gettext_lazy("This field is required."))
                            c(field_data, all_data)
                # First, get the core fields, if any.
                core_field_names = []
                for f in opts.fields:
                    if f.core and f != self:
                        core_field_names.extend(f.get_manipulator_field_names(name_prefix))
                # Now, if there are any, add the validator to this FormField.
                if core_field_names:
                    field_list[0].validator_list.append(RequiredFileField(core_field_names, field_list[1].field_name))
            else:
                v = validators.RequiredIfOtherFieldNotGiven(field_list[1].field_name, gettext_lazy("This field is required."))
                v.always_test = True
                field_list[0].validator_list.append(v)
        field_list[0].is_required = field_list[1].is_required = False

        return field_list

    def get_internal_type(self):
        return "FileField"

    def contribute_to_class(self, cls, name):
        super(S3FileField, self).contribute_to_class(cls, name)
        # Companion columns that record where and what was uploaded.
        models.CharField(maxlength=200, blank=self.blank, null=self.null).contribute_to_class(cls, "%s_key" % (self.name))
        models.CharField(maxlength=200, blank=self.blank, null=self.null, default=(self.bucket or settings.DEFAULT_BUCKET)).contribute_to_class(cls, "%s_bucket" % (self.name))
        models.CharField(maxlength=200, blank=True, null=True, default="").contribute_to_class(cls, "%s_content_type" % (self.name))
        models.IntegerField(blank=True, null=True).contribute_to_class(cls, "%s_size" % (self.name))
        if self.is_image:
            models.IntegerField(blank=True, null=True).contribute_to_class(cls, "%s_width" % (self.name))
            models.IntegerField(blank=True, null=True).contribute_to_class(cls, "%s_height" % (self.name))

        # Getter for the file url
        def get_url(instance, field):
            return field.get_url(instance)
        setattr(cls, 'get_%s_url' % self.name, curry(get_url, field=self))

        # Remove the object from S3 when the model instance is deleted.
        dispatcher.connect(self.delete_file, signal=signals.post_delete, sender=cls)

    def delete_file(self, instance):
        if getattr(instance, self.attname):
            bucket = self.get_bucket(instance)
            key = self.get_key(instance)
            conn.delete(bucket, key)

    def get_url(self, instance):
        bucket = self.get_bucket(instance)
        key = self.get_key(instance)
        if bucket and key:
            url = generator.make_bare_url(bucket, key)
            return url
        return None

    def get_bucket(self, instance):
        return getattr(instance, "%s_bucket" % self.name)

    def set_bucket(self, instance, bucket):
        setattr(instance, "%s_bucket" % self.name, bucket)

    def get_key(self, instance):
        return getattr(instance, "%s_key" % self.name)

    def set_key(self, instance, key):
        setattr(instance, "%s_key" % self.name, key)

    def get_content_type(self, instance):
        return getattr(instance, "%s_content_type" % self.name)

    def set_content_type(self, instance, content_type):
        setattr(instance, "%s_content_type" % self.name, content_type)

    def get_size(self, instance):
        return getattr(instance, "%s_size" % self.name)

    def set_size(self, instance, size):
        setattr(instance, "%s_size" % self.name, size)

    def get_filename(self, instance):
        return getattr(instance, self.name)

    def set_filename(self, instance, filename):
        setattr(instance, self.name, filename)

    def get_width(self, instance):
        return getattr(instance, "%s_width" % self.name)

    def set_width(self, instance, width):
        setattr(instance, "%s_width" % self.name, width)

    def get_height(self, instance):
        return getattr(instance, "%s_height" % self.name)

    def set_height(self, instance, height):
        setattr(instance, "%s_height" % self.name, height)

    def get_manipulator_field_objs(self):
        if self.is_image:
            uploadType = oldforms.ImageUploadField
        else:
            uploadType = oldforms.FileUploadField
        return [uploadType, oldforms.HiddenField]

    def get_manipulator_field_names(self, name_prefix):
        return [name_prefix + self.name + '_file', name_prefix + self.name]

    def save_file(self, new_data, new_object, original_object, change, rel, save=True):
        upload_field_name = self.get_manipulator_field_names('')[0]
        if new_data.get(upload_field_name, False):
            if rel:
                new_content = new_data[upload_field_name][0]["content"]
                new_filename = new_data[upload_field_name][0]["filename"]
            else:
                new_content = new_data[upload_field_name]["content"]
                new_filename = new_data[upload_field_name]["filename"]

            self.set_filename(new_object, new_filename)
            self.set_size(new_object, len(new_content))

            key = new_data["%s_key" % self.name]
            if not key:
                # No key supplied, so build one from the model name, pk and field name.
                key = "_".join((new_object.__class__.__name__.lower(), str(new_object.id), self.name))
                self.set_key(new_object, key)
            bucket = new_data["%s_bucket" % self.name]
            if not bucket:
                # Fall back to the per-field bucket, then the site-wide default.
                bucket = self.bucket or settings.DEFAULT_BUCKET
            content_type = new_data["%s_content_type" % self.name]
            if new_filename and not content_type:
                content_type = guess_type(new_filename)[0]
            if not content_type:
                content_type = "application/octet-stream"
            conn.put(bucket, key, S3.S3Object(new_content),
                     {'x-amz-acl': 'public-read', 'Content-Type': content_type})

            if self.is_image:
                # Calculate image width/height
                img = Image.open(StringIO(new_content))
                width, height = img.size
                self.set_width(new_object, width)
                self.set_height(new_object, height)
}}}
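In addition to the file name, the field contributes companion columns for the key, bucket, content type, size and (for images) width/height, and deleting the model instance also deletes the object from S3 via the post_delete signal. A quick sketch, again using the hypothetical Document model from above:

{{{
#!python
doc = Document.objects.get(pk=1)
print doc.photo_bucket, doc.photo_key         # where the file lives on S3
print doc.photo_content_type, doc.photo_size  # what was uploaded
print doc.photo_width, doc.photo_height       # filled in because is_image=True

doc.delete()  # also removes the object from S3 (post_delete signal)
}}}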

[http://bitbucket.org/david/django-storages/wiki/Home Django Storages] using S3 Boto is the best solution for using Amazon S3 as a custom file storage backend in Django.