path: root/fbin/file_storage/s3.py
import contextlib
import tempfile

import boto3
import botocore.exceptions
from flask import request, send_file

from .base import BaseStorage


class Storage(BaseStorage):
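    """File storage backend that keeps uploads in S3 via boto3.

    Regular files live in the bucket named by S3_BUCKET; thumbnails live in
    S3_THUMB_BUCKET under the same object key plus a '_thumb' suffix.
    """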
    def __init__(self, app):
        super().__init__(app)
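        # S3_CONFIG is passed straight through to boto3.resource()
        # (e.g. credentials, region, or a custom endpoint_url).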
        self.client = boto3.resource('s3', **self.app.config['S3_CONFIG'])

    def _get_object_key(self, file_hash, user_id):
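        # Keys combine the content hash with the uploading user's id (0 for anonymous uploads).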
        return '{}_{}'.format(file_hash, user_id)

    def get_object_key(self, f, thumb=False):
        key = self._get_object_key(f.hash, f.user_id if f.user_id else 0)
        if thumb:
            key += '_thumb'
        return key

    def upload_file(self, uploaded_file, file_hash, user):
        bucket = self.client.Bucket(self.app.config['S3_BUCKET'])
        key = self._get_object_key(file_hash, user.id if user else 0)
        # upload_fileobj() returns None, so build a reference to the uploaded object
        # ourselves; store_file needs it to delete the object if the upload is rolled back.
        bucket.upload_fileobj(Fileobj=uploaded_file.stream, Key=key)
        obj = self.client.ObjectSummary(self.app.config['S3_BUCKET'], key)
        size = uploaded_file.content_length
        if not size:
            # Fall back to the size reported by S3 when the request had no Content-Length.
            size = obj.size
        return obj, size

    def store_file(self, uploaded_file, file_hash, user, ip):
        obj, size = self.upload_file(uploaded_file, file_hash, user)
        try:
            return self.add_file(file_hash, uploaded_file.filename, size, user, ip)
        except:  # noqa: E722; we want to delete and re-raise on all exceptions
            obj.delete()
            raise

    def file_exists(self, f):
        key = self.get_object_key(f)
        bucket = self.app.config['S3_BUCKET']
        obj = self.client.Object(bucket, key)
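        # load() issues a HEAD request; a 404 error code means the object does not exist.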
        try:
            obj.load()
            return True
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == '404':
                return False
            raise

    def get_file(self, f, thumb=False):
        key = self.get_object_key(f, thumb=thumb)
        if thumb:
            bucket = self.app.config['S3_THUMB_BUCKET']
        else:
            bucket = self.app.config['S3_BUCKET']
        obj = self.client.Object(bucket, key)
        kwargs = {}
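        # Pass any HTTP Range header straight through to S3 so partial requests work.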
        if 'Range' in request.headers:
            kwargs['Range'] = request.headers['Range']
        try:
            data = obj.get(**kwargs)
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == 'NoSuchKey':
                return
            raise
        rv = send_file(data['Body'], attachment_filename=f.filename)
        rv.headers['Content-Length'] = data['ContentLength']
        rv.headers['Accept-Ranges'] = data['AcceptRanges']
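        # S3 answered a ranged request: propagate the range and report 206 Partial Content.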
        if 'ContentRange' in data:
            rv.headers['Content-Range'] = data['ContentRange']
            rv.status_code = 206
        return rv

    def delete_file(self, f):
        obj = self.client.Object(self.app.config['S3_BUCKET'], self.get_object_key(f))
        obj.delete()
        # Thumbnails are stored in the thumbnail bucket, so delete from there as well.
        obj = self.client.Object(self.app.config['S3_THUMB_BUCKET'], self.get_object_key(f, thumb=True))
        obj.delete()

    @contextlib.contextmanager
    def temp_file(self, f):
        obj = self.client.Object(self.app.config['S3_BUCKET'], self.get_object_key(f))
        # Download into a named temporary file; use a distinct name so the
        # file record `f` is not shadowed.
        with tempfile.NamedTemporaryFile() as tmp:
            obj.download_fileobj(tmp)
            tmp.seek(0)
            yield tmp

    def get_thumbnail(self, f):
        try:
            return self.get_file(f, thumb=True)
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == 'NoSuchKey':
                # If thumbnail does not exist, just return None.
                return
            raise

    def store_thumbnail(self, f, stream):
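        # Thumbnails go to the dedicated thumbnail bucket under the '<key>_thumb' key.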
        bucket = self.client.Bucket(self.app.config['S3_THUMB_BUCKET'])
        key = self.get_object_key(f, thumb=True)
        bucket.upload_fileobj(Fileobj=stream, Key=key)