# Copyright (c) 2020 University Corporation for Atmospheric Research/Unidata.
# Distributed under the terms of the MIT License.
# SPDX-License-Identifier: MIT
import asyncio
import base64
from contextlib import contextmanager
import hashlib
import logging
import queue
import threading

import boto3
import botocore.exceptions

from async_base import Job, Main

logger = logging.getLogger('alchemy.aws')


class UploadS3(Job):
    def __init__(self, bucket, name='UploadS3'):
        super().__init__(name)
        self.bucket_pool = S3BucketPool(bucket)
        # self.sns_pool = SNSBucketPool(args.sns)

    @staticmethod
    def make_key(prod_id, data):
        return prod_id

    def run(self, item):
        with self.bucket_pool.use() as bucket:  # , sns_pool.use() as topic:
            try:
                # Calculate MD5 checksum so S3 can verify upload integrity
                digest = base64.b64encode(hashlib.md5(item.data).digest()).decode('ascii')
                key = self.make_key(*item)

                # Write to S3
                logger.info('Uploading to S3 as: %s', key)
                bucket.put_object(Key=key, Body=item.data, ContentMD5=digest)
            except botocore.exceptions.ClientError as e:
                logger.exception('Error putting object on S3')
                raise IOError from e
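

# A minimal sketch of feeding UploadS3 an item (hypothetical names; the real
# scheduling lives in async_base.Job). run() both unpacks `item` and reads
# item.data, so a namedtuple of (prod_id, data) fits the expected shape:
#
#     from collections import namedtuple
#     Product = namedtuple('Product', ['prod_id', 'data'])
#     UploadS3('my-bucket').run(Product('some/product/key', b'payload bytes'))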


class SharedObjectPool(object):
    """A shared pool of managed objects.

    Objects are created while a lock is held, then are doled out and returned
    using a context manager.
    """
    _create_lock = threading.Lock()  # We want one lock shared among all subclasses

    def __init__(self):
        self._queue = queue.Queue()

    def _create_new(self):
        raise NotImplementedError('Subclasses must implement creating pooled objects.')

    def borrow(self):
        # If the pool looks empty, create a new object rather than blocking on get()
        if self._queue.empty():
            with self._create_lock:
                return self._create_new()
        return self._queue.get()

    def put(self, item):
        self._queue.put(item)

    @contextmanager
    def use(self):
        obj = self.borrow()
        try:
            yield obj
        finally:
            self.put(obj)
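

# Illustrative use of the pool contract: use() hands out an object and always
# returns it to the pool, even if the body raises (bucket and file names are
# hypothetical):
#
#     pool = S3BucketPool('my-bucket')
#     with pool.use() as bucket:
#         bucket.upload_file('local.nc', 'remote.nc')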


class S3BucketPool(SharedObjectPool):
    def __init__(self, bucket):
        super().__init__()
        self.bucket_name = bucket

    def _create_new(self):
        # boto3 sessions are not thread-safe, so each pooled Bucket gets its own
        return boto3.session.Session().resource('s3').Bucket(self.bucket_name)


class SNSBucketPool(SharedObjectPool):
    def __init__(self, name):
        super().__init__()
        # CreateTopic returns the ARN of the existing topic if one with this name exists
        self.sns_arn = boto3.client('sns').create_topic(Name=name)['TopicArn']

    def _create_new(self):
        return boto3.session.Session().resource('sns').Topic(self.sns_arn)
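

# The SNS analogue of the bucket pool (illustrative; topic name and message
# are hypothetical):
#
#     sns_pool = SNSBucketPool('upload-alerts')
#     with sns_pool.use() as topic:
#         topic.publish(Message='new object uploaded')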


async def read_s3_objects(bucket, count=None, **kwargs):
    """Asynchronously yield objects from `bucket` matching the filter, up to `count` (all if None)."""
    for obj in bucket.objects.filter(**kwargs).limit(count):
        yield obj
        # boto3 iterates synchronously; sleep briefly so other tasks get the event loop
        await asyncio.sleep(0.001)


class S3Reader(Main):
    def __init__(self, bucket, **kwargs):
        self.bucket = boto3.session.Session().resource('s3').Bucket(bucket)
        super().__init__(nthreads=kwargs.pop('nthreads'))
        # Remaining keyword arguments pass through as S3 object filters (e.g. Prefix)
        self.filter_kwargs = kwargs

    def __aiter__(self):
        return read_s3_objects(self.bucket, **self.filter_kwargs)
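

# A possible entry point (a sketch, assuming async_base.Main needs no further
# setup and that `nthreads` is required as popped above; names are hypothetical):
#
#     async def main():
#         async for obj in S3Reader('my-bucket', nthreads=2, Prefix='data/'):
#             logger.info('Found: %s', obj.key)
#
#     asyncio.run(main())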