@metadaddy
Last active October 30, 2024 01:13
Test aborting an S3 multipart upload while a part is in flight
import asyncio
import datetime
import functools

from dotenv import load_dotenv

from aiobotocore.session import get_session

ITERATIONS = 100

# Set the following environment variables, or add them to a .env file
#
# AWS_ACCESS_KEY_ID
# AWS_SECRET_ACCESS_KEY
# AWS_REGION
# AWS_ENDPOINT_URL
load_dotenv()
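
# A hypothetical .env illustrating the expected KEY=value format (placeholder
# values, not real credentials or a real endpoint):
#
#   AWS_ACCESS_KEY_ID=<your-access-key-id>
#   AWS_SECRET_ACCESS_KEY=<your-secret-access-key>
#   AWS_REGION=us-east-1
#   AWS_ENDPOINT_URL=https://s3.us-east-1.amazonaws.com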

def handler(name, task):
    # Done-callback: log the completed task's result, or its exception on failure
    exc = task.exception()
    print(datetime.datetime.now(), name, task.result() if exc is None else exc)

async def go():
    """
    Based on https://github.com/aio-libs/aiobotocore/blob/master/examples/simple.py
    """
    bucket = 'metadaddy-tester'
    filename = 'dummy.bin'
    folder = 'aiobotocore'
    key = f'{folder}/{filename}'

    session = get_session()
    async with session.create_client('s3') as client:
        data = b'\x01' * 1024
        failures = 0
        for count in range(0, ITERATIONS):
            create = await client.create_multipart_upload(Bucket=bucket, Key=key)
            print(datetime.datetime.now(), 'create_multipart_upload', create)

            # Start the part upload, but don't await it...
            upload = asyncio.ensure_future(client.upload_part(
                Body=data,
                Bucket=bucket,
                Key=key,
                PartNumber=1,
                UploadId=create['UploadId'],
            ))
            upload.add_done_callback(
                functools.partial(handler, 'upload_part'))

            # ...so that the abort races the in-flight upload_part
            delete = asyncio.ensure_future(client.abort_multipart_upload(
                Bucket=bucket,
                Key=key,
                UploadId=create['UploadId'],
            ))
            delete.add_done_callback(
                functools.partial(handler, 'abort_multipart_upload'))

            # Wait for both operations to settle, then count any exceptions
            await asyncio.wait([upload, delete])
            if upload.exception() or delete.exception():
                failures += 1

        print(f'{failures} failures in {ITERATIONS} iterations')


if __name__ == '__main__':
    asyncio.run(go())
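
A possible companion check (a minimal sketch, not part of the original gist): after a test run, list any multipart uploads still pending under the test prefix to confirm that every abort_multipart_upload actually cleaned up. The bucket and prefix below are assumed to match the values hard-coded in the script above.

import asyncio

from dotenv import load_dotenv
from aiobotocore.session import get_session

load_dotenv()


async def check_pending(bucket, prefix):
    session = get_session()
    async with session.create_client('s3') as client:
        # Any uploads listed here were created but never completed or aborted
        response = await client.list_multipart_uploads(Bucket=bucket, Prefix=prefix)
        uploads = response.get('Uploads', [])
        print(f'{len(uploads)} multipart upload(s) still pending under {prefix!r}')


if __name__ == '__main__':
    asyncio.run(check_pending('metadaddy-tester', 'aiobotocore/'))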