Testing applications that integrate with AWS services can be challenging and expensive. Moto is a Python library that allows you to mock AWS services locally, making your tests faster, more reliable, and cost-effective.
What is Moto?
Moto is a library that allows your tests to easily mock out AWS services. It provides mock implementations of services like S3, DynamoDB, SQS, Lambda, and many others, so you can test your code without making actual AWS API calls.
Installation
$ pip install moto
If you only need specific services, you can install the corresponding extras (quote the brackets so your shell doesn't try to expand them):
$ pip install 'moto[s3,dynamodb,sqs]'
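A version note: the examples in this guide use the per-service decorators from Moto 4.x. Moto 5.x removed them in favor of a single unified decorator, so on newer releases the import changes:

# Moto 4.x: one decorator per service
from moto import mock_s3, mock_dynamodb

# Moto 5.x and later: one decorator covers every service
from moto import mock_aws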
Basic Usage
S3 Mocking
Let’s start with a simple S3 example:
import boto3
from moto import mock_s3

@mock_s3
def test_s3_operations():
    # Create a mock S3 client
    s3_client = boto3.client('s3', region_name='us-east-1')

    # Create a bucket
    s3_client.create_bucket(Bucket='test-bucket')

    # Upload a file
    s3_client.put_object(
        Bucket='test-bucket',
        Key='test-file.txt',
        Body=b'Hello, World!'
    )

    # List objects
    response = s3_client.list_objects_v2(Bucket='test-bucket')
    assert len(response['Contents']) == 1
    assert response['Contents'][0]['Key'] == 'test-file.txt'

    # Get the object back and check its contents
    obj = s3_client.get_object(Bucket='test-bucket', Key='test-file.txt')
    assert obj['Body'].read() == b'Hello, World!'
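In practice you will usually exercise your own functions rather than raw boto3 calls. Here is a minimal sketch of that pattern; the save_report helper is hypothetical, standing in for whatever application code you want to test:

import json

import boto3
from moto import mock_s3


def save_report(bucket: str, name: str, data: dict) -> str:
    """Hypothetical application code: persist a JSON report to S3."""
    key = f"reports/{name}.json"
    boto3.client('s3', region_name='us-east-1').put_object(
        Bucket=bucket, Key=key, Body=json.dumps(data).encode('utf-8')
    )
    return key


@mock_s3
def test_save_report():
    # Arrange: the bucket the application expects must exist in the mock
    s3_client = boto3.client('s3', region_name='us-east-1')
    s3_client.create_bucket(Bucket='reports-bucket')

    # Act: call the application code, not boto3 directly
    key = save_report('reports-bucket', 'daily', {'ok': True})

    # Assert: inspect the mock bucket to verify the side effect
    body = s3_client.get_object(Bucket='reports-bucket', Key=key)['Body'].read()
    assert json.loads(body) == {'ok': True}

Because save_report creates its client at call time, inside the decorated test, the client is transparently patched by Moto.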
DynamoDB Mocking
from moto import mock_dynamodb
import boto3

@mock_dynamodb
def test_dynamodb_operations():
    # Create a DynamoDB resource
    dynamodb = boto3.resource('dynamodb', region_name='us-east-1')

    # Create table
    table = dynamodb.create_table(
        TableName='test-table',
        KeySchema=[
            {'AttributeName': 'id', 'KeyType': 'HASH'}
        ],
        AttributeDefinitions=[
            {'AttributeName': 'id', 'AttributeType': 'S'}
        ],
        BillingMode='PAY_PER_REQUEST'
    )

    # Wait for table to be active
    table.wait_until_exists()

    # Put item
    table.put_item(Item={'id': '1', 'name': 'John', 'age': 30})

    # Get item
    response = table.get_item(Key={'id': '1'})
    assert response['Item']['name'] == 'John'
    assert response['Item']['age'] == 30
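One detail worth knowing: the DynamoDB resource API deserializes numbers as decimal.Decimal, not int or float. Equality against an int still holds (which is why the assertion above passes), but arithmetic and JSON serialization can surprise you. A quick self-contained illustration:

from decimal import Decimal

import boto3
from moto import mock_dynamodb


@mock_dynamodb
def test_numbers_come_back_as_decimal():
    dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
    table = dynamodb.create_table(
        TableName='test-table',
        KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
        BillingMode='PAY_PER_REQUEST'
    )
    table.put_item(Item={'id': '1', 'age': 30})

    item = table.get_item(Key={'id': '1'})['Item']
    assert isinstance(item['age'], Decimal)  # boto3 deserializes numbers as Decimal
    assert item['age'] == 30                 # equality against int still holds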
Advanced Usage
Using Decorators with Classes
from moto import mock_s3, mock_dynamodb
import boto3

@mock_s3
@mock_dynamodb
class TestAWSIntegration:
    def setup_method(self):
        """Setup method runs before each test"""
        self.s3_client = boto3.client('s3', region_name='us-east-1')
        self.dynamodb = boto3.resource('dynamodb', region_name='us-east-1')

        # Create S3 bucket
        self.s3_client.create_bucket(Bucket='my-app-bucket')

        # Create DynamoDB table
        self.table = self.dynamodb.create_table(
            TableName='user-data',
            KeySchema=[{'AttributeName': 'user_id', 'KeyType': 'HASH'}],
            AttributeDefinitions=[{'AttributeName': 'user_id', 'AttributeType': 'S'}],
            BillingMode='PAY_PER_REQUEST'
        )
        self.table.wait_until_exists()

    def test_file_upload_and_metadata_storage(self):
        """Test uploading a file to S3 and storing its metadata in DynamoDB"""
        # Upload file to S3
        self.s3_client.put_object(
            Bucket='my-app-bucket',
            Key='user-uploads/user123/document.pdf',
            Body=b'PDF content here'
        )

        # Store metadata in DynamoDB
        self.table.put_item(Item={
            'user_id': 'user123',
            'file_key': 'user-uploads/user123/document.pdf',
            'file_size': 1024,
            'upload_date': '2025-01-15'
        })

        # Verify S3 object exists
        response = self.s3_client.get_object(
            Bucket='my-app-bucket',
            Key='user-uploads/user123/document.pdf'
        )
        assert response['Body'].read() == b'PDF content here'

        # Verify DynamoDB record
        item = self.table.get_item(Key={'user_id': 'user123'})
        assert item['Item']['file_key'] == 'user-uploads/user123/document.pdf'
Using Context Managers
from moto import mock_s3
import boto3

def test_with_context_manager():
    with mock_s3():
        s3_client = boto3.client('s3', region_name='us-east-1')
        s3_client.create_bucket(Bucket='context-bucket')

        # Your test code here
        response = s3_client.list_buckets()
        assert len(response['Buckets']) == 1
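The decorator and context-manager forms are both sugar over explicit start()/stop() calls, which is handy in unittest-style setUp/tearDown:

import unittest

import boto3
from moto import mock_s3


class S3TestCase(unittest.TestCase):
    def setUp(self):
        self.mock = mock_s3()
        self.mock.start()  # patching begins here
        self.s3_client = boto3.client('s3', region_name='us-east-1')
        self.s3_client.create_bucket(Bucket='unittest-bucket')

    def tearDown(self):
        self.mock.stop()  # patching (and all mock state) ends here

    def test_bucket_exists(self):
        buckets = self.s3_client.list_buckets()['Buckets']
        self.assertEqual(buckets[0]['Name'], 'unittest-bucket')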
Real-World Example: File Processing Service
Let’s create a more realistic example - a service that processes uploaded files:
import boto3
from moto import mock_s3, mock_sqs
import json

class FileProcessor:
    def __init__(self):
        # Pin a region: boto3 raises NoRegionError if none is configured,
        # and Moto does not set one for you
        self.s3_client = boto3.client('s3', region_name='us-east-1')
        self.sqs_client = boto3.client('sqs', region_name='us-east-1')
        self.queue_url = 'https://sqs.us-east-1.amazonaws.com/123456789012/file-processing-queue'

    def upload_file(self, bucket_name, file_key, file_content):
        """Upload file to S3 and send a processing message to SQS"""
        # Upload to S3
        self.s3_client.put_object(
            Bucket=bucket_name,
            Key=file_key,
            Body=file_content
        )

        # Send processing message to SQS
        message = {
            'bucket': bucket_name,
            'key': file_key,
            'action': 'process'
        }
        self.sqs_client.send_message(
            QueueUrl=self.queue_url,
            MessageBody=json.dumps(message)
        )

    def process_file(self, bucket_name, file_key):
        """Process a file and store its metadata alongside it"""
        # Get file from S3
        response = self.s3_client.get_object(Bucket=bucket_name, Key=file_key)
        content = response['Body'].read()

        # Create metadata
        metadata = {
            'file_size': len(content),
            'content_type': 'application/octet-stream',
            'processed': True
        }

        # Store metadata as a separate object
        self.s3_client.put_object(
            Bucket=bucket_name,
            Key=f"{file_key}.metadata",
            Body=json.dumps(metadata)
        )
@mock_s3
@mock_sqs
def test_file_processing_workflow():
    # Setup
    s3_client = boto3.client('s3', region_name='us-east-1')
    sqs_client = boto3.client('sqs', region_name='us-east-1')

    # Create S3 bucket
    s3_client.create_bucket(Bucket='file-processing-bucket')

    # Create SQS queue
    queue_response = sqs_client.create_queue(QueueName='file-processing-queue')
    queue_url = queue_response['QueueUrl']

    # Initialize processor and point it at the mocked queue
    processor = FileProcessor()
    processor.queue_url = queue_url

    # Test file upload and processing
    test_content = b'This is a test file content'
    processor.upload_file('file-processing-bucket', 'test-file.txt', test_content)

    # Verify file exists in S3
    response = s3_client.get_object(Bucket='file-processing-bucket', Key='test-file.txt')
    assert response['Body'].read() == test_content

    # Verify message in SQS
    messages = sqs_client.receive_message(QueueUrl=queue_url)
    assert 'Messages' in messages
    assert len(messages['Messages']) == 1

    message_body = json.loads(messages['Messages'][0]['Body'])
    assert message_body['bucket'] == 'file-processing-bucket'
    assert message_body['key'] == 'test-file.txt'

    # Test file processing
    processor.process_file('file-processing-bucket', 'test-file.txt')

    # Verify metadata file was created
    metadata_response = s3_client.get_object(
        Bucket='file-processing-bucket',
        Key='test-file.txt.metadata'
    )
    metadata = json.loads(metadata_response['Body'].read())
    assert metadata['file_size'] == len(test_content)
    assert metadata['processed'] is True
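In a real deployment, something has to drain that queue. A minimal worker sketch could look like the following; the drain_queue helper is an assumption for illustration, not part of FileProcessor above:

import json


def drain_queue(processor, sqs_client, queue_url):
    """Receive pending messages, process each file, then delete the message."""
    while True:
        response = sqs_client.receive_message(
            QueueUrl=queue_url, MaxNumberOfMessages=10, WaitTimeSeconds=0
        )
        messages = response.get('Messages', [])
        if not messages:
            break  # queue is empty
        for message in messages:
            body = json.loads(message['Body'])
            processor.process_file(body['bucket'], body['key'])
            # Deleting the message acknowledges successful processing
            sqs_client.delete_message(
                QueueUrl=queue_url, ReceiptHandle=message['ReceiptHandle']
            )

Because Moto mocks SQS faithfully enough to return receipt handles, the same loop can be exercised inside the mocked test above.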
Testing with Pytest Fixtures
import pytest
from moto import mock_s3
import boto3

@pytest.fixture
def s3_client():
    with mock_s3():
        yield boto3.client('s3', region_name='us-east-1')

@pytest.fixture
def test_bucket(s3_client):
    bucket_name = 'test-bucket'
    s3_client.create_bucket(Bucket=bucket_name)
    return bucket_name

def test_s3_operations_with_fixtures(s3_client, test_bucket):
    # Upload file
    s3_client.put_object(
        Bucket=test_bucket,
        Key='test-file.txt',
        Body=b'Hello, World!'
    )

    # Verify upload
    response = s3_client.get_object(Bucket=test_bucket, Key='test-file.txt')
    assert response['Body'].read() == b'Hello, World!'
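A common companion fixture, recommended in Moto's own documentation, pins dummy credentials so a misconfigured test can never reach real AWS:

import os

import boto3
import pytest
from moto import mock_s3


@pytest.fixture
def aws_credentials():
    """Dummy credentials so boto3 never picks up a real profile."""
    os.environ['AWS_ACCESS_KEY_ID'] = 'testing'
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'testing'
    os.environ['AWS_SESSION_TOKEN'] = 'testing'
    os.environ['AWS_DEFAULT_REGION'] = 'us-east-1'


@pytest.fixture
def s3_client(aws_credentials):
    # Depending on aws_credentials guarantees the dummy env vars are set
    # before the mock (and any boto3 client) is created
    with mock_s3():
        yield boto3.client('s3', region_name='us-east-1')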
Supported Services
Moto supports a wide range of AWS services:
- Storage: S3, EBS, EFS
- Database: DynamoDB, RDS, ElastiCache
- Compute: EC2, Lambda, ECS, EKS
- Messaging: SQS, SNS, EventBridge
- Security: IAM, KMS, Secrets Manager
- Monitoring: CloudWatch, X-Ray
- And many more…
Best Practices
1. Use the Right Decorator for Your Moto Version

# Moto 4.x: mock only the services your test touches
@mock_s3
def test_s3_operation():
    pass

# Moto 5.x: the per-service decorators are gone; mock_aws is the single entry point
from moto import mock_aws

@mock_aws
def test_s3_operation():
    pass
2. Reset State Between Tests
from moto import mock_s3
import boto3

@mock_s3
def test_isolated_s3_operations():
    # Each decorated test starts with a fresh, empty S3 state
    s3_client = boto3.client('s3', region_name='us-east-1')
    # Your test code here
3. Use Environment Variables for Configuration
import os
from moto import mock_s3
import boto3

@mock_s3
def test_with_config():
    # Use environment variables for configuration
    bucket_name = os.getenv('TEST_BUCKET', 'default-test-bucket')
    s3_client = boto3.client('s3', region_name='us-east-1')
    s3_client.create_bucket(Bucket=bucket_name)
Common Gotchas
1. Region Requirements
boto3 needs a region to construct a client or resource, and Moto does not set one for you. Pass region_name explicitly (or set AWS_DEFAULT_REGION):

import boto3
from moto import mock_dynamodb

# Without an explicit region, boto3 raises NoRegionError
@mock_dynamodb
def test_dynamodb():
    dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
    # Your test code
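A related S3 quirk that Moto reproduces: outside us-east-1, create_bucket requires an explicit LocationConstraint matching the client's region:

import boto3
from moto import mock_s3


@mock_s3
def test_bucket_outside_us_east_1():
    s3_client = boto3.client('s3', region_name='eu-west-1')
    # Without this configuration, S3 (and Moto) rejects the request
    s3_client.create_bucket(
        Bucket='eu-bucket',
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}
    )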
2. Resource vs Client
import boto3

# Use the client API for simple, low-level operations
s3_client = boto3.client('s3')

# Use the resource API for object-oriented operations
s3_resource = boto3.resource('s3')
bucket = s3_resource.Bucket('my-bucket')
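Both APIs hit the same mock backend, so you can mix them freely in a test. A small sketch of the same upload done both ways:

import boto3
from moto import mock_s3


@mock_s3
def test_client_vs_resource():
    client = boto3.client('s3', region_name='us-east-1')
    client.create_bucket(Bucket='my-bucket')
    client.put_object(Bucket='my-bucket', Key='a.txt', Body=b'via client')

    # The resource API sees the object the client created, and vice versa
    bucket = boto3.resource('s3', region_name='us-east-1').Bucket('my-bucket')
    bucket.put_object(Key='b.txt', Body=b'via resource')

    assert len(client.list_objects_v2(Bucket='my-bucket')['Contents']) == 2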
3. Mock State Persistence
import boto3
from moto import mock_s3

# Mock state persists for the lifetime of the decorated function
@mock_s3
def test_s3_persistence():
    s3_client = boto3.client('s3', region_name='us-east-1')
    s3_client.create_bucket(Bucket='persistent-bucket')

    # This works - the bucket still exists within the same test
    response = s3_client.list_buckets()
    assert len(response['Buckets']) == 1
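The flip side is that state does not survive past the decorated function, so a second test starts clean:

import boto3
from moto import mock_s3


@mock_s3
def test_fresh_state():
    # The bucket created in test_s3_persistence is gone; every
    # decorated test begins with an empty mock account
    s3_client = boto3.client('s3', region_name='us-east-1')
    assert s3_client.list_buckets()['Buckets'] == []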
Conclusion
Moto is an essential tool for testing AWS integrations in Python. It allows you to:
- Test locally without AWS credentials
- Run tests faster without network calls
- Reduce costs by avoiding AWS API charges
- Improve reliability by eliminating external dependencies
By following the patterns shown in this guide, you can effectively mock AWS services in your tests, making your code more robust and maintainable.
Remember: Moto mocks are not 100% identical to real AWS behavior, so always test critical paths with real AWS services in staging environments.