Merge pull request #2443 from mikegrima/configquery
Adding support for querying AWS Config
This commit is contained in:
commit
d925335f05
10 changed files with 675 additions and 2 deletions
|
|
@ -4,6 +4,7 @@ import boto3
|
|||
from botocore.exceptions import ClientError
|
||||
from nose.tools import assert_raises
|
||||
|
||||
from moto import mock_s3
|
||||
from moto.config import mock_config
|
||||
|
||||
|
||||
|
|
@ -1009,3 +1010,177 @@ def test_delete_delivery_channel():
|
|||
with assert_raises(ClientError) as ce:
|
||||
client.delete_delivery_channel(DeliveryChannelName='testchannel')
|
||||
assert ce.exception.response['Error']['Code'] == 'NoSuchDeliveryChannelException'
|
||||
|
||||
|
||||
@mock_config
@mock_s3
def test_list_discovered_resource():
    """NOTE: We are only really testing the Config part. For each individual service, please add tests
    for that individual service's "list_config_service_resources" function.
    """
    config_client = boto3.client('config', region_name='us-west-2')

    # No buckets exist yet, so the listing must come back empty:
    assert not config_client.list_discovered_resources(resourceType='AWS::S3::Bucket')['resourceIdentifiers']

    # Create some S3 buckets:
    s3_client = boto3.client('s3', region_name='us-west-2')
    bucket_names = ['bucket{}'.format(num) for num in range(10)]
    for name in bucket_names:
        s3_client.create_bucket(Bucket=name, CreateBucketConfiguration={'LocationConstraint': 'us-west-2'})

    # All ten buckets should now be discoverable, in order:
    result = config_client.list_discovered_resources(resourceType='AWS::S3::Bucket')
    assert len(result['resourceIdentifiers']) == 10
    for identifier, name in zip(result['resourceIdentifiers'], bucket_names):
        assert identifier == {
            'resourceType': 'AWS::S3::Bucket',
            'resourceId': name,
            'resourceName': name
        }
    assert not result.get('nextToken')

    # Pagination: a limit of 1 starting at 'bucket1' should hand back 'bucket2' as the next token:
    result = config_client.list_discovered_resources(resourceType='AWS::S3::Bucket', limit=1, nextToken='bucket1')
    assert len(result['resourceIdentifiers']) == 1
    assert result['nextToken'] == 'bucket2'

    # Filtering on a resource name:
    result = config_client.list_discovered_resources(resourceType='AWS::S3::Bucket', limit=1, resourceName='bucket1')
    assert len(result['resourceIdentifiers']) == 1
    assert not result.get('nextToken')

    # Filtering on a resource ID:
    result = config_client.list_discovered_resources(resourceType='AWS::S3::Bucket', limit=1, resourceIds=['bucket1'])
    assert len(result['resourceIdentifiers']) == 1
    assert not result.get('nextToken')

    # Duplicated resource IDs must be collapsed to a single match:
    result = config_client.list_discovered_resources(resourceType='AWS::S3::Bucket', limit=1, resourceIds=['bucket1', 'bucket1'])
    assert len(result['resourceIdentifiers']) == 1
    assert not result.get('nextToken')

    # An unknown resource type yields an empty listing rather than an error:
    assert not config_client.list_discovered_resources(resourceType='LOL::NOT::A::RESOURCE::TYPE')['resourceIdentifiers']

    # A page limit greater than 100 is rejected, and the bad value is echoed in the message:
    with assert_raises(ClientError) as ce:
        config_client.list_discovered_resources(resourceType='AWS::S3::Bucket', limit=101)
    assert '101' in ce.exception.response['Error']['Message']

    # Supplying both resourceName and resourceIds at once is rejected:
    with assert_raises(ClientError) as ce:
        config_client.list_discovered_resources(resourceType='AWS::S3::Bucket', resourceName='whats', resourceIds=['up', 'doc'])
    assert 'Both Resource ID and Resource Name cannot be specified in the request' in ce.exception.response['Error']['Message']

    # More than 20 resource IDs is rejected:
    with assert_raises(ClientError) as ce:
        config_client.list_discovered_resources(resourceType='AWS::S3::Bucket', resourceIds=[str(num) for num in range(21)])
    assert 'The specified list had more than 20 resource ID\'s.' in ce.exception.response['Error']['Message']
|
||||
|
||||
|
||||
@mock_config
@mock_s3
def test_list_aggregate_discovered_resource():
    """NOTE: We are only really testing the Config part. For each individual service, please add tests
    for that individual service's "list_config_service_resources" function.
    """
    client = boto3.client('config', region_name='us-west-2')

    # Without an aggregator, the call must fail:
    with assert_raises(ClientError) as ce:
        client.list_aggregate_discovered_resources(ConfigurationAggregatorName='lolno', ResourceType='AWS::S3::Bucket')
    assert 'The configuration aggregator does not exist' in ce.exception.response['Error']['Message']

    # Create the aggregator:
    account_aggregation_source = {
        'AccountIds': [
            '012345678910',
            '111111111111',
            '222222222222'
        ],
        'AllAwsRegions': True
    }
    client.put_configuration_aggregator(
        ConfigurationAggregatorName='testing',
        AccountAggregationSources=[account_aggregation_source]
    )

    # With nothing created yet:
    assert not client.list_aggregate_discovered_resources(ConfigurationAggregatorName='testing',
                                                          ResourceType='AWS::S3::Bucket')['ResourceIdentifiers']

    # Create some S3 buckets in us-west-2:
    s3_client = boto3.client('s3', region_name='us-west-2')
    for x in range(0, 10):
        s3_client.create_bucket(Bucket='bucket{}'.format(x), CreateBucketConfiguration={'LocationConstraint': 'us-west-2'})

    # And two more in eu-west-1:
    s3_client_eu = boto3.client('s3', region_name='eu-west-1')
    for x in range(10, 12):
        s3_client_eu.create_bucket(Bucket='eu-bucket{}'.format(x), CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'})

    # The aggregate listing should contain all 12 buckets across both regions:
    result = client.list_aggregate_discovered_resources(ConfigurationAggregatorName='testing', ResourceType='AWS::S3::Bucket')
    assert len(result['ResourceIdentifiers']) == 12
    for x in range(0, 10):
        assert result['ResourceIdentifiers'][x] == {
            'SourceAccountId': '123456789012',
            'ResourceType': 'AWS::S3::Bucket',
            'ResourceId': 'bucket{}'.format(x),
            'ResourceName': 'bucket{}'.format(x),
            'SourceRegion': 'us-west-2'
        }
    # BUGFIX: was range(11, 12), which skipped index 10 and never verified eu-bucket10.
    # Both EU buckets (indices 10 and 11) must be checked:
    for x in range(10, 12):
        assert result['ResourceIdentifiers'][x] == {
            'SourceAccountId': '123456789012',
            'ResourceType': 'AWS::S3::Bucket',
            'ResourceId': 'eu-bucket{}'.format(x),
            'ResourceName': 'eu-bucket{}'.format(x),
            'SourceRegion': 'eu-west-1'
        }

    assert not result.get('NextToken')

    # Test that pagination places a proper NextToken in the response and also that the limit works:
    result = client.list_aggregate_discovered_resources(ConfigurationAggregatorName='testing', ResourceType='AWS::S3::Bucket',
                                                        Limit=1, NextToken='bucket1')
    assert len(result['ResourceIdentifiers']) == 1
    assert result['NextToken'] == 'bucket2'

    # Try with a resource name filter:
    result = client.list_aggregate_discovered_resources(ConfigurationAggregatorName='testing', ResourceType='AWS::S3::Bucket',
                                                        Limit=1, NextToken='bucket1', Filters={'ResourceName': 'bucket1'})
    assert len(result['ResourceIdentifiers']) == 1
    assert not result.get('NextToken')

    # Try with a resource ID filter:
    result = client.list_aggregate_discovered_resources(ConfigurationAggregatorName='testing', ResourceType='AWS::S3::Bucket',
                                                        Limit=1, NextToken='bucket1', Filters={'ResourceId': 'bucket1'})
    assert len(result['ResourceIdentifiers']) == 1
    assert not result.get('NextToken')

    # Try with a region filter -- only the two EU buckets should match:
    result = client.list_aggregate_discovered_resources(ConfigurationAggregatorName='testing', ResourceType='AWS::S3::Bucket',
                                                        Filters={'Region': 'eu-west-1'})
    assert len(result['ResourceIdentifiers']) == 2
    assert result['ResourceIdentifiers'][0]['SourceRegion'] == 'eu-west-1'
    assert not result.get('NextToken')

    # Try with both name and ID set to mutually-exclusive values -- no match:
    assert not client.list_aggregate_discovered_resources(ConfigurationAggregatorName='testing', ResourceType='AWS::S3::Bucket',
                                                          Filters={'ResourceId': 'bucket1',
                                                                   'ResourceName': 'bucket2'})['ResourceIdentifiers']

    # Test with an invalid resource type:
    assert not client.list_aggregate_discovered_resources(ConfigurationAggregatorName='testing',
                                                          ResourceType='LOL::NOT::A::RESOURCE::TYPE')['ResourceIdentifiers']

    # Try with correct ID but the wrong region -- no match:
    assert not client.list_aggregate_discovered_resources(ConfigurationAggregatorName='testing', ResourceType='AWS::S3::Bucket',
                                                          Filters={'ResourceId': 'bucket1',
                                                                   'Region': 'us-west-1'})['ResourceIdentifiers']

    # Test with an invalid page limit > 100:
    with assert_raises(ClientError) as ce:
        client.list_aggregate_discovered_resources(ConfigurationAggregatorName='testing', ResourceType='AWS::S3::Bucket', Limit=101)
    assert '101' in ce.exception.response['Error']['Message']
|
||||
|
|
|
|||
|
|
@ -32,6 +32,7 @@ import sure # noqa
|
|||
|
||||
from moto import settings, mock_s3, mock_s3_deprecated
|
||||
import moto.s3.models as s3model
|
||||
from moto.core.exceptions import InvalidNextTokenException
|
||||
|
||||
if settings.TEST_SERVER_MODE:
|
||||
REDUCED_PART_SIZE = s3model.UPLOAD_PART_MIN_SIZE
|
||||
|
|
@ -273,6 +274,7 @@ def test_multipart_invalid_order():
|
|||
bucket.complete_multipart_upload.when.called_with(
|
||||
multipart.key_name, multipart.id, xml).should.throw(S3ResponseError)
|
||||
|
||||
|
||||
@mock_s3_deprecated
|
||||
@reduced_min_part_size
|
||||
def test_multipart_etag_quotes_stripped():
|
||||
|
|
@ -297,6 +299,7 @@ def test_multipart_etag_quotes_stripped():
|
|||
# we should get both parts as the key contents
|
||||
bucket.get_key("the-key").etag.should.equal(EXPECTED_ETAG)
|
||||
|
||||
|
||||
@mock_s3_deprecated
|
||||
@reduced_min_part_size
|
||||
def test_multipart_duplicate_upload():
|
||||
|
|
@ -670,6 +673,7 @@ def test_delete_keys_invalid():
|
|||
result.deleted.should.have.length_of(0)
|
||||
result.errors.should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_boto3_delete_empty_keys_list():
|
||||
with assert_raises(ClientError) as err:
|
||||
|
|
@ -1644,6 +1648,7 @@ def test_boto3_delete_versioned_bucket():
|
|||
|
||||
client.delete_bucket(Bucket='blah')
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_boto3_get_object_if_modified_since():
|
||||
s3 = boto3.client('s3', region_name='us-east-1')
|
||||
|
|
@ -1667,6 +1672,7 @@ def test_boto3_get_object_if_modified_since():
|
|||
e = err.exception
|
||||
e.response['Error'].should.equal({'Code': '304', 'Message': 'Not Modified'})
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_boto3_head_object_if_modified_since():
|
||||
s3 = boto3.client('s3', region_name='us-east-1')
|
||||
|
|
@ -1834,6 +1840,7 @@ def test_boto3_put_bucket_tagging():
|
|||
e.response["Error"]["Code"].should.equal("InvalidTag")
|
||||
e.response["Error"]["Message"].should.equal("Cannot provide multiple Tags with the same key")
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_boto3_get_bucket_tagging():
|
||||
s3 = boto3.client("s3", region_name="us-east-1")
|
||||
|
|
@ -2734,6 +2741,7 @@ def test_boto3_list_object_versions_with_versioning_enabled_late():
|
|||
response = s3.get_object(Bucket=bucket_name, Key=key)
|
||||
response['Body'].read().should.equal(items[-1])
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_boto3_bad_prefix_list_object_versions():
|
||||
s3 = boto3.client('s3', region_name='us-east-1')
|
||||
|
|
@ -2936,6 +2944,7 @@ TEST_XML = """\
|
|||
</ns0:WebsiteConfiguration>
|
||||
"""
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_boto3_bucket_name_too_long():
|
||||
s3 = boto3.client('s3', region_name='us-east-1')
|
||||
|
|
@ -2943,6 +2952,7 @@ def test_boto3_bucket_name_too_long():
|
|||
s3.create_bucket(Bucket='x'*64)
|
||||
exc.exception.response['Error']['Code'].should.equal('InvalidBucketName')
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_boto3_bucket_name_too_short():
|
||||
s3 = boto3.client('s3', region_name='us-east-1')
|
||||
|
|
@ -2950,6 +2960,7 @@ def test_boto3_bucket_name_too_short():
|
|||
s3.create_bucket(Bucket='x'*2)
|
||||
exc.exception.response['Error']['Code'].should.equal('InvalidBucketName')
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_accelerated_none_when_unspecified():
|
||||
bucket_name = 'some_bucket'
|
||||
|
|
@ -2958,6 +2969,7 @@ def test_accelerated_none_when_unspecified():
|
|||
resp = s3.get_bucket_accelerate_configuration(Bucket=bucket_name)
|
||||
resp.shouldnt.have.key('Status')
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_can_enable_bucket_acceleration():
|
||||
bucket_name = 'some_bucket'
|
||||
|
|
@ -2972,6 +2984,7 @@ def test_can_enable_bucket_acceleration():
|
|||
resp.should.have.key('Status')
|
||||
resp['Status'].should.equal('Enabled')
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_can_suspend_bucket_acceleration():
|
||||
bucket_name = 'some_bucket'
|
||||
|
|
@ -2990,6 +3003,7 @@ def test_can_suspend_bucket_acceleration():
|
|||
resp.should.have.key('Status')
|
||||
resp['Status'].should.equal('Suspended')
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_suspending_acceleration_on_not_configured_bucket_does_nothing():
|
||||
bucket_name = 'some_bucket'
|
||||
|
|
@ -3003,6 +3017,7 @@ def test_suspending_acceleration_on_not_configured_bucket_does_nothing():
|
|||
resp = s3.get_bucket_accelerate_configuration(Bucket=bucket_name)
|
||||
resp.shouldnt.have.key('Status')
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_accelerate_configuration_status_validation():
|
||||
bucket_name = 'some_bucket'
|
||||
|
|
@ -3015,6 +3030,7 @@ def test_accelerate_configuration_status_validation():
|
|||
)
|
||||
exc.exception.response['Error']['Code'].should.equal('MalformedXML')
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_accelerate_configuration_is_not_supported_when_bucket_name_has_dots():
|
||||
bucket_name = 'some.bucket.with.dots'
|
||||
|
|
@ -3027,6 +3043,7 @@ def test_accelerate_configuration_is_not_supported_when_bucket_name_has_dots():
|
|||
)
|
||||
exc.exception.response['Error']['Code'].should.equal('InvalidRequest')
|
||||
|
||||
|
||||
def store_and_read_back_a_key(key):
|
||||
s3 = boto3.client('s3', region_name='us-east-1')
|
||||
bucket_name = 'mybucket'
|
||||
|
|
@ -3042,10 +3059,12 @@ def store_and_read_back_a_key(key):
|
|||
response = s3.get_object(Bucket=bucket_name, Key=key)
|
||||
response['Body'].read().should.equal(body)
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_paths_with_leading_slashes_work():
|
||||
store_and_read_back_a_key('/a-key')
|
||||
|
||||
|
||||
@mock_s3
|
||||
def test_root_dir_with_empty_name_works():
|
||||
if os.environ.get('TEST_SERVER_MODE', 'false').lower() == 'true':
|
||||
|
|
@ -3087,3 +3106,70 @@ def test_delete_objects_with_url_encoded_key(key):
|
|||
s3.delete_objects(Bucket=bucket_name, Delete={'Objects': [{'Key': key}]})
|
||||
assert_deleted()
|
||||
|
||||
|
||||
@mock_s3
def test_list_config_discovered_resources():
    """Exercise the S3 backend's Config-query hook (list_config_service_resources) directly.

    Covers the empty case, multi-region listing, name/ID/region filtering,
    pagination, and the invalid-next-token error path.
    """
    from moto.s3.config import s3_config_query

    # Without any buckets:
    # BUGFIX: this call previously passed six positional arguments
    # ("global", "global", None, None, 100, None) while every other call in this
    # test uses the four-positional form (resource_ids, resource_name, limit,
    # next_token) -- made consistent with the rest of the test.
    assert s3_config_query.list_config_service_resources(None, None, 100, None) == ([], None)

    # With 10 buckets in us-west-2:
    for x in range(0, 10):
        s3_config_query.backends['global'].create_bucket('bucket{}'.format(x), 'us-west-2')

    # With 2 buckets in eu-west-1:
    for x in range(10, 12):
        s3_config_query.backends['global'].create_bucket('eu-bucket{}'.format(x), 'eu-west-1')

    # All 12 buckets come back in one page, so no next token:
    result, next_token = s3_config_query.list_config_service_resources(None, None, 100, None)
    assert not next_token
    assert len(result) == 12
    for x in range(0, 10):
        assert result[x] == {
            'type': 'AWS::S3::Bucket',
            'id': 'bucket{}'.format(x),
            'name': 'bucket{}'.format(x),
            'region': 'us-west-2'
        }
    for x in range(10, 12):
        assert result[x] == {
            'type': 'AWS::S3::Bucket',
            'id': 'eu-bucket{}'.format(x),
            'name': 'eu-bucket{}'.format(x),
            'region': 'eu-west-1'
        }

    # With a name:
    result, next_token = s3_config_query.list_config_service_resources(None, 'bucket0', 100, None)
    assert len(result) == 1 and result[0]['name'] == 'bucket0' and not next_token

    # With a region:
    result, next_token = s3_config_query.list_config_service_resources(None, None, 100, None, resource_region='eu-west-1')
    assert len(result) == 2 and not next_token and result[1]['name'] == 'eu-bucket11'

    # With resource ids:
    result, next_token = s3_config_query.list_config_service_resources(['bucket0', 'bucket1'], None, 100, None)
    assert len(result) == 2 and result[0]['name'] == 'bucket0' and result[1]['name'] == 'bucket1' and not next_token

    # With duplicated resource ids (must be de-duplicated):
    result, next_token = s3_config_query.list_config_service_resources(['bucket0', 'bucket0'], None, 100, None)
    assert len(result) == 1 and result[0]['name'] == 'bucket0' and not next_token

    # Pagination: limit of 1 returns the first bucket and the next one as the token:
    result, next_token = s3_config_query.list_config_service_resources(None, None, 1, None)
    assert len(result) == 1 and result[0]['name'] == 'bucket0' and next_token == 'bucket1'

    # Last page: no further token:
    result, next_token = s3_config_query.list_config_service_resources(None, None, 1, 'eu-bucket11', resource_region='eu-west-1')
    assert len(result) == 1 and result[0]['name'] == 'eu-bucket11' and not next_token

    # With a list of buckets and a limit smaller than the list:
    result, next_token = s3_config_query.list_config_service_resources(['bucket0', 'bucket1'], None, 1, None)
    assert len(result) == 1 and result[0]['name'] == 'bucket0' and next_token == 'bucket1'

    # With an invalid page token:
    with assert_raises(InvalidNextTokenException) as inte:
        s3_config_query.list_config_service_resources(None, None, 1, 'notabucket')

    assert 'The nextToken provided is invalid' in inte.exception.message
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue